diff --git a/gen/abusiveexperiencereport1-cli/Cargo.toml b/gen/abusiveexperiencereport1-cli/Cargo.toml index 630ff1920a..61941b64ac 100644 --- a/gen/abusiveexperiencereport1-cli/Cargo.toml +++ b/gen/abusiveexperiencereport1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-abusiveexperiencereport1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Abusive Experience Report (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/abusiveexperiencereport1-cli" @@ -20,13 +20,13 @@ name = "abusiveexperiencereport1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-abusiveexperiencereport1] path = "../abusiveexperiencereport1" -version = "4.0.1+20220303" +version = "5.0.2+20230114" + diff --git a/gen/abusiveexperiencereport1-cli/README.md b/gen/abusiveexperiencereport1-cli/README.md index 413331d7a0..55907d45ff 100644 --- a/gen/abusiveexperiencereport1-cli/README.md +++ b/gen/abusiveexperiencereport1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Abusive Experience Report* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *Abusive Experience Report* API at revision *20230114*. The CLI is at version *5.0.2*. 
```bash abusiveexperiencereport1 [options] diff --git a/gen/abusiveexperiencereport1-cli/mkdocs.yml b/gen/abusiveexperiencereport1-cli/mkdocs.yml index 7b9be0c7da..d539e95c30 100644 --- a/gen/abusiveexperiencereport1-cli/mkdocs.yml +++ b/gen/abusiveexperiencereport1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Abusive Experience Report v4.0.1+20220303 +site_name: Abusive Experience Report v5.0.2+20230114 site_url: http://byron.github.io/google-apis-rs/google-abusiveexperiencereport1-cli site_description: A complete library to interact with Abusive Experience Report (protocol v1) @@ -7,10 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/abusiveexperienc docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['sites_get.md', 'Sites', 'Get'] -- ['violating-sites_list.md', 'Violating Sites', 'List'] +nav: +- Home: 'index.md' +- 'Sites': + - 'Get': 'sites_get.md' +- 'Violating Sites': + - 'List': 'violating-sites_list.md' theme: readthedocs diff --git a/gen/abusiveexperiencereport1-cli/src/client.rs b/gen/abusiveexperiencereport1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/abusiveexperiencereport1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: 
JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/abusiveexperiencereport1-cli/src/main.rs b/gen/abusiveexperiencereport1-cli/src/main.rs index 9c5bd03e70..955365c1a5 100644 --- a/gen/abusiveexperiencereport1-cli/src/main.rs +++ b/gen/abusiveexperiencereport1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_abusiveexperiencereport1::{api, Error, oauth2}; +use google_abusiveexperiencereport1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -297,7 +296,7 @@ async fn main() { let mut app = App::new("abusiveexperiencereport1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230114") .about("Views Abusive Experience Report data, and gets a list of sites that have a significant number of abusive experiences.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_abusiveexperiencereport1_cli") .arg(Arg::with_name("folder") diff --git a/gen/abusiveexperiencereport1/Cargo.toml b/gen/abusiveexperiencereport1/Cargo.toml index 7c2a0ba477..1f53b03d8e 100644 --- a/gen/abusiveexperiencereport1/Cargo.toml +++ b/gen/abusiveexperiencereport1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-abusiveexperiencereport1" -version = "5.0.2-beta-1+20230114" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Abusive Experience Report (protocol v1)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/abusiveexperiencereport1" homepage = "https://developers.google.com/abusive-experience-report/" -documentation = "https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114" +documentation = "https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114" license = "MIT" keywords = ["abusiveexperiencerep", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/abusiveexperiencereport1/README.md b/gen/abusiveexperiencereport1/README.md index 2678ca814c..4ccfed6034 100644 --- a/gen/abusiveexperiencereport1/README.md +++ b/gen/abusiveexperiencereport1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-abusiveexperiencereport1` library allows access to all features of the *Google Abusive Experience Report* service. -This documentation was generated from *Abusive Experience Report* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *abusiveexperiencereport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Abusive Experience Report* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *abusiveexperiencereport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Abusive Experience Report* *v1* API can be found at the [official documentation site](https://developers.google.com/abusive-experience-report/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/AbusiveExperienceReport) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/AbusiveExperienceReport) ... 
* sites - * [*get*](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/api::SiteGetCall) + * [*get*](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/api::SiteGetCall) * violating sites - * [*list*](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/api::ViolatingSiteListCall) + * [*list*](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/api::ViolatingSiteListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/AbusiveExperienceReport)** +* **[Hub](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/AbusiveExperienceReport)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::CallBuilder) -* **[Resources](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::Part)** + * **[Parts](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -120,17 +120,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -140,29 +140,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::Delegate) to the -[Method Builder](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::Delegate) to the +[Method Builder](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::RequestValue) and -[decodable](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::RequestValue) and +[decodable](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-abusiveexperiencereport1/5.0.2-beta-1+20230114/google_abusiveexperiencereport1/client::RequestValue) are moved +* [request values](https://docs.rs/google-abusiveexperiencereport1/5.0.2+20230114/google_abusiveexperiencereport1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/abusiveexperiencereport1/src/api.rs b/gen/abusiveexperiencereport1/src/api.rs index 37d64fef38..6ebc664dab 100644 --- a/gen/abusiveexperiencereport1/src/api.rs +++ b/gen/abusiveexperiencereport1/src/api.rs @@ -97,7 +97,7 @@ impl<'a, S> AbusiveExperienceReport { AbusiveExperienceReport { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://abusiveexperiencereport.googleapis.com/".to_string(), _root_url: "https://abusiveexperiencereport.googleapis.com/".to_string(), } @@ -111,7 +111,7 @@ impl<'a, S> AbusiveExperienceReport { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/abusiveexperiencereport1/src/client.rs b/gen/abusiveexperiencereport1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/abusiveexperiencereport1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after 
the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. 
The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. 
- fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. 
- /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. 
- MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." 
- ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. -pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. 
-#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/abusiveexperiencereport1/src/lib.rs b/gen/abusiveexperiencereport1/src/lib.rs index f8e9c9d6a7..69d4bb298f 100644 --- a/gen/abusiveexperiencereport1/src/lib.rs +++ b/gen/abusiveexperiencereport1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Abusive Experience Report* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *abusiveexperiencereport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Abusive Experience Report* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *abusiveexperiencereport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Abusive Experience Report* *v1* API can be found at the //! [official documentation site](https://developers.google.com/abusive-experience-report/). diff --git a/gen/acceleratedmobilepageurl1-cli/Cargo.toml b/gen/acceleratedmobilepageurl1-cli/Cargo.toml index 112ca74c71..c84100168b 100644 --- a/gen/acceleratedmobilepageurl1-cli/Cargo.toml +++ b/gen/acceleratedmobilepageurl1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-acceleratedmobilepageurl1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Acceleratedmobilepageurl (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/acceleratedmobilepageurl1-cli" @@ -20,13 +20,13 @@ name = "acceleratedmobilepageurl1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-acceleratedmobilepageurl1] path = "../acceleratedmobilepageurl1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git 
a/gen/acceleratedmobilepageurl1-cli/README.md b/gen/acceleratedmobilepageurl1-cli/README.md index 082f6de8da..4bb1c3e1c5 100644 --- a/gen/acceleratedmobilepageurl1-cli/README.md +++ b/gen/acceleratedmobilepageurl1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Acceleratedmobilepageurl* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Acceleratedmobilepageurl* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash acceleratedmobilepageurl1 [options] diff --git a/gen/acceleratedmobilepageurl1-cli/mkdocs.yml b/gen/acceleratedmobilepageurl1-cli/mkdocs.yml index 2e3df8eb0c..2474cea11d 100644 --- a/gen/acceleratedmobilepageurl1-cli/mkdocs.yml +++ b/gen/acceleratedmobilepageurl1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Acceleratedmobilepageurl v4.0.1+20220305 +site_name: Acceleratedmobilepageurl v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-acceleratedmobilepageurl1-cli site_description: A complete library to interact with Acceleratedmobilepageurl (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/acceleratedmobil docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['amp-urls_batch-get.md', 'Amp Urls', 'Batch Get'] +nav: +- Home: 'index.md' +- 'Amp Urls': + - 'Batch Get': 'amp-urls_batch-get.md' theme: readthedocs diff --git a/gen/acceleratedmobilepageurl1-cli/src/client.rs b/gen/acceleratedmobilepageurl1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/acceleratedmobilepageurl1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use 
std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/acceleratedmobilepageurl1-cli/src/main.rs b/gen/acceleratedmobilepageurl1-cli/src/main.rs index 79d6935efe..3d9ed42e64 100644 --- a/gen/acceleratedmobilepageurl1-cli/src/main.rs +++ b/gen/acceleratedmobilepageurl1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_acceleratedmobilepageurl1::{api, Error, oauth2}; +use google_acceleratedmobilepageurl1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -252,7 +251,7 @@ async fn main() { let mut app = App::new("acceleratedmobilepageurl1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("Retrieves the list of AMP URLs (and equivalent AMP Cache URLs) for a given list of public URL(s). 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_acceleratedmobilepageurl1_cli") .arg(Arg::with_name("folder") diff --git a/gen/acceleratedmobilepageurl1/Cargo.toml b/gen/acceleratedmobilepageurl1/Cargo.toml index aeb06e5105..a6903b8dad 100644 --- a/gen/acceleratedmobilepageurl1/Cargo.toml +++ b/gen/acceleratedmobilepageurl1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-acceleratedmobilepageurl1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Acceleratedmobilepageurl (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/acceleratedmobilepageurl1" homepage = "https://developers.google.com/amp/cache/" -documentation = "https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123" license = "MIT" keywords = ["acceleratedmobilepag", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/acceleratedmobilepageurl1/README.md b/gen/acceleratedmobilepageurl1/README.md index f9c74bb9a2..9d9e457f82 100644 --- a/gen/acceleratedmobilepageurl1/README.md +++ b/gen/acceleratedmobilepageurl1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-acceleratedmobilepageurl1` library allows access to all features of the *Google Acceleratedmobilepageurl* service. -This documentation was generated from *Acceleratedmobilepageurl* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *acceleratedmobilepageurl:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Acceleratedmobilepageurl* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *acceleratedmobilepageurl:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Acceleratedmobilepageurl* *v1* API can be found at the [official documentation site](https://developers.google.com/amp/cache/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/Acceleratedmobilepageurl) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/Acceleratedmobilepageurl) ... -* [amp urls](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/api::AmpUrl) - * [*batch get*](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/api::AmpUrlBatchGetCall) +* [amp urls](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/api::AmpUrl) + * [*batch get*](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/api::AmpUrlBatchGetCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/Acceleratedmobilepageurl)** +* **[Hub](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/Acceleratedmobilepageurl)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::CallBuilder) +* **[Resources](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::Part)** + * **[Parts](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::Delegate) to the -[Method Builder](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::Delegate) to the +[Method Builder](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::RequestValue) and -[decodable](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::RequestValue) and +[decodable](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2-beta-1+20230123/google_acceleratedmobilepageurl1/client::RequestValue) are moved +* [request values](https://docs.rs/google-acceleratedmobilepageurl1/5.0.2+20230123/google_acceleratedmobilepageurl1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/acceleratedmobilepageurl1/src/api.rs b/gen/acceleratedmobilepageurl1/src/api.rs index 4df87ee575..28ce49335a 100644 --- a/gen/acceleratedmobilepageurl1/src/api.rs +++ b/gen/acceleratedmobilepageurl1/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> Acceleratedmobilepageurl { Acceleratedmobilepageurl { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://acceleratedmobilepageurl.googleapis.com/".to_string(), _root_url: "https://acceleratedmobilepageurl.googleapis.com/".to_string(), } @@ -114,7 +114,7 @@ impl<'a, S> Acceleratedmobilepageurl { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/acceleratedmobilepageurl1/src/client.rs b/gen/acceleratedmobilepageurl1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/acceleratedmobilepageurl1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/acceleratedmobilepageurl1/src/lib.rs b/gen/acceleratedmobilepageurl1/src/lib.rs index f5021b9550..81f7830a4e 100644 --- a/gen/acceleratedmobilepageurl1/src/lib.rs +++ b/gen/acceleratedmobilepageurl1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Acceleratedmobilepageurl* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *acceleratedmobilepageurl:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Acceleratedmobilepageurl* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *acceleratedmobilepageurl:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Acceleratedmobilepageurl* *v1* API can be found at the //! [official documentation site](https://developers.google.com/amp/cache/). diff --git a/gen/accessapproval1-cli/Cargo.toml b/gen/accessapproval1-cli/Cargo.toml index 2195791c73..75b69a7252 100644 --- a/gen/accessapproval1-cli/Cargo.toml +++ b/gen/accessapproval1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-accessapproval1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Access Approval (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/accessapproval1-cli" @@ -20,13 +20,13 @@ name = "accessapproval1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-accessapproval1] path = "../accessapproval1" -version = "4.0.1+20220225" +version = "5.0.2+20230120" + diff --git a/gen/accessapproval1-cli/README.md b/gen/accessapproval1-cli/README.md index ee818e06f4..eafda5d09b 100644 --- a/gen/accessapproval1-cli/README.md +++ 
b/gen/accessapproval1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Access Approval* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Access Approval* API at revision *20230120*. The CLI is at version *5.0.2*. ```bash accessapproval1 [options] @@ -33,25 +33,31 @@ accessapproval1 [options] approval-requests-approve (-r )... [-p ]... [-o ] approval-requests-dismiss (-r )... [-p ]... [-o ] approval-requests-get [-p ]... [-o ] + approval-requests-invalidate (-r )... [-p ]... [-o ] approval-requests-list [-p ]... [-o ] delete-access-approval-settings [-p ]... [-o ] get-access-approval-settings [-p ]... [-o ] + get-service-account [-p ]... [-o ] update-access-approval-settings (-r )... [-p ]... [-o ] organizations approval-requests-approve (-r )... [-p ]... [-o ] approval-requests-dismiss (-r )... [-p ]... [-o ] approval-requests-get [-p ]... [-o ] + approval-requests-invalidate (-r )... [-p ]... [-o ] approval-requests-list [-p ]... [-o ] delete-access-approval-settings [-p ]... [-o ] get-access-approval-settings [-p ]... [-o ] + get-service-account [-p ]... [-o ] update-access-approval-settings (-r )... [-p ]... [-o ] projects approval-requests-approve (-r )... [-p ]... [-o ] approval-requests-dismiss (-r )... [-p ]... [-o ] approval-requests-get [-p ]... [-o ] + approval-requests-invalidate (-r )... [-p ]... [-o ] approval-requests-list [-p ]... [-o ] delete-access-approval-settings [-p ]... [-o ] get-access-approval-settings [-p ]... [-o ] + get-service-account [-p ]... [-o ] update-access-approval-settings (-r )... [-p ]... 
[-o ] accessapproval1 --help diff --git a/gen/accessapproval1-cli/mkdocs.yml b/gen/accessapproval1-cli/mkdocs.yml index 4b3e4c1080..227e994e2f 100644 --- a/gen/accessapproval1-cli/mkdocs.yml +++ b/gen/accessapproval1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Access Approval v4.0.1+20220225 +site_name: Access Approval v5.0.2+20230120 site_url: http://byron.github.io/google-apis-rs/google-accessapproval1-cli site_description: A complete library to interact with Access Approval (protocol v1) @@ -7,29 +7,38 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/accessapproval1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_approval-requests-approve.md', 'Folders', 'Approval Requests Approve'] -- ['folders_approval-requests-dismiss.md', 'Folders', 'Approval Requests Dismiss'] -- ['folders_approval-requests-get.md', 'Folders', 'Approval Requests Get'] -- ['folders_approval-requests-list.md', 'Folders', 'Approval Requests List'] -- ['folders_delete-access-approval-settings.md', 'Folders', 'Delete Access Approval Settings'] -- ['folders_get-access-approval-settings.md', 'Folders', 'Get Access Approval Settings'] -- ['folders_update-access-approval-settings.md', 'Folders', 'Update Access Approval Settings'] -- ['organizations_approval-requests-approve.md', 'Organizations', 'Approval Requests Approve'] -- ['organizations_approval-requests-dismiss.md', 'Organizations', 'Approval Requests Dismiss'] -- ['organizations_approval-requests-get.md', 'Organizations', 'Approval Requests Get'] -- ['organizations_approval-requests-list.md', 'Organizations', 'Approval Requests List'] -- ['organizations_delete-access-approval-settings.md', 'Organizations', 'Delete Access Approval Settings'] -- ['organizations_get-access-approval-settings.md', 'Organizations', 'Get Access Approval Settings'] -- ['organizations_update-access-approval-settings.md', 'Organizations', 'Update Access Approval Settings'] -- ['projects_approval-requests-approve.md', 
'Projects', 'Approval Requests Approve'] -- ['projects_approval-requests-dismiss.md', 'Projects', 'Approval Requests Dismiss'] -- ['projects_approval-requests-get.md', 'Projects', 'Approval Requests Get'] -- ['projects_approval-requests-list.md', 'Projects', 'Approval Requests List'] -- ['projects_delete-access-approval-settings.md', 'Projects', 'Delete Access Approval Settings'] -- ['projects_get-access-approval-settings.md', 'Projects', 'Get Access Approval Settings'] -- ['projects_update-access-approval-settings.md', 'Projects', 'Update Access Approval Settings'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Approval Requests Approve': 'folders_approval-requests-approve.md' + - 'Approval Requests Dismiss': 'folders_approval-requests-dismiss.md' + - 'Approval Requests Get': 'folders_approval-requests-get.md' + - 'Approval Requests Invalidate': 'folders_approval-requests-invalidate.md' + - 'Approval Requests List': 'folders_approval-requests-list.md' + - 'Delete Access Approval Settings': 'folders_delete-access-approval-settings.md' + - 'Get Access Approval Settings': 'folders_get-access-approval-settings.md' + - 'Get Service Account': 'folders_get-service-account.md' + - 'Update Access Approval Settings': 'folders_update-access-approval-settings.md' +- 'Organizations': + - 'Approval Requests Approve': 'organizations_approval-requests-approve.md' + - 'Approval Requests Dismiss': 'organizations_approval-requests-dismiss.md' + - 'Approval Requests Get': 'organizations_approval-requests-get.md' + - 'Approval Requests Invalidate': 'organizations_approval-requests-invalidate.md' + - 'Approval Requests List': 'organizations_approval-requests-list.md' + - 'Delete Access Approval Settings': 'organizations_delete-access-approval-settings.md' + - 'Get Access Approval Settings': 'organizations_get-access-approval-settings.md' + - 'Get Service Account': 'organizations_get-service-account.md' + - 'Update Access Approval Settings': 
'organizations_update-access-approval-settings.md' +- 'Projects': + - 'Approval Requests Approve': 'projects_approval-requests-approve.md' + - 'Approval Requests Dismiss': 'projects_approval-requests-dismiss.md' + - 'Approval Requests Get': 'projects_approval-requests-get.md' + - 'Approval Requests Invalidate': 'projects_approval-requests-invalidate.md' + - 'Approval Requests List': 'projects_approval-requests-list.md' + - 'Delete Access Approval Settings': 'projects_delete-access-approval-settings.md' + - 'Get Access Approval Settings': 'projects_get-access-approval-settings.md' + - 'Get Service Account': 'projects_get-service-account.md' + - 'Update Access Approval Settings': 'projects_update-access-approval-settings.md' theme: readthedocs diff --git a/gen/accessapproval1-cli/src/client.rs b/gen/accessapproval1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/accessapproval1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/accessapproval1-cli/src/main.rs b/gen/accessapproval1-cli/src/main.rs index bd89d2a3fe..4fe2750cd1 100644 --- a/gen/accessapproval1-cli/src/main.rs +++ b/gen/accessapproval1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_accessapproval1::{api, Error, oauth2}; +use google_accessapproval1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -272,6 +271,90 @@ where } } + async fn _folders_approval_requests_invalidate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InvalidateApprovalRequestMessage = json::value::from_value(object).unwrap(); + let mut call = self.hub.folders().approval_requests_invalidate(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + 
async fn _folders_approval_requests_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.folders().approval_requests_list(opt.value_of("parent").unwrap_or("")); @@ -282,7 +365,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -438,6 +521,58 @@ where } } + async fn _folders_get_service_account(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.folders().get_service_account(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, 
output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _folders_update_access_approval_settings(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -461,11 +596,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "active-key-version" => Some(("activeKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ancestor-has-active-key-version" => Some(("ancestorHasActiveKeyVersion", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enrolled-ancestor" => Some(("enrolledAncestor", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "invalid-key-version" => Some(("invalidKeyVersion", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-emails" => Some(("notificationEmails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["enrolled-ancestor", "name", "notification-emails"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-key-version", "ancestor-has-active-key-version", "enrolled-ancestor", "invalid-key-version", "name", "notification-emails"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -480,7 +618,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found 
= false; @@ -750,6 +888,90 @@ where } } + async fn _organizations_approval_requests_invalidate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InvalidateApprovalRequestMessage = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().approval_requests_invalidate(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); 
+ v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_approval_requests_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().approval_requests_list(opt.value_of("parent").unwrap_or("")); @@ -760,7 +982,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -916,6 +1138,58 @@ where } } + async fn _organizations_get_service_account(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().get_service_account(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + 
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_update_access_approval_settings(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -939,11 +1213,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "active-key-version" => Some(("activeKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ancestor-has-active-key-version" => Some(("ancestorHasActiveKeyVersion", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enrolled-ancestor" => Some(("enrolledAncestor", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "invalid-key-version" => Some(("invalidKeyVersion", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-emails" => Some(("notificationEmails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["enrolled-ancestor", "name", "notification-emails"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-key-version", "ancestor-has-active-key-version", "enrolled-ancestor", "invalid-key-version", "name", "notification-emails"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -958,7 +1235,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1228,6 +1505,90 @@ where } } + async fn _projects_approval_requests_invalidate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = 
field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InvalidateApprovalRequestMessage = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().approval_requests_invalidate(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ 
=> unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_approval_requests_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().approval_requests_list(opt.value_of("parent").unwrap_or("")); @@ -1238,7 +1599,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1394,6 +1755,58 @@ where } } + async fn _projects_get_service_account(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().get_service_account(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream 
= match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_update_access_approval_settings(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1417,11 +1830,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "active-key-version" => Some(("activeKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ancestor-has-active-key-version" => Some(("ancestorHasActiveKeyVersion", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enrolled-ancestor" => Some(("enrolledAncestor", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "invalid-key-version" => Some(("invalidKeyVersion", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-emails" => Some(("notificationEmails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["enrolled-ancestor", "name", "notification-emails"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-key-version", "ancestor-has-active-key-version", "enrolled-ancestor", "invalid-key-version", "name", "notification-emails"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, 
value.map(|v| v.to_string())))); None } @@ -1436,7 +1852,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1501,6 +1917,9 @@ where ("approval-requests-get", Some(opt)) => { call_result = self._folders_approval_requests_get(opt, dry_run, &mut err).await; }, + ("approval-requests-invalidate", Some(opt)) => { + call_result = self._folders_approval_requests_invalidate(opt, dry_run, &mut err).await; + }, ("approval-requests-list", Some(opt)) => { call_result = self._folders_approval_requests_list(opt, dry_run, &mut err).await; }, @@ -1510,6 +1929,9 @@ where ("get-access-approval-settings", Some(opt)) => { call_result = self._folders_get_access_approval_settings(opt, dry_run, &mut err).await; }, + ("get-service-account", Some(opt)) => { + call_result = self._folders_get_service_account(opt, dry_run, &mut err).await; + }, ("update-access-approval-settings", Some(opt)) => { call_result = self._folders_update_access_approval_settings(opt, dry_run, &mut err).await; }, @@ -1530,6 +1952,9 @@ where ("approval-requests-get", Some(opt)) => { call_result = self._organizations_approval_requests_get(opt, dry_run, &mut err).await; }, + ("approval-requests-invalidate", Some(opt)) => { + call_result = self._organizations_approval_requests_invalidate(opt, dry_run, &mut err).await; + }, ("approval-requests-list", Some(opt)) => { call_result = self._organizations_approval_requests_list(opt, dry_run, &mut err).await; }, @@ -1539,6 +1964,9 @@ where ("get-access-approval-settings", Some(opt)) => { call_result = self._organizations_get_access_approval_settings(opt, dry_run, &mut err).await; }, + ("get-service-account", Some(opt)) => { + call_result = self._organizations_get_service_account(opt, dry_run, &mut err).await; + }, 
("update-access-approval-settings", Some(opt)) => { call_result = self._organizations_update_access_approval_settings(opt, dry_run, &mut err).await; }, @@ -1559,6 +1987,9 @@ where ("approval-requests-get", Some(opt)) => { call_result = self._projects_approval_requests_get(opt, dry_run, &mut err).await; }, + ("approval-requests-invalidate", Some(opt)) => { + call_result = self._projects_approval_requests_invalidate(opt, dry_run, &mut err).await; + }, ("approval-requests-list", Some(opt)) => { call_result = self._projects_approval_requests_list(opt, dry_run, &mut err).await; }, @@ -1568,6 +1999,9 @@ where ("get-access-approval-settings", Some(opt)) => { call_result = self._projects_get_access_approval_settings(opt, dry_run, &mut err).await; }, + ("get-service-account", Some(opt)) => { + call_result = self._projects_get_service_account(opt, dry_run, &mut err).await; + }, ("update-access-approval-settings", Some(opt)) => { call_result = self._projects_update_access_approval_settings(opt, dry_run, &mut err).await; }, @@ -1650,7 +2084,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("folders", "methods: 'approval-requests-approve', 'approval-requests-dismiss', 'approval-requests-get', 'approval-requests-list', 'delete-access-approval-settings', 'get-access-approval-settings' and 'update-access-approval-settings'", vec![ + ("folders", "methods: 'approval-requests-approve', 'approval-requests-dismiss', 'approval-requests-get', 'approval-requests-invalidate', 'approval-requests-list', 'delete-access-approval-settings', 'get-access-approval-settings', 'get-service-account' and 'update-access-approval-settings'", vec![ ("approval-requests-approve", Some(r##"Approves a request and returns the updated ApprovalRequest. Returns NOT_FOUND if the request does not exist. 
Returns FAILED_PRECONDITION if the request exists but is not in a pending state."##), "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/folders_approval-requests-approve", @@ -1723,6 +2157,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("approval-requests-invalidate", + Some(r##"Invalidates an existing ApprovalRequest. Returns the updated ApprovalRequest. NOTE: This does not deny access to the resource if another request has been made and approved. It only invalidates a single approval. Returns FAILED_PRECONDITION if the request exists but is not in an approved state."##), + "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/folders_approval-requests-invalidate", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the ApprovalRequest to invalidate."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1789,6 +2251,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-service-account", + Some(r##"Retrieves the service account that is used by Access Approval to access KMS keys for signing approved approval requests."##), + "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/folders_get-service-account", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the AccessApprovalServiceAccount 
to retrieve."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1825,7 +2309,7 @@ async fn main() { ]), ]), - ("organizations", "methods: 'approval-requests-approve', 'approval-requests-dismiss', 'approval-requests-get', 'approval-requests-list', 'delete-access-approval-settings', 'get-access-approval-settings' and 'update-access-approval-settings'", vec![ + ("organizations", "methods: 'approval-requests-approve', 'approval-requests-dismiss', 'approval-requests-get', 'approval-requests-invalidate', 'approval-requests-list', 'delete-access-approval-settings', 'get-access-approval-settings', 'get-service-account' and 'update-access-approval-settings'", vec![ ("approval-requests-approve", Some(r##"Approves a request and returns the updated ApprovalRequest. Returns NOT_FOUND if the request does not exist. Returns FAILED_PRECONDITION if the request exists but is not in a pending state."##), "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/organizations_approval-requests-approve", @@ -1898,6 +2382,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("approval-requests-invalidate", + Some(r##"Invalidates an existing ApprovalRequest. Returns the updated ApprovalRequest. NOTE: This does not deny access to the resource if another request has been made and approved. It only invalidates a single approval. 
Returns FAILED_PRECONDITION if the request exists but is not in an approved state."##), + "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/organizations_approval-requests-invalidate", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the ApprovalRequest to invalidate."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1964,6 +2476,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-service-account", + Some(r##"Retrieves the service account that is used by Access Approval to access KMS keys for signing approved approval requests."##), + "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/organizations_get-service-account", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the AccessApprovalServiceAccount to retrieve."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2000,7 +2534,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'approval-requests-approve', 'approval-requests-dismiss', 'approval-requests-get', 'approval-requests-list', 'delete-access-approval-settings', 'get-access-approval-settings' and 'update-access-approval-settings'", vec![ + ("projects", "methods: 'approval-requests-approve', 
'approval-requests-dismiss', 'approval-requests-get', 'approval-requests-invalidate', 'approval-requests-list', 'delete-access-approval-settings', 'get-access-approval-settings', 'get-service-account' and 'update-access-approval-settings'", vec![ ("approval-requests-approve", Some(r##"Approves a request and returns the updated ApprovalRequest. Returns NOT_FOUND if the request does not exist. Returns FAILED_PRECONDITION if the request exists but is not in a pending state."##), "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/projects_approval-requests-approve", @@ -2073,6 +2607,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("approval-requests-invalidate", + Some(r##"Invalidates an existing ApprovalRequest. Returns the updated ApprovalRequest. NOTE: This does not deny access to the resource if another request has been made and approved. It only invalidates a single approval. 
Returns FAILED_PRECONDITION if the request exists but is not in an approved state."##), + "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/projects_approval-requests-invalidate", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the ApprovalRequest to invalidate."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2139,6 +2701,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-service-account", + Some(r##"Retrieves the service account that is used by Access Approval to access KMS keys for signing approved approval requests."##), + "Details at http://byron.github.io/google-apis-rs/google_accessapproval1_cli/projects_get-service-account", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the AccessApprovalServiceAccount to retrieve."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2179,7 +2763,7 @@ async fn main() { let mut app = App::new("accessapproval1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230120") .about("An API for controlling access to data by Google personnel.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_accessapproval1_cli") 
.arg(Arg::with_name("url") diff --git a/gen/accessapproval1/Cargo.toml b/gen/accessapproval1/Cargo.toml index a7e2b01ece..7ad279242f 100644 --- a/gen/accessapproval1/Cargo.toml +++ b/gen/accessapproval1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-accessapproval1" -version = "5.0.2-beta-1+20230120" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Access Approval (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/accessapproval1" homepage = "https://cloud.google.com/cloud-provider-access-management/access-approval/docs" -documentation = "https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120" +documentation = "https://docs.rs/google-accessapproval1/5.0.2+20230120" license = "MIT" keywords = ["accessapproval", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/accessapproval1/README.md b/gen/accessapproval1/README.md index 9399ff9de0..927eb8780c 100644 --- a/gen/accessapproval1/README.md +++ b/gen/accessapproval1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-accessapproval1` library allows access to all features of the *Google Access Approval* service. -This documentation was generated from *Access Approval* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *accessapproval:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Access Approval* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *accessapproval:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Access Approval* *v1* API can be found at the [official documentation site](https://cloud.google.com/cloud-provider-access-management/access-approval/docs). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/AccessApproval) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/AccessApproval) ... * folders - * [*approval requests approve*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderApprovalRequestGetCall), [*approval requests invalidate*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderApprovalRequestInvalidateCall), [*approval requests list*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderGetAccessApprovalSettingCall), [*get service account*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderGetServiceAccountCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::FolderUpdateAccessApprovalSettingCall) + * [*approval requests approve*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderApprovalRequestApproveCall), [*approval requests 
dismiss*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderApprovalRequestGetCall), [*approval requests invalidate*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderApprovalRequestInvalidateCall), [*approval requests list*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderGetAccessApprovalSettingCall), [*get service account*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderGetServiceAccountCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::FolderUpdateAccessApprovalSettingCall) * organizations - * [*approval requests approve*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationApprovalRequestGetCall), [*approval requests invalidate*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationApprovalRequestInvalidateCall), [*approval requests list*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationApprovalRequestListCall), [*delete access 
approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationGetAccessApprovalSettingCall), [*get service account*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationGetServiceAccountCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::OrganizationUpdateAccessApprovalSettingCall) + * [*approval requests approve*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationApprovalRequestGetCall), [*approval requests invalidate*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationApprovalRequestInvalidateCall), [*approval requests list*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationGetAccessApprovalSettingCall), [*get service account*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationGetServiceAccountCall) and [*update access approval 
settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::OrganizationUpdateAccessApprovalSettingCall) * projects - * [*approval requests approve*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectApprovalRequestGetCall), [*approval requests invalidate*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectApprovalRequestInvalidateCall), [*approval requests list*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectGetAccessApprovalSettingCall), [*get service account*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectGetServiceAccountCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/api::ProjectUpdateAccessApprovalSettingCall) + * [*approval requests approve*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectApprovalRequestGetCall), 
[*approval requests invalidate*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectApprovalRequestInvalidateCall), [*approval requests list*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectGetAccessApprovalSettingCall), [*get service account*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectGetServiceAccountCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/api::ProjectUpdateAccessApprovalSettingCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/AccessApproval)** +* **[Hub](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/AccessApproval)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::CallBuilder) -* **[Resources](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::CallBuilder) +* **[Resources](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::Part)** + * **[Parts](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -139,17 +139,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -159,29 +159,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::Delegate) to the -[Method Builder](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::Delegate) to the +[Method Builder](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::RequestValue) and -[decodable](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::RequestValue) and +[decodable](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-accessapproval1/5.0.2-beta-1+20230120/google_accessapproval1/client::RequestValue) are moved +* [request values](https://docs.rs/google-accessapproval1/5.0.2+20230120/google_accessapproval1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/accessapproval1/src/api.rs b/gen/accessapproval1/src/api.rs index 0f618075bb..e44086cb80 100644 --- a/gen/accessapproval1/src/api.rs +++ b/gen/accessapproval1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> AccessApproval { AccessApproval { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://accessapproval.googleapis.com/".to_string(), _root_url: "https://accessapproval.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> AccessApproval { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/accessapproval1/src/client.rs b/gen/accessapproval1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/accessapproval1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/accessapproval1/src/lib.rs b/gen/accessapproval1/src/lib.rs index f24f09b615..ade82f7b57 100644 --- a/gen/accessapproval1/src/lib.rs +++ b/gen/accessapproval1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Access Approval* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *accessapproval:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Access Approval* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *accessapproval:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Access Approval* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/cloud-provider-access-management/access-approval/docs). diff --git a/gen/accessapproval1_beta1-cli/Cargo.toml b/gen/accessapproval1_beta1-cli/Cargo.toml index d3e2d467e8..f944265cd3 100644 --- a/gen/accessapproval1_beta1-cli/Cargo.toml +++ b/gen/accessapproval1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-accessapproval1_beta1-cli" -version = "4.0.1+20200708" +version = "5.0.2+20200708" authors = ["Sebastian Thiel "] description = "A complete library to interact with Access Approval (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/accessapproval1_beta1-cli" @@ -20,13 +20,13 @@ name = "accessapproval1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-accessapproval1_beta1] path = "../accessapproval1_beta1" -version = "4.0.1+20200708" +version = "5.0.2+20200708" + diff --git a/gen/accessapproval1_beta1-cli/README.md b/gen/accessapproval1_beta1-cli/README.md index 
d80ebd4704..dc3d8534a0 100644 --- a/gen/accessapproval1_beta1-cli/README.md +++ b/gen/accessapproval1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Access Approval* API at revision *20200708*. The CLI is at version *4.0.1*. +This documentation was generated from the *Access Approval* API at revision *20200708*. The CLI is at version *5.0.2*. ```bash accessapproval1-beta1 [options] diff --git a/gen/accessapproval1_beta1-cli/mkdocs.yml b/gen/accessapproval1_beta1-cli/mkdocs.yml index 14d2dc6945..b4b3b02b06 100644 --- a/gen/accessapproval1_beta1-cli/mkdocs.yml +++ b/gen/accessapproval1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Access Approval v4.0.1+20200708 +site_name: Access Approval v5.0.2+20200708 site_url: http://byron.github.io/google-apis-rs/google-accessapproval1_beta1-cli site_description: A complete library to interact with Access Approval (protocol v1beta1) @@ -7,29 +7,32 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/accessapproval1_ docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_approval-requests-approve.md', 'Folders', 'Approval Requests Approve'] -- ['folders_approval-requests-dismiss.md', 'Folders', 'Approval Requests Dismiss'] -- ['folders_approval-requests-get.md', 'Folders', 'Approval Requests Get'] -- ['folders_approval-requests-list.md', 'Folders', 'Approval Requests List'] -- ['folders_delete-access-approval-settings.md', 'Folders', 'Delete Access Approval Settings'] -- ['folders_get-access-approval-settings.md', 'Folders', 'Get Access Approval Settings'] -- ['folders_update-access-approval-settings.md', 'Folders', 'Update Access Approval Settings'] -- ['organizations_approval-requests-approve.md', 'Organizations', 'Approval Requests Approve'] -- ['organizations_approval-requests-dismiss.md', 'Organizations', 'Approval Requests Dismiss'] -- 
['organizations_approval-requests-get.md', 'Organizations', 'Approval Requests Get'] -- ['organizations_approval-requests-list.md', 'Organizations', 'Approval Requests List'] -- ['organizations_delete-access-approval-settings.md', 'Organizations', 'Delete Access Approval Settings'] -- ['organizations_get-access-approval-settings.md', 'Organizations', 'Get Access Approval Settings'] -- ['organizations_update-access-approval-settings.md', 'Organizations', 'Update Access Approval Settings'] -- ['projects_approval-requests-approve.md', 'Projects', 'Approval Requests Approve'] -- ['projects_approval-requests-dismiss.md', 'Projects', 'Approval Requests Dismiss'] -- ['projects_approval-requests-get.md', 'Projects', 'Approval Requests Get'] -- ['projects_approval-requests-list.md', 'Projects', 'Approval Requests List'] -- ['projects_delete-access-approval-settings.md', 'Projects', 'Delete Access Approval Settings'] -- ['projects_get-access-approval-settings.md', 'Projects', 'Get Access Approval Settings'] -- ['projects_update-access-approval-settings.md', 'Projects', 'Update Access Approval Settings'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Approval Requests Approve': 'folders_approval-requests-approve.md' + - 'Approval Requests Dismiss': 'folders_approval-requests-dismiss.md' + - 'Approval Requests Get': 'folders_approval-requests-get.md' + - 'Approval Requests List': 'folders_approval-requests-list.md' + - 'Delete Access Approval Settings': 'folders_delete-access-approval-settings.md' + - 'Get Access Approval Settings': 'folders_get-access-approval-settings.md' + - 'Update Access Approval Settings': 'folders_update-access-approval-settings.md' +- 'Organizations': + - 'Approval Requests Approve': 'organizations_approval-requests-approve.md' + - 'Approval Requests Dismiss': 'organizations_approval-requests-dismiss.md' + - 'Approval Requests Get': 'organizations_approval-requests-get.md' + - 'Approval Requests List': 'organizations_approval-requests-list.md' + - 
'Delete Access Approval Settings': 'organizations_delete-access-approval-settings.md' + - 'Get Access Approval Settings': 'organizations_get-access-approval-settings.md' + - 'Update Access Approval Settings': 'organizations_update-access-approval-settings.md' +- 'Projects': + - 'Approval Requests Approve': 'projects_approval-requests-approve.md' + - 'Approval Requests Dismiss': 'projects_approval-requests-dismiss.md' + - 'Approval Requests Get': 'projects_approval-requests-get.md' + - 'Approval Requests List': 'projects_approval-requests-list.md' + - 'Delete Access Approval Settings': 'projects_delete-access-approval-settings.md' + - 'Get Access Approval Settings': 'projects_get-access-approval-settings.md' + - 'Update Access Approval Settings': 'projects_update-access-approval-settings.md' theme: readthedocs diff --git a/gen/accessapproval1_beta1-cli/src/client.rs b/gen/accessapproval1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/accessapproval1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/accessapproval1_beta1-cli/src/main.rs b/gen/accessapproval1_beta1-cli/src/main.rs index b5447b9d23..291a523f98 100644 --- a/gen/accessapproval1_beta1-cli/src/main.rs +++ b/gen/accessapproval1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_accessapproval1_beta1::{api, Error, oauth2}; +use google_accessapproval1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -282,7 +281,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -480,7 +479,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -760,7 +759,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -958,7 +957,7 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1238,7 +1237,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1436,7 +1435,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2257,7 +2256,7 @@ async fn main() { let mut app = App::new("accessapproval1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20200708") + .version("5.0.2+20200708") .about("An API for controlling access to data by Google personnel.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_accessapproval1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/accessapproval1_beta1/Cargo.toml b/gen/accessapproval1_beta1/Cargo.toml index 63611a0f61..4ed854cb40 100644 --- a/gen/accessapproval1_beta1/Cargo.toml +++ b/gen/accessapproval1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-accessapproval1_beta1" -version = "5.0.2-beta-1+20200708" +version = "5.0.2+20200708" authors = ["Sebastian Thiel "] description = "A complete library to interact with Access Approval (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/accessapproval1_beta1" homepage = "https://cloud.google.com/access-approval/docs" -documentation = 
"https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708" +documentation = "https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708" license = "MIT" keywords = ["accessapproval", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/accessapproval1_beta1/README.md b/gen/accessapproval1_beta1/README.md index db2d442d4e..8d7d24bf82 100644 --- a/gen/accessapproval1_beta1/README.md +++ b/gen/accessapproval1_beta1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-accessapproval1_beta1` library allows access to all features of the *Google Access Approval* service. -This documentation was generated from *Access Approval* crate version *5.0.2-beta-1+20200708*, where *20200708* is the exact revision of the *accessapproval:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Access Approval* crate version *5.0.2+20200708*, where *20200708* is the exact revision of the *accessapproval:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Access Approval* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/access-approval/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/AccessApproval) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/AccessApproval) ... 
* folders - * [*approval requests approve*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::FolderApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::FolderApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::FolderApprovalRequestGetCall), [*approval requests list*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::FolderApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::FolderDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::FolderGetAccessApprovalSettingCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::FolderUpdateAccessApprovalSettingCall) + * [*approval requests approve*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::FolderApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::FolderApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::FolderApprovalRequestGetCall), [*approval requests list*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::FolderApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::FolderDeleteAccessApprovalSettingCall), [*get access 
approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::FolderGetAccessApprovalSettingCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::FolderUpdateAccessApprovalSettingCall) * organizations - * [*approval requests approve*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::OrganizationApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::OrganizationApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::OrganizationApprovalRequestGetCall), [*approval requests list*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::OrganizationApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::OrganizationDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::OrganizationGetAccessApprovalSettingCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::OrganizationUpdateAccessApprovalSettingCall) + * [*approval requests approve*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::OrganizationApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::OrganizationApprovalRequestDismisCall), [*approval requests 
get*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::OrganizationApprovalRequestGetCall), [*approval requests list*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::OrganizationApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::OrganizationDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::OrganizationGetAccessApprovalSettingCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::OrganizationUpdateAccessApprovalSettingCall) * projects - * [*approval requests approve*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::ProjectApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::ProjectApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::ProjectApprovalRequestGetCall), [*approval requests list*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::ProjectApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::ProjectDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::ProjectGetAccessApprovalSettingCall) and [*update access approval 
settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/api::ProjectUpdateAccessApprovalSettingCall) + * [*approval requests approve*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::ProjectApprovalRequestApproveCall), [*approval requests dismiss*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::ProjectApprovalRequestDismisCall), [*approval requests get*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::ProjectApprovalRequestGetCall), [*approval requests list*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::ProjectApprovalRequestListCall), [*delete access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::ProjectDeleteAccessApprovalSettingCall), [*get access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::ProjectGetAccessApprovalSettingCall) and [*update access approval settings*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/api::ProjectUpdateAccessApprovalSettingCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/AccessApproval)** +* **[Hub](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/AccessApproval)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -136,17 +136,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -156,29 +156,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-accessapproval1_beta1/5.0.2-beta-1+20200708/google_accessapproval1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-accessapproval1_beta1/5.0.2+20200708/google_accessapproval1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/accessapproval1_beta1/src/api.rs b/gen/accessapproval1_beta1/src/api.rs index 8f5abc5d72..850d79744f 100644 --- a/gen/accessapproval1_beta1/src/api.rs +++ b/gen/accessapproval1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> AccessApproval { AccessApproval { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://accessapproval.googleapis.com/".to_string(), _root_url: "https://accessapproval.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> AccessApproval { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/accessapproval1_beta1/src/client.rs b/gen/accessapproval1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/accessapproval1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/accessapproval1_beta1/src/lib.rs b/gen/accessapproval1_beta1/src/lib.rs index c331544817..bd222045c3 100644 --- a/gen/accessapproval1_beta1/src/lib.rs +++ b/gen/accessapproval1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Access Approval* crate version *5.0.2-beta-1+20200708*, where *20200708* is the exact revision of the *accessapproval:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Access Approval* crate version *5.0.2+20200708*, where *20200708* is the exact revision of the *accessapproval:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Access Approval* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/access-approval/docs). diff --git a/gen/accesscontextmanager1-cli/Cargo.toml b/gen/accesscontextmanager1-cli/Cargo.toml index 558f5cbfb4..b444ca4136 100644 --- a/gen/accesscontextmanager1-cli/Cargo.toml +++ b/gen/accesscontextmanager1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-accesscontextmanager1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Access Context Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/accesscontextmanager1-cli" @@ -20,13 +20,13 @@ name = "accesscontextmanager1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-accesscontextmanager1] path = "../accesscontextmanager1" -version = "4.0.1+20220301" +version = "5.0.2+20230123" + diff --git a/gen/accesscontextmanager1-cli/README.md b/gen/accesscontextmanager1-cli/README.md index 4e8117d015..8269a11ea1 100644 --- 
a/gen/accesscontextmanager1-cli/README.md +++ b/gen/accesscontextmanager1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Access Context Manager* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Access Context Manager* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash accesscontextmanager1 [options] @@ -37,6 +37,11 @@ accesscontextmanager1 [options] access-levels-patch (-r )... [-p ]... [-o ] access-levels-replace-all (-r )... [-p ]... [-o ] access-levels-test-iam-permissions (-r )... [-p ]... [-o ] + authorized-orgs-descs-create (-r )... [-p ]... [-o ] + authorized-orgs-descs-delete [-p ]... [-o ] + authorized-orgs-descs-get [-p ]... [-o ] + authorized-orgs-descs-list [-p ]... [-o ] + authorized-orgs-descs-patch (-r )... [-p ]... [-o ] create (-r )... [-p ]... [-o ] delete [-p ]... [-o ] get [-p ]... [-o ] diff --git a/gen/accesscontextmanager1-cli/mkdocs.yml b/gen/accesscontextmanager1-cli/mkdocs.yml index ddb0c091f6..c2460d20a8 100644 --- a/gen/accesscontextmanager1-cli/mkdocs.yml +++ b/gen/accesscontextmanager1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Access Context Manager v4.0.1+20220301 +site_name: Access Context Manager v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-accesscontextmanager1-cli site_description: A complete library to interact with Access Context Manager (protocol v1) @@ -7,40 +7,48 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/accesscontextman docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['access-policies_access-levels-create.md', 'Access Policies', 'Access Levels Create'] -- ['access-policies_access-levels-delete.md', 'Access Policies', 'Access Levels Delete'] -- ['access-policies_access-levels-get.md', 'Access Policies', 'Access Levels Get'] -- ['access-policies_access-levels-list.md', 
'Access Policies', 'Access Levels List'] -- ['access-policies_access-levels-patch.md', 'Access Policies', 'Access Levels Patch'] -- ['access-policies_access-levels-replace-all.md', 'Access Policies', 'Access Levels Replace All'] -- ['access-policies_access-levels-test-iam-permissions.md', 'Access Policies', 'Access Levels Test Iam Permissions'] -- ['access-policies_create.md', 'Access Policies', 'Create'] -- ['access-policies_delete.md', 'Access Policies', 'Delete'] -- ['access-policies_get.md', 'Access Policies', 'Get'] -- ['access-policies_get-iam-policy.md', 'Access Policies', 'Get Iam Policy'] -- ['access-policies_list.md', 'Access Policies', 'List'] -- ['access-policies_patch.md', 'Access Policies', 'Patch'] -- ['access-policies_service-perimeters-commit.md', 'Access Policies', 'Service Perimeters Commit'] -- ['access-policies_service-perimeters-create.md', 'Access Policies', 'Service Perimeters Create'] -- ['access-policies_service-perimeters-delete.md', 'Access Policies', 'Service Perimeters Delete'] -- ['access-policies_service-perimeters-get.md', 'Access Policies', 'Service Perimeters Get'] -- ['access-policies_service-perimeters-list.md', 'Access Policies', 'Service Perimeters List'] -- ['access-policies_service-perimeters-patch.md', 'Access Policies', 'Service Perimeters Patch'] -- ['access-policies_service-perimeters-replace-all.md', 'Access Policies', 'Service Perimeters Replace All'] -- ['access-policies_service-perimeters-test-iam-permissions.md', 'Access Policies', 'Service Perimeters Test Iam Permissions'] -- ['access-policies_set-iam-policy.md', 'Access Policies', 'Set Iam Policy'] -- ['access-policies_test-iam-permissions.md', 'Access Policies', 'Test Iam Permissions'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['organizations_gcp-user-access-bindings-create.md', 'Organizations', 'Gcp User 
Access Bindings Create'] -- ['organizations_gcp-user-access-bindings-delete.md', 'Organizations', 'Gcp User Access Bindings Delete'] -- ['organizations_gcp-user-access-bindings-get.md', 'Organizations', 'Gcp User Access Bindings Get'] -- ['organizations_gcp-user-access-bindings-list.md', 'Organizations', 'Gcp User Access Bindings List'] -- ['organizations_gcp-user-access-bindings-patch.md', 'Organizations', 'Gcp User Access Bindings Patch'] +nav: +- Home: 'index.md' +- 'Access Policies': + - 'Access Levels Create': 'access-policies_access-levels-create.md' + - 'Access Levels Delete': 'access-policies_access-levels-delete.md' + - 'Access Levels Get': 'access-policies_access-levels-get.md' + - 'Access Levels List': 'access-policies_access-levels-list.md' + - 'Access Levels Patch': 'access-policies_access-levels-patch.md' + - 'Access Levels Replace All': 'access-policies_access-levels-replace-all.md' + - 'Access Levels Test Iam Permissions': 'access-policies_access-levels-test-iam-permissions.md' + - 'Authorized Orgs Descs Create': 'access-policies_authorized-orgs-descs-create.md' + - 'Authorized Orgs Descs Delete': 'access-policies_authorized-orgs-descs-delete.md' + - 'Authorized Orgs Descs Get': 'access-policies_authorized-orgs-descs-get.md' + - 'Authorized Orgs Descs List': 'access-policies_authorized-orgs-descs-list.md' + - 'Authorized Orgs Descs Patch': 'access-policies_authorized-orgs-descs-patch.md' + - 'Create': 'access-policies_create.md' + - 'Delete': 'access-policies_delete.md' + - 'Get': 'access-policies_get.md' + - 'Get Iam Policy': 'access-policies_get-iam-policy.md' + - 'List': 'access-policies_list.md' + - 'Patch': 'access-policies_patch.md' + - 'Service Perimeters Commit': 'access-policies_service-perimeters-commit.md' + - 'Service Perimeters Create': 'access-policies_service-perimeters-create.md' + - 'Service Perimeters Delete': 'access-policies_service-perimeters-delete.md' + - 'Service Perimeters Get': 'access-policies_service-perimeters-get.md' + 
- 'Service Perimeters List': 'access-policies_service-perimeters-list.md' + - 'Service Perimeters Patch': 'access-policies_service-perimeters-patch.md' + - 'Service Perimeters Replace All': 'access-policies_service-perimeters-replace-all.md' + - 'Service Perimeters Test Iam Permissions': 'access-policies_service-perimeters-test-iam-permissions.md' + - 'Set Iam Policy': 'access-policies_set-iam-policy.md' + - 'Test Iam Permissions': 'access-policies_test-iam-permissions.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Organizations': + - 'Gcp User Access Bindings Create': 'organizations_gcp-user-access-bindings-create.md' + - 'Gcp User Access Bindings Delete': 'organizations_gcp-user-access-bindings-delete.md' + - 'Gcp User Access Bindings Get': 'organizations_gcp-user-access-bindings-get.md' + - 'Gcp User Access Bindings List': 'organizations_gcp-user-access-bindings-list.md' + - 'Gcp User Access Bindings Patch': 'organizations_gcp-user-access-bindings-patch.md' theme: readthedocs diff --git a/gen/accesscontextmanager1-cli/src/client.rs b/gen/accesscontextmanager1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/accesscontextmanager1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub 
enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/accesscontextmanager1-cli/src/main.rs b/gen/accesscontextmanager1-cli/src/main.rs index d517ebed73..a53dff4269 100644 --- a/gen/accesscontextmanager1-cli/src/main.rs +++ b/gen/accesscontextmanager1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_accesscontextmanager1::{api, Error, oauth2}; +use google_accesscontextmanager1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -261,7 +260,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "access-level-format" => { call = call.access_level_format(value.unwrap_or("")); @@ -360,7 +359,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -579,6 +578,351 @@ where } } + async fn _access_policies_authorized_orgs_descs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "asset-type" => Some(("assetType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "authorization-direction" => Some(("authorizationDirection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "authorization-type" => Some(("authorizationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "orgs" => Some(("orgs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["asset-type", "authorization-direction", "authorization-type", "name", "orgs"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::AuthorizedOrgsDesc = json::value::from_value(object).unwrap(); + let mut call = self.hub.access_policies().authorized_orgs_descs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + 
found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _access_policies_authorized_orgs_descs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.access_policies().authorized_orgs_descs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _access_policies_authorized_orgs_descs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.access_policies().authorized_orgs_descs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _access_policies_authorized_orgs_descs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.access_policies().authorized_orgs_descs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + 
}; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _access_policies_authorized_orgs_descs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "asset-type" => Some(("assetType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "authorization-direction" => Some(("authorizationDirection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "authorization-type" => Some(("authorizationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "orgs" => Some(("orgs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["asset-type", "authorization-direction", "authorization-type", "name", "orgs"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::AuthorizedOrgsDesc = json::value::from_value(object).unwrap(); + let mut call = self.hub.access_policies().authorized_orgs_descs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _access_policies_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -870,7 +1214,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -963,7 +1307,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1310,7 +1654,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1413,7 +1757,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2002,7 +2346,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2255,7 +2599,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2346,7 +2690,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2423,6 +2767,21 @@ where ("access-levels-test-iam-permissions", Some(opt)) => { call_result = self._access_policies_access_levels_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("authorized-orgs-descs-create", Some(opt)) => { + call_result = self._access_policies_authorized_orgs_descs_create(opt, dry_run, &mut err).await; + }, + ("authorized-orgs-descs-delete", Some(opt)) => { + call_result = self._access_policies_authorized_orgs_descs_delete(opt, dry_run, &mut err).await; + }, + ("authorized-orgs-descs-get", Some(opt)) => { + call_result = self._access_policies_authorized_orgs_descs_get(opt, dry_run, &mut err).await; + }, + ("authorized-orgs-descs-list", Some(opt)) => { + call_result = self._access_policies_authorized_orgs_descs_list(opt, 
dry_run, &mut err).await; + }, + ("authorized-orgs-descs-patch", Some(opt)) => { + call_result = self._access_policies_authorized_orgs_descs_patch(opt, dry_run, &mut err).await; + }, ("create", Some(opt)) => { call_result = self._access_policies_create(opt, dry_run, &mut err).await; }, @@ -2593,7 +2952,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("access-policies", "methods: 'access-levels-create', 'access-levels-delete', 'access-levels-get', 'access-levels-list', 'access-levels-patch', 'access-levels-replace-all', 'access-levels-test-iam-permissions', 'create', 'delete', 'get', 'get-iam-policy', 'list', 'patch', 'service-perimeters-commit', 'service-perimeters-create', 'service-perimeters-delete', 'service-perimeters-get', 'service-perimeters-list', 'service-perimeters-patch', 'service-perimeters-replace-all', 'service-perimeters-test-iam-permissions', 'set-iam-policy' and 'test-iam-permissions'", vec![ + ("access-policies", "methods: 'access-levels-create', 'access-levels-delete', 'access-levels-get', 'access-levels-list', 'access-levels-patch', 'access-levels-replace-all', 'access-levels-test-iam-permissions', 'authorized-orgs-descs-create', 'authorized-orgs-descs-delete', 'authorized-orgs-descs-get', 'authorized-orgs-descs-list', 'authorized-orgs-descs-patch', 'create', 'delete', 'get', 'get-iam-policy', 'list', 'patch', 'service-perimeters-commit', 'service-perimeters-create', 'service-perimeters-delete', 'service-perimeters-get', 'service-perimeters-list', 'service-perimeters-patch', 'service-perimeters-replace-all', 'service-perimeters-test-iam-permissions', 'set-iam-policy' and 'test-iam-permissions'", vec![ ("access-levels-create", Some(r##"Creates an access level. The long-running operation from this RPC has a successful status after the access level propagates to long-lasting storage. 
If access levels contain errors, an error response is returned for the first error encountered."##), "Details at http://byron.github.io/google-apis-rs/google_accesscontextmanager1_cli/access-policies_access-levels-create", @@ -2694,7 +3053,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Resource name for the Access Level. The `short_name` component must begin with a letter and only include alphanumeric and '_'. Format: `accessPolicies/{access_policy}/accessLevels/{access_level}`. The maximum length of the `access_level` component is 50 characters."##), + Some(r##"Resource name for the `AccessLevel`. Format: `accessPolicies/{access_policy}/accessLevels/{access_level}`. The `access_level` component must begin with a letter, followed by alphanumeric characters or `_`. Its maximum length is 50 characters. After you create an `AccessLevel`, you cannot change its `name`."##), Some(true), Some(false)), @@ -2750,7 +3109,129 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("authorized-orgs-descs-create", + Some(r##"Creates a authorized orgs desc. 
The long-running operation from this RPC has a successful status after the authorized orgs desc propagates to long-lasting storage. If a authorized orgs desc contains errors, an error response is returned for the first error encountered. The name of this `AuthorizedOrgsDesc` will be assigned during creation."##), + "Details at http://byron.github.io/google-apis-rs/google_accesscontextmanager1_cli/access-policies_authorized-orgs-descs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Resource name for the access policy which owns this Authorized Orgs Desc. Format: `accessPolicies/{policy_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("authorized-orgs-descs-delete", + Some(r##"Deletes a authorized orgs desc based on the resource name. The long-running operation from this RPC has a successful status after the authorized orgs desc is removed from long-lasting storage."##), + "Details at http://byron.github.io/google-apis-rs/google_accesscontextmanager1_cli/access-policies_authorized-orgs-descs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Resource name for the Authorized Orgs Desc. 
Format: `accessPolicies/{policy_id}/authorizedOrgsDesc/{authorized_orgs_desc_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("authorized-orgs-descs-get", + Some(r##"Gets a authorized orgs desc based on the resource name."##), + "Details at http://byron.github.io/google-apis-rs/google_accesscontextmanager1_cli/access-policies_authorized-orgs-descs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Resource name for the Authorized Orgs Desc. Format: `accessPolicies/{policy_id}/authorizedOrgsDescs/{authorized_orgs_descs_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("authorized-orgs-descs-list", + Some(r##"Lists all authorized orgs descs for an access policy."##), + "Details at http://byron.github.io/google-apis-rs/google_accesscontextmanager1_cli/access-policies_authorized-orgs-descs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Resource name for the access policy to list Authorized Orgs Desc from. Format: `accessPolicies/{policy_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("authorized-orgs-descs-patch", + Some(r##"Updates a authorized orgs desc. 
The long-running operation from this RPC has a successful status after the authorized orgs desc propagates to long-lasting storage. If a authorized orgs desc contains errors, an error response is returned for the first error encountered. Only the organization list in `AuthorizedOrgsDesc` can be updated. The name, authorization_type, asset_type and authorization_direction cannot be updated."##), + "Details at http://byron.github.io/google-apis-rs/google_accesscontextmanager1_cli/access-policies_authorized-orgs-descs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Assigned by the server during creation. The last segment has an arbitrary length and has only URI unreserved characters (as defined by [RFC 3986 Section 2.3](https://tools.ietf.org/html/rfc3986#section-2.3)). Should not be specified by the client during creation. Example: "accessPolicies/122256/authorizedOrgs/b3-BhcX_Ud5N""##), Some(true), Some(false)), @@ -2844,7 +3325,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3038,7 +3519,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Resource name for the ServicePerimeter. The `short_name` component must begin with a letter and only include alphanumeric and '_'. Format: `accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}`"##), + Some(r##"Resource name for the `ServicePerimeter`. Format: `accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}`. The `service_perimeter` component must begin with a letter, followed by alphanumeric characters or `_`. 
After you create a `ServicePerimeter`, you cannot change its `name`."##), Some(true), Some(false)), @@ -3094,7 +3575,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3122,7 +3603,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3150,7 +3631,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3400,7 +3881,7 @@ async fn main() { let mut app = App::new("accesscontextmanager1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230123") .about("An API for setting attribute based access control to requests to GCP services.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_accesscontextmanager1_cli") .arg(Arg::with_name("url") diff --git a/gen/accesscontextmanager1/Cargo.toml b/gen/accesscontextmanager1/Cargo.toml index 250f1a1928..826e97af16 100644 --- a/gen/accesscontextmanager1/Cargo.toml +++ b/gen/accesscontextmanager1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-accesscontextmanager1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Access Context Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/accesscontextmanager1" homepage = "https://cloud.google.com/access-context-manager/docs/reference/rest/" -documentation = "https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-accesscontextmanager1/5.0.2+20230123" license = "MIT" keywords = ["accesscontextmanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/accesscontextmanager1/README.md b/gen/accesscontextmanager1/README.md index a55cab4093..d685457f13 100644 --- a/gen/accesscontextmanager1/README.md +++ b/gen/accesscontextmanager1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-accesscontextmanager1` library allows access to all features of the *Google Access Context Manager* service. 
-This documentation was generated from *Access Context Manager* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *accesscontextmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Access Context Manager* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *accesscontextmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Access Context Manager* *v1* API can be found at the [official documentation site](https://cloud.google.com/access-context-manager/docs/reference/rest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/AccessContextManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/AccessContextManager) ... 
-* [access policies](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicy) - * [*access levels create*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelCreateCall), [*access levels delete*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelDeleteCall), [*access levels get*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelGetCall), [*access levels list*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelListCall), [*access levels patch*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelPatchCall), [*access levels replace all*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelReplaceAllCall), [*access levels test iam permissions*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelTestIamPermissionCall), [*authorized orgs descs create*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescCreateCall), [*authorized orgs descs delete*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescDeleteCall), [*authorized orgs descs get*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescGetCall), [*authorized orgs descs list*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescListCall), 
[*authorized orgs descs patch*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescPatchCall), [*create*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyCreateCall), [*delete*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyDeleteCall), [*get*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyGetCall), [*get iam policy*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyGetIamPolicyCall), [*list*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyListCall), [*patch*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyPatchCall), [*service perimeters commit*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterCommitCall), [*service perimeters create*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterCreateCall), [*service perimeters delete*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterDeleteCall), [*service perimeters get*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterGetCall), [*service perimeters list*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterListCall), [*service perimeters 
patch*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterPatchCall), [*service perimeters replace all*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterReplaceAllCall), [*service perimeters test iam permissions*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterTestIamPermissionCall), [*set iam policy*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::AccessPolicyTestIamPermissionCall) -* [operations](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::Operation) - * [*cancel*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OperationCancelCall), [*delete*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OperationDeleteCall), [*get*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OperationGetCall) and [*list*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OperationListCall) +* [access policies](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicy) + * [*access levels create*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelCreateCall), [*access levels delete*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelDeleteCall), [*access levels 
get*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelGetCall), [*access levels list*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelListCall), [*access levels patch*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelPatchCall), [*access levels replace all*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelReplaceAllCall), [*access levels test iam permissions*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAccessLevelTestIamPermissionCall), [*authorized orgs descs create*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescCreateCall), [*authorized orgs descs delete*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescDeleteCall), [*authorized orgs descs get*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescGetCall), [*authorized orgs descs list*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescListCall), [*authorized orgs descs patch*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyAuthorizedOrgsDescPatchCall), [*create*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyCreateCall), [*delete*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyDeleteCall), 
[*get*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyGetCall), [*get iam policy*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyGetIamPolicyCall), [*list*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyListCall), [*patch*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyPatchCall), [*service perimeters commit*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterCommitCall), [*service perimeters create*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterCreateCall), [*service perimeters delete*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterDeleteCall), [*service perimeters get*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterGetCall), [*service perimeters list*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterListCall), [*service perimeters patch*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterPatchCall), [*service perimeters replace all*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterReplaceAllCall), [*service perimeters test iam permissions*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyServicePerimeterTestIamPermissionCall), [*set iam 
policy*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::AccessPolicyTestIamPermissionCall) +* [operations](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::Operation) + * [*cancel*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OperationCancelCall), [*delete*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OperationDeleteCall), [*get*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OperationGetCall) and [*list*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OperationListCall) * organizations - * [*gcp user access bindings create*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingCreateCall), [*gcp user access bindings delete*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingDeleteCall), [*gcp user access bindings get*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingGetCall), [*gcp user access bindings list*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingListCall) and [*gcp user access bindings patch*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingPatchCall) + * [*gcp user access bindings 
create*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingCreateCall), [*gcp user access bindings delete*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingDeleteCall), [*gcp user access bindings get*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingGetCall), [*gcp user access bindings list*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingListCall) and [*gcp user access bindings patch*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/api::OrganizationGcpUserAccessBindingPatchCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/AccessContextManager)** +* **[Hub](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/AccessContextManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::CallBuilder) -* **[Resources](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::CallBuilder) +* **[Resources](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::Part)** + * **[Parts](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -146,17 +146,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -166,29 +166,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::Delegate) to the -[Method Builder](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::Delegate) to the +[Method Builder](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::RequestValue) and -[decodable](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::RequestValue) and +[decodable](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-accesscontextmanager1/5.0.2-beta-1+20230123/google_accesscontextmanager1/client::RequestValue) are moved +* [request values](https://docs.rs/google-accesscontextmanager1/5.0.2+20230123/google_accesscontextmanager1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/accesscontextmanager1/src/api.rs b/gen/accesscontextmanager1/src/api.rs index 5783f05efd..98f674fe51 100644 --- a/gen/accesscontextmanager1/src/api.rs +++ b/gen/accesscontextmanager1/src/api.rs @@ -122,7 +122,7 @@ impl<'a, S> AccessContextManager { AccessContextManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://accesscontextmanager.googleapis.com/".to_string(), _root_url: "https://accesscontextmanager.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> AccessContextManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/accesscontextmanager1/src/client.rs b/gen/accesscontextmanager1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/accesscontextmanager1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - 
After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/accesscontextmanager1/src/lib.rs b/gen/accesscontextmanager1/src/lib.rs index 0b9496063a..220c745557 100644 --- a/gen/accesscontextmanager1/src/lib.rs +++ b/gen/accesscontextmanager1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Access Context Manager* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *accesscontextmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Access Context Manager* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *accesscontextmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Access Context Manager* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/access-context-manager/docs/reference/rest/). diff --git a/gen/accesscontextmanager1_beta-cli/Cargo.toml b/gen/accesscontextmanager1_beta-cli/Cargo.toml index 816a7afa77..981793d6fb 100644 --- a/gen/accesscontextmanager1_beta-cli/Cargo.toml +++ b/gen/accesscontextmanager1_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-accesscontextmanager1_beta-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Access Context Manager (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/accesscontextmanager1_beta-cli" @@ -20,13 +20,13 @@ name = "accesscontextmanager1-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-accesscontextmanager1_beta] path = "../accesscontextmanager1_beta" -version = "4.0.1+20220301" +version = "5.0.2+20230123" + diff --git 
a/gen/accesscontextmanager1_beta-cli/README.md b/gen/accesscontextmanager1_beta-cli/README.md index d46669f213..37cc8655ab 100644 --- a/gen/accesscontextmanager1_beta-cli/README.md +++ b/gen/accesscontextmanager1_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Access Context Manager* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Access Context Manager* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash accesscontextmanager1-beta [options] diff --git a/gen/accesscontextmanager1_beta-cli/mkdocs.yml b/gen/accesscontextmanager1_beta-cli/mkdocs.yml index e795a419b6..a37843db7f 100644 --- a/gen/accesscontextmanager1_beta-cli/mkdocs.yml +++ b/gen/accesscontextmanager1_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Access Context Manager v4.0.1+20220301 +site_name: Access Context Manager v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-accesscontextmanager1_beta-cli site_description: A complete library to interact with Access Context Manager (protocol v1beta) @@ -7,24 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/accesscontextman docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['access-policies_access-levels-create.md', 'Access Policies', 'Access Levels Create'] -- ['access-policies_access-levels-delete.md', 'Access Policies', 'Access Levels Delete'] -- ['access-policies_access-levels-get.md', 'Access Policies', 'Access Levels Get'] -- ['access-policies_access-levels-list.md', 'Access Policies', 'Access Levels List'] -- ['access-policies_access-levels-patch.md', 'Access Policies', 'Access Levels Patch'] -- ['access-policies_create.md', 'Access Policies', 'Create'] -- ['access-policies_delete.md', 'Access Policies', 'Delete'] -- ['access-policies_get.md', 'Access Policies', 'Get'] -- ['access-policies_list.md', 
'Access Policies', 'List'] -- ['access-policies_patch.md', 'Access Policies', 'Patch'] -- ['access-policies_service-perimeters-create.md', 'Access Policies', 'Service Perimeters Create'] -- ['access-policies_service-perimeters-delete.md', 'Access Policies', 'Service Perimeters Delete'] -- ['access-policies_service-perimeters-get.md', 'Access Policies', 'Service Perimeters Get'] -- ['access-policies_service-perimeters-list.md', 'Access Policies', 'Service Perimeters List'] -- ['access-policies_service-perimeters-patch.md', 'Access Policies', 'Service Perimeters Patch'] -- ['operations_get.md', 'Operations', 'Get'] +nav: +- Home: 'index.md' +- 'Access Policies': + - 'Access Levels Create': 'access-policies_access-levels-create.md' + - 'Access Levels Delete': 'access-policies_access-levels-delete.md' + - 'Access Levels Get': 'access-policies_access-levels-get.md' + - 'Access Levels List': 'access-policies_access-levels-list.md' + - 'Access Levels Patch': 'access-policies_access-levels-patch.md' + - 'Create': 'access-policies_create.md' + - 'Delete': 'access-policies_delete.md' + - 'Get': 'access-policies_get.md' + - 'List': 'access-policies_list.md' + - 'Patch': 'access-policies_patch.md' + - 'Service Perimeters Create': 'access-policies_service-perimeters-create.md' + - 'Service Perimeters Delete': 'access-policies_service-perimeters-delete.md' + - 'Service Perimeters Get': 'access-policies_service-perimeters-get.md' + - 'Service Perimeters List': 'access-policies_service-perimeters-list.md' + - 'Service Perimeters Patch': 'access-policies_service-perimeters-patch.md' +- 'Operations': + - 'Get': 'operations_get.md' theme: readthedocs diff --git a/gen/accesscontextmanager1_beta-cli/src/client.rs b/gen/accesscontextmanager1_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/accesscontextmanager1_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; 
-use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/accesscontextmanager1_beta-cli/src/main.rs b/gen/accesscontextmanager1_beta-cli/src/main.rs index 5c68f4daf2..40eaf4d103 100644 --- a/gen/accesscontextmanager1_beta-cli/src/main.rs +++ b/gen/accesscontextmanager1_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_accesscontextmanager1_beta::{api, Error, oauth2}; +use google_accesscontextmanager1_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -261,7 +260,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "access-level-format" => { call = call.access_level_format(value.unwrap_or("")); @@ -360,7 +359,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -613,7 +612,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -704,7 
+703,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -961,7 +960,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1059,7 +1058,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1403,7 +1402,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Resource name for the Access Level. The `short_name` component must begin with a letter and only include alphanumeric and '_'. Format: `accessPolicies/{policy_id}/accessLevels/{short_name}`. The maximum length // of the `short_name` component is 50 characters."##), + Some(r##"Resource name for the `AccessLevel`. Format: `accessPolicies/{access_policy}/accessLevels/{access_level}`. The `access_level` component must begin with a letter, followed by alphanumeric characters or `_`. Its maximum length is 50 characters. After you create an `AccessLevel`, you cannot change its `name`."##), Some(true), Some(false)), @@ -1635,7 +1634,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Resource name for the ServicePerimeter. The `short_name` component must begin with a letter and only include alphanumeric and '_'. 
Format: `accessPolicies/{policy_id}/servicePerimeters/{short_name}`"##), + Some(r##"Resource name for the `ServicePerimeter`. Format: `accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}`. The `service_perimeter` component must begin with a letter, followed by alphanumeric characters or `_`. After you create a `ServicePerimeter`, you cannot change its `name`."##), Some(true), Some(false)), @@ -1688,7 +1687,7 @@ async fn main() { let mut app = App::new("accesscontextmanager1-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230123") .about("An API for setting attribute based access control to requests to GCP services.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_accesscontextmanager1_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/accesscontextmanager1_beta/Cargo.toml b/gen/accesscontextmanager1_beta/Cargo.toml index 8a33687fe0..40e314855b 100644 --- a/gen/accesscontextmanager1_beta/Cargo.toml +++ b/gen/accesscontextmanager1_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-accesscontextmanager1_beta" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Access Context Manager (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/accesscontextmanager1_beta" homepage = "https://cloud.google.com/access-context-manager/docs/reference/rest/" -documentation = "https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123" license = "MIT" keywords = ["accesscontextmanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/accesscontextmanager1_beta/README.md b/gen/accesscontextmanager1_beta/README.md index da611c5aef..7066d57c46 100644 --- a/gen/accesscontextmanager1_beta/README.md +++ 
b/gen/accesscontextmanager1_beta/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-accesscontextmanager1_beta` library allows access to all features of the *Google Access Context Manager* service. -This documentation was generated from *Access Context Manager* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *accesscontextmanager:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Access Context Manager* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *accesscontextmanager:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Access Context Manager* *v1_beta* API can be found at the [official documentation site](https://cloud.google.com/access-context-manager/docs/reference/rest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/AccessContextManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/AccessContextManager) ... 
-* [access policies](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicy) - * [*access levels create*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelCreateCall), [*access levels delete*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelDeleteCall), [*access levels get*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelGetCall), [*access levels list*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelListCall), [*access levels patch*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelPatchCall), [*create*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyCreateCall), [*delete*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyDeleteCall), [*get*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyGetCall), [*list*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyListCall), [*patch*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyPatchCall), [*service perimeters create*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterCreateCall), [*service perimeters 
delete*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterDeleteCall), [*service perimeters get*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterGetCall), [*service perimeters list*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterListCall) and [*service perimeters patch*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterPatchCall) -* [operations](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::Operation) - * [*get*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/api::OperationGetCall) +* [access policies](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicy) + * [*access levels create*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelCreateCall), [*access levels delete*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelDeleteCall), [*access levels get*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelGetCall), [*access levels list*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelListCall), [*access levels patch*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyAccessLevelPatchCall), 
[*create*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyCreateCall), [*delete*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyDeleteCall), [*get*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyGetCall), [*list*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyListCall), [*patch*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyPatchCall), [*service perimeters create*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterCreateCall), [*service perimeters delete*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterDeleteCall), [*service perimeters get*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterGetCall), [*service perimeters list*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterListCall) and [*service perimeters patch*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::AccessPolicyServicePerimeterPatchCall) +* [operations](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::Operation) + * [*get*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/api::OperationGetCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/AccessContextManager)** +* **[Hub](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/AccessContextManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::Part)** + * **[Parts](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -136,17 +136,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -156,29 +156,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::RequestValue) and -[decodable](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::RequestValue) and +[decodable](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-accesscontextmanager1_beta/5.0.2-beta-1+20230123/google_accesscontextmanager1_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-accesscontextmanager1_beta/5.0.2+20230123/google_accesscontextmanager1_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/accesscontextmanager1_beta/src/api.rs b/gen/accesscontextmanager1_beta/src/api.rs index f507e17d41..dc1b7b6aa5 100644 --- a/gen/accesscontextmanager1_beta/src/api.rs +++ b/gen/accesscontextmanager1_beta/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> AccessContextManager { AccessContextManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://accesscontextmanager.googleapis.com/".to_string(), _root_url: "https://accesscontextmanager.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> AccessContextManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/accesscontextmanager1_beta/src/client.rs b/gen/accesscontextmanager1_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/accesscontextmanager1_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/accesscontextmanager1_beta/src/lib.rs b/gen/accesscontextmanager1_beta/src/lib.rs index 8e2d40fab7..c583906f30 100644 --- a/gen/accesscontextmanager1_beta/src/lib.rs +++ b/gen/accesscontextmanager1_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Access Context Manager* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *accesscontextmanager:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Access Context Manager* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *accesscontextmanager:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Access Context Manager* *v1_beta* API can be found at the //! [official documentation site](https://cloud.google.com/access-context-manager/docs/reference/rest/). diff --git a/gen/adexchangebuyer1d3-cli/Cargo.toml b/gen/adexchangebuyer1d3-cli/Cargo.toml index a7f7a9c6b6..fce02aee87 100644 --- a/gen/adexchangebuyer1d3-cli/Cargo.toml +++ b/gen/adexchangebuyer1d3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-adexchangebuyer1d3-cli" -version = "4.0.1+20210330" +version = "5.0.2+20210330" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ad Exchange Buyer (protocol v1.3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer1d3-cli" @@ -20,13 +20,13 @@ name = "adexchangebuyer1d3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-adexchangebuyer1d3] path = "../adexchangebuyer1d3" -version = "4.0.1+20210330" +version = "5.0.2+20210330" + diff --git a/gen/adexchangebuyer1d3-cli/README.md b/gen/adexchangebuyer1d3-cli/README.md index 
1bb4bf37ec..1dd854b001 100644 --- a/gen/adexchangebuyer1d3-cli/README.md +++ b/gen/adexchangebuyer1d3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Ad Exchange Buyer* API at revision *20210330*. The CLI is at version *4.0.1*. +This documentation was generated from the *Ad Exchange Buyer* API at revision *20210330*. The CLI is at version *5.0.2*. ```bash adexchangebuyer1d3 [options] diff --git a/gen/adexchangebuyer1d3-cli/mkdocs.yml b/gen/adexchangebuyer1d3-cli/mkdocs.yml index 0ae8cfbbbb..0c9bdce3f8 100644 --- a/gen/adexchangebuyer1d3-cli/mkdocs.yml +++ b/gen/adexchangebuyer1d3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Ad Exchange Buyer v4.0.1+20210330 +site_name: Ad Exchange Buyer v5.0.2+20210330 site_url: http://byron.github.io/google-apis-rs/google-adexchangebuyer1d3-cli site_description: A complete library to interact with Ad Exchange Buyer (protocol v1.3) @@ -7,29 +7,36 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['billing-info_get.md', 'Billing Info', 'Get'] -- ['billing-info_list.md', 'Billing Info', 'List'] -- ['budget_get.md', 'Budget', 'Get'] -- ['budget_patch.md', 'Budget', 'Patch'] -- ['budget_update.md', 'Budget', 'Update'] -- ['creatives_get.md', 'Creatives', 'Get'] -- ['creatives_insert.md', 'Creatives', 'Insert'] -- ['creatives_list.md', 'Creatives', 'List'] -- ['direct-deals_get.md', 'Direct Deals', 'Get'] -- ['direct-deals_list.md', 'Direct Deals', 'List'] -- ['performance-report_list.md', 'Performance Report', 'List'] -- ['pretargeting-config_delete.md', 'Pretargeting Config', 'Delete'] -- ['pretargeting-config_get.md', 'Pretargeting Config', 
'Get'] -- ['pretargeting-config_insert.md', 'Pretargeting Config', 'Insert'] -- ['pretargeting-config_list.md', 'Pretargeting Config', 'List'] -- ['pretargeting-config_patch.md', 'Pretargeting Config', 'Patch'] -- ['pretargeting-config_update.md', 'Pretargeting Config', 'Update'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' + - 'Update': 'accounts_update.md' +- 'Billing Info': + - 'Get': 'billing-info_get.md' + - 'List': 'billing-info_list.md' +- 'Budget': + - 'Get': 'budget_get.md' + - 'Patch': 'budget_patch.md' + - 'Update': 'budget_update.md' +- 'Creatives': + - 'Get': 'creatives_get.md' + - 'Insert': 'creatives_insert.md' + - 'List': 'creatives_list.md' +- 'Direct Deals': + - 'Get': 'direct-deals_get.md' + - 'List': 'direct-deals_list.md' +- 'Performance Report': + - 'List': 'performance-report_list.md' +- 'Pretargeting Config': + - 'Delete': 'pretargeting-config_delete.md' + - 'Get': 'pretargeting-config_get.md' + - 'Insert': 'pretargeting-config_insert.md' + - 'List': 'pretargeting-config_list.md' + - 'Patch': 'pretargeting-config_patch.md' + - 'Update': 'pretargeting-config_update.md' theme: readthedocs diff --git a/gen/adexchangebuyer1d3-cli/src/client.rs b/gen/adexchangebuyer1d3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/adexchangebuyer1d3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} 
- -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/adexchangebuyer1d3-cli/src/main.rs b/gen/adexchangebuyer1d3-cli/src/main.rs index a11bc4008a..82a4c06e28 100644 --- a/gen/adexchangebuyer1d3-cli/src/main.rs +++ b/gen/adexchangebuyer1d3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_adexchangebuyer1d3::{api, Error, oauth2}; +use google_adexchangebuyer1d3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -867,13 +866,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "buyer-creative-id" => { call = call.add_buyer_creative_id(value.unwrap_or("")); }, "account-id" => { - call = call.add_account_id(arg_from_str(value.unwrap_or("-0"), err, "account-id", "integer")); + call = call.add_account_id( value.map(|v| arg_from_str(v, err, "account-id", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1036,7 +1035,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2301,7 +2300,7 @@ async fn main() { let mut app = App::new("adexchangebuyer1d3") 
.author("Sebastian Thiel ") - .version("4.0.1+20210330") + .version("5.0.2+20210330") .about("Accesses your bidding-account information, submits creatives for validation, finds available direct deals, and retrieves performance reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d3_cli") .arg(Arg::with_name("url") diff --git a/gen/adexchangebuyer1d3/Cargo.toml b/gen/adexchangebuyer1d3/Cargo.toml index 0aaf06ca1a..bbc2c03991 100644 --- a/gen/adexchangebuyer1d3/Cargo.toml +++ b/gen/adexchangebuyer1d3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-adexchangebuyer1d3" -version = "5.0.2-beta-1+20210330" +version = "5.0.2+20210330" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ad Exchange Buyer (protocol v1.3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer1d3" homepage = "https://developers.google.com/ad-exchange/buyer-rest" -documentation = "https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330" +documentation = "https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330" license = "MIT" keywords = ["adexchangebuyer", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/adexchangebuyer1d3/README.md b/gen/adexchangebuyer1d3/README.md index f51e34b5aa..79308b268e 100644 --- a/gen/adexchangebuyer1d3/README.md +++ b/gen/adexchangebuyer1d3/README.md @@ -5,28 +5,28 @@ DO NOT EDIT ! --> The `google-adexchangebuyer1d3` library allows access to all features of the *Google Ad Exchange Buyer* service. -This documentation was generated from *Ad Exchange Buyer* crate version *5.0.2-beta-1+20210330*, where *20210330* is the exact revision of the *adexchangebuyer:v1.3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Ad Exchange Buyer* crate version *5.0.2+20210330*, where *20210330* is the exact revision of the *adexchangebuyer:v1.3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Ad Exchange Buyer* *v1d3* API can be found at the [official documentation site](https://developers.google.com/ad-exchange/buyer-rest). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/AdExchangeBuyer) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/AdExchangeBuyer) ... -* [accounts](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::Account) - * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::AccountGetCall), [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::AccountListCall), [*patch*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::AccountPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::AccountUpdateCall) -* [billing info](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::BillingInfo) - * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::BillingInfoGetCall) and [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::BillingInfoListCall) -* [budget](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::Budget) - * 
[*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::BudgetGetCall), [*patch*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::BudgetPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::BudgetUpdateCall) -* [creatives](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::Creative) - * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::CreativeGetCall), [*insert*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::CreativeInsertCall) and [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::CreativeListCall) -* [direct deals](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::DirectDeal) - * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::DirectDealGetCall) and [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::DirectDealListCall) -* [performance report](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PerformanceReport) - * [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PerformanceReportListCall) -* [pretargeting config](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PretargetingConfig) - * [*delete*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PretargetingConfigDeleteCall), [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PretargetingConfigGetCall), 
[*insert*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PretargetingConfigInsertCall), [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PretargetingConfigListCall), [*patch*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PretargetingConfigPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/api::PretargetingConfigUpdateCall) +* [accounts](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::Account) + * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::AccountGetCall), [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::AccountListCall), [*patch*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::AccountPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::AccountUpdateCall) +* [billing info](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::BillingInfo) + * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::BillingInfoGetCall) and [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::BillingInfoListCall) +* [budget](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::Budget) + * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::BudgetGetCall), [*patch*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::BudgetPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::BudgetUpdateCall) +* 
[creatives](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::Creative) + * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::CreativeGetCall), [*insert*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::CreativeInsertCall) and [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::CreativeListCall) +* [direct deals](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::DirectDeal) + * [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::DirectDealGetCall) and [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::DirectDealListCall) +* [performance report](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PerformanceReport) + * [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PerformanceReportListCall) +* [pretargeting config](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PretargetingConfig) + * [*delete*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PretargetingConfigDeleteCall), [*get*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PretargetingConfigGetCall), [*insert*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PretargetingConfigInsertCall), [*list*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PretargetingConfigListCall), [*patch*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PretargetingConfigPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/api::PretargetingConfigUpdateCall) @@ -35,17 
+35,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/AdExchangeBuyer)** +* **[Hub](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/AdExchangeBuyer)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::CallBuilder) -* **[Resources](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::CallBuilder) +* **[Resources](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::Part)** + * **[Parts](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::CallBuilder)** * operations to apply to 
*Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -139,17 +139,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -159,29 +159,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::Delegate) to the -[Method Builder](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::Delegate) to the +[Method Builder](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::RequestValue) and -[decodable](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::RequestValue) and +[decodable](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-adexchangebuyer1d3/5.0.2-beta-1+20210330/google_adexchangebuyer1d3/client::RequestValue) are moved +* [request values](https://docs.rs/google-adexchangebuyer1d3/5.0.2+20210330/google_adexchangebuyer1d3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/adexchangebuyer1d3/src/api.rs b/gen/adexchangebuyer1d3/src/api.rs index 95a841a0c6..30dc3c0812 100644 --- a/gen/adexchangebuyer1d3/src/api.rs +++ b/gen/adexchangebuyer1d3/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> AdExchangeBuyer { AdExchangeBuyer { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/adexchangebuyer/v1.3/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -154,7 +154,7 @@ impl<'a, S> AdExchangeBuyer { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/adexchangebuyer1d3/src/client.rs b/gen/adexchangebuyer1d3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/adexchangebuyer1d3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/adexchangebuyer1d3/src/lib.rs b/gen/adexchangebuyer1d3/src/lib.rs index 8744f40f28..f7d605da7b 100644 --- a/gen/adexchangebuyer1d3/src/lib.rs +++ b/gen/adexchangebuyer1d3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Ad Exchange Buyer* crate version *5.0.2-beta-1+20210330*, where *20210330* is the exact revision of the *adexchangebuyer:v1.3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Ad Exchange Buyer* crate version *5.0.2+20210330*, where *20210330* is the exact revision of the *adexchangebuyer:v1.3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Ad Exchange Buyer* *v1d3* API can be found at the //! [official documentation site](https://developers.google.com/ad-exchange/buyer-rest). diff --git a/gen/adexchangebuyer1d4-cli/Cargo.toml b/gen/adexchangebuyer1d4-cli/Cargo.toml index 4e98c528ab..0962bb0933 100644 --- a/gen/adexchangebuyer1d4-cli/Cargo.toml +++ b/gen/adexchangebuyer1d4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-adexchangebuyer1d4-cli" -version = "4.0.1+20210330" +version = "5.0.2+20210330" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ad Exchange Buyer (protocol v1.4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer1d4-cli" @@ -20,13 +20,13 @@ name = "adexchangebuyer1d4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-adexchangebuyer1d4] path = "../adexchangebuyer1d4" -version = "4.0.1+20210330" +version = "5.0.2+20210330" + diff --git a/gen/adexchangebuyer1d4-cli/README.md b/gen/adexchangebuyer1d4-cli/README.md index 79c8803a6d..cdba70f886 100644 --- 
a/gen/adexchangebuyer1d4-cli/README.md +++ b/gen/adexchangebuyer1d4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Ad Exchange Buyer* API at revision *20210330*. The CLI is at version *4.0.1*. +This documentation was generated from the *Ad Exchange Buyer* API at revision *20210330*. The CLI is at version *5.0.2*. ```bash adexchangebuyer1d4 [options] diff --git a/gen/adexchangebuyer1d4-cli/mkdocs.yml b/gen/adexchangebuyer1d4-cli/mkdocs.yml index bd5a7043de..4e445403a9 100644 --- a/gen/adexchangebuyer1d4-cli/mkdocs.yml +++ b/gen/adexchangebuyer1d4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Ad Exchange Buyer v4.0.1+20210330 +site_name: Ad Exchange Buyer v5.0.2+20210330 site_url: http://byron.github.io/google-apis-rs/google-adexchangebuyer1d4-cli site_description: A complete library to interact with Ad Exchange Buyer (protocol v1.4) @@ -7,46 +7,58 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['billing-info_get.md', 'Billing Info', 'Get'] -- ['billing-info_list.md', 'Billing Info', 'List'] -- ['budget_get.md', 'Budget', 'Get'] -- ['budget_patch.md', 'Budget', 'Patch'] -- ['budget_update.md', 'Budget', 'Update'] -- ['creatives_add-deal.md', 'Creatives', 'Add Deal'] -- ['creatives_get.md', 'Creatives', 'Get'] -- ['creatives_insert.md', 'Creatives', 'Insert'] -- ['creatives_list.md', 'Creatives', 'List'] -- ['creatives_list-deals.md', 'Creatives', 'List Deals'] -- ['creatives_remove-deal.md', 'Creatives', 'Remove Deal'] -- ['marketplacedeals_delete.md', 'Marketplacedeals', 'Delete'] -- ['marketplacedeals_insert.md', 'Marketplacedeals', 'Insert'] -- 
['marketplacedeals_list.md', 'Marketplacedeals', 'List'] -- ['marketplacedeals_update.md', 'Marketplacedeals', 'Update'] -- ['marketplacenotes_insert.md', 'Marketplacenotes', 'Insert'] -- ['marketplacenotes_list.md', 'Marketplacenotes', 'List'] -- ['marketplaceprivateauction_updateproposal.md', 'Marketplaceprivateauction', 'Updateproposal'] -- ['performance-report_list.md', 'Performance Report', 'List'] -- ['pretargeting-config_delete.md', 'Pretargeting Config', 'Delete'] -- ['pretargeting-config_get.md', 'Pretargeting Config', 'Get'] -- ['pretargeting-config_insert.md', 'Pretargeting Config', 'Insert'] -- ['pretargeting-config_list.md', 'Pretargeting Config', 'List'] -- ['pretargeting-config_patch.md', 'Pretargeting Config', 'Patch'] -- ['pretargeting-config_update.md', 'Pretargeting Config', 'Update'] -- ['products_get.md', 'Products', 'Get'] -- ['products_search.md', 'Products', 'Search'] -- ['proposals_get.md', 'Proposals', 'Get'] -- ['proposals_insert.md', 'Proposals', 'Insert'] -- ['proposals_patch.md', 'Proposals', 'Patch'] -- ['proposals_search.md', 'Proposals', 'Search'] -- ['proposals_setupcomplete.md', 'Proposals', 'Setupcomplete'] -- ['proposals_update.md', 'Proposals', 'Update'] -- ['pubprofiles_list.md', 'Pubprofiles', 'List'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' + - 'Update': 'accounts_update.md' +- 'Billing Info': + - 'Get': 'billing-info_get.md' + - 'List': 'billing-info_list.md' +- 'Budget': + - 'Get': 'budget_get.md' + - 'Patch': 'budget_patch.md' + - 'Update': 'budget_update.md' +- 'Creatives': + - 'Add Deal': 'creatives_add-deal.md' + - 'Get': 'creatives_get.md' + - 'Insert': 'creatives_insert.md' + - 'List': 'creatives_list.md' + - 'List Deals': 'creatives_list-deals.md' + - 'Remove Deal': 'creatives_remove-deal.md' +- 'Marketplacedeals': + - 'Delete': 'marketplacedeals_delete.md' + - 'Insert': 'marketplacedeals_insert.md' + - 'List': 
'marketplacedeals_list.md' + - 'Update': 'marketplacedeals_update.md' +- 'Marketplacenotes': + - 'Insert': 'marketplacenotes_insert.md' + - 'List': 'marketplacenotes_list.md' +- 'Marketplaceprivateauction': + - 'Updateproposal': 'marketplaceprivateauction_updateproposal.md' +- 'Performance Report': + - 'List': 'performance-report_list.md' +- 'Pretargeting Config': + - 'Delete': 'pretargeting-config_delete.md' + - 'Get': 'pretargeting-config_get.md' + - 'Insert': 'pretargeting-config_insert.md' + - 'List': 'pretargeting-config_list.md' + - 'Patch': 'pretargeting-config_patch.md' + - 'Update': 'pretargeting-config_update.md' +- 'Products': + - 'Get': 'products_get.md' + - 'Search': 'products_search.md' +- 'Proposals': + - 'Get': 'proposals_get.md' + - 'Insert': 'proposals_insert.md' + - 'Patch': 'proposals_patch.md' + - 'Search': 'proposals_search.md' + - 'Setupcomplete': 'proposals_setupcomplete.md' + - 'Update': 'proposals_update.md' +- 'Pubprofiles': + - 'List': 'pubprofiles_list.md' theme: readthedocs diff --git a/gen/adexchangebuyer1d4-cli/src/client.rs b/gen/adexchangebuyer1d4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/adexchangebuyer1d4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub 
jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/adexchangebuyer1d4-cli/src/main.rs b/gen/adexchangebuyer1d4-cli/src/main.rs index 868ce4b605..0ee8a398a0 100644 --- a/gen/adexchangebuyer1d4-cli/src/main.rs +++ b/gen/adexchangebuyer1d4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_adexchangebuyer1d4::{api, Error, oauth2}; +use google_adexchangebuyer1d4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -204,7 +203,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "confirm-unsafe-account-change" => { - call = call.confirm_unsafe_account_change(arg_from_str(value.unwrap_or("false"), err, "confirm-unsafe-account-change", "boolean")); + call = call.confirm_unsafe_account_change( value.map(|v| arg_from_str(v, err, "confirm-unsafe-account-change", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -301,7 +300,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "confirm-unsafe-account-change" => { - call = call.confirm_unsafe_account_change(arg_from_str(value.unwrap_or("false"), err, "confirm-unsafe-account-change", "boolean")); + call = call.confirm_unsafe_account_change( value.map(|v| arg_from_str(v, err, "confirm-unsafe-account-change", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -930,7 +929,7 @@ where call = call.open_auction_status_filter(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, 
"max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "deals-status-filter" => { call = call.deals_status_filter(value.unwrap_or("")); @@ -939,7 +938,7 @@ where call = call.add_buyer_creative_id(value.unwrap_or("")); }, "account-id" => { - call = call.add_account_id(arg_from_str(value.unwrap_or("-0"), err, "account-id", "integer")); + call = call.add_account_id( value.map(|v| arg_from_str(v, err, "account-id", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1660,7 +1659,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -4087,7 +4086,7 @@ async fn main() { let mut app = App::new("adexchangebuyer1d4") .author("Sebastian Thiel ") - .version("4.0.1+20210330") + .version("5.0.2+20210330") .about("Accesses your bidding-account information, submits creatives for validation, finds available direct deals, and retrieves performance reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli") .arg(Arg::with_name("url") diff --git a/gen/adexchangebuyer1d4/Cargo.toml b/gen/adexchangebuyer1d4/Cargo.toml index 712ba475e1..5c5f06ae9d 100644 --- a/gen/adexchangebuyer1d4/Cargo.toml +++ b/gen/adexchangebuyer1d4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-adexchangebuyer1d4" -version = "5.0.2-beta-1+20210330" +version = "5.0.2+20210330" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ad Exchange Buyer (protocol v1.4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer1d4" homepage = "https://developers.google.com/ad-exchange/buyer-rest" -documentation = 
"https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330" +documentation = "https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330" license = "MIT" keywords = ["adexchangebuyer", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/adexchangebuyer1d4/README.md b/gen/adexchangebuyer1d4/README.md index db461e688e..5a1b4d4856 100644 --- a/gen/adexchangebuyer1d4/README.md +++ b/gen/adexchangebuyer1d4/README.md @@ -5,38 +5,38 @@ DO NOT EDIT ! --> The `google-adexchangebuyer1d4` library allows access to all features of the *Google Ad Exchange Buyer* service. -This documentation was generated from *Ad Exchange Buyer* crate version *5.0.2-beta-1+20210330*, where *20210330* is the exact revision of the *adexchangebuyer:v1.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Ad Exchange Buyer* crate version *5.0.2+20210330*, where *20210330* is the exact revision of the *adexchangebuyer:v1.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Ad Exchange Buyer* *v1d4* API can be found at the [official documentation site](https://developers.google.com/ad-exchange/buyer-rest). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/AdExchangeBuyer) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/AdExchangeBuyer) ... 
-* [accounts](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::Account) - * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::AccountGetCall), [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::AccountListCall), [*patch*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::AccountPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::AccountUpdateCall) -* [billing info](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::BillingInfo) - * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::BillingInfoGetCall) and [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::BillingInfoListCall) -* [budget](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::Budget) - * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::BudgetGetCall), [*patch*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::BudgetPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::BudgetUpdateCall) -* [creatives](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::Creative) - * [*add deal*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::CreativeAddDealCall), [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::CreativeGetCall), [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::CreativeInsertCall), 
[*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::CreativeListCall), [*list deals*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::CreativeListDealCall) and [*remove deal*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::CreativeRemoveDealCall) +* [accounts](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::Account) + * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::AccountGetCall), [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::AccountListCall), [*patch*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::AccountPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::AccountUpdateCall) +* [billing info](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::BillingInfo) + * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::BillingInfoGetCall) and [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::BillingInfoListCall) +* [budget](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::Budget) + * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::BudgetGetCall), [*patch*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::BudgetPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::BudgetUpdateCall) +* [creatives](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::Creative) + * [*add 
deal*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::CreativeAddDealCall), [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::CreativeGetCall), [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::CreativeInsertCall), [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::CreativeListCall), [*list deals*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::CreativeListDealCall) and [*remove deal*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::CreativeRemoveDealCall) * marketplacedeals - * [*delete*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::MarketplacedealDeleteCall), [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::MarketplacedealInsertCall), [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::MarketplacedealListCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::MarketplacedealUpdateCall) + * [*delete*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::MarketplacedealDeleteCall), [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::MarketplacedealInsertCall), [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::MarketplacedealListCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::MarketplacedealUpdateCall) * marketplacenotes - * [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::MarketplacenoteInsertCall) and 
[*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::MarketplacenoteListCall) + * [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::MarketplacenoteInsertCall) and [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::MarketplacenoteListCall) * marketplaceprivateauction - * [*updateproposal*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::MarketplaceprivateauctionUpdateproposalCall) -* [performance report](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PerformanceReport) - * [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PerformanceReportListCall) -* [pretargeting config](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PretargetingConfig) - * [*delete*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PretargetingConfigDeleteCall), [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PretargetingConfigGetCall), [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PretargetingConfigInsertCall), [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PretargetingConfigListCall), [*patch*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PretargetingConfigPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PretargetingConfigUpdateCall) -* [products](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::Product) - * 
[*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::ProductGetCall) and [*search*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::ProductSearchCall) -* [proposals](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::Proposal) - * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::ProposalGetCall), [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::ProposalInsertCall), [*patch*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::ProposalPatchCall), [*search*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::ProposalSearchCall), [*setupcomplete*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::ProposalSetupcompleteCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::ProposalUpdateCall) + * [*updateproposal*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::MarketplaceprivateauctionUpdateproposalCall) +* [performance report](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PerformanceReport) + * [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PerformanceReportListCall) +* [pretargeting config](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PretargetingConfig) + * [*delete*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PretargetingConfigDeleteCall), [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PretargetingConfigGetCall), 
[*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PretargetingConfigInsertCall), [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PretargetingConfigListCall), [*patch*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PretargetingConfigPatchCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PretargetingConfigUpdateCall) +* [products](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::Product) + * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::ProductGetCall) and [*search*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::ProductSearchCall) +* [proposals](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::Proposal) + * [*get*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::ProposalGetCall), [*insert*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::ProposalInsertCall), [*patch*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::ProposalPatchCall), [*search*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::ProposalSearchCall), [*setupcomplete*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::ProposalSetupcompleteCall) and [*update*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::ProposalUpdateCall) * pubprofiles - * [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/api::PubprofileListCall) + * [*list*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/api::PubprofileListCall) @@ -45,17 +45,17 @@ Handle 
the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/AdExchangeBuyer)** +* **[Hub](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/AdExchangeBuyer)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::CallBuilder) -* **[Resources](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::CallBuilder) +* **[Resources](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::Part)** + * **[Parts](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::CallBuilder)** * operations to apply to *Resources* All 
*structures* are marked with applicable traits to further categorize them and ease browsing. @@ -151,17 +151,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -171,29 +171,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::Delegate) to the -[Method Builder](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::Delegate) to the +[Method Builder](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::RequestValue) and -[decodable](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::RequestValue) and +[decodable](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-adexchangebuyer1d4/5.0.2-beta-1+20210330/google_adexchangebuyer1d4/client::RequestValue) are moved +* [request values](https://docs.rs/google-adexchangebuyer1d4/5.0.2+20210330/google_adexchangebuyer1d4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/adexchangebuyer1d4/src/api.rs b/gen/adexchangebuyer1d4/src/api.rs index 683fbe36b8..506ef2ed50 100644 --- a/gen/adexchangebuyer1d4/src/api.rs +++ b/gen/adexchangebuyer1d4/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> AdExchangeBuyer { AdExchangeBuyer { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/adexchangebuyer/v1.4/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -169,7 +169,7 @@ impl<'a, S> AdExchangeBuyer { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/adexchangebuyer1d4/src/client.rs b/gen/adexchangebuyer1d4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/adexchangebuyer1d4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/adexchangebuyer1d4/src/lib.rs b/gen/adexchangebuyer1d4/src/lib.rs index a3d9135cfa..8512fd6c04 100644 --- a/gen/adexchangebuyer1d4/src/lib.rs +++ b/gen/adexchangebuyer1d4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Ad Exchange Buyer* crate version *5.0.2-beta-1+20210330*, where *20210330* is the exact revision of the *adexchangebuyer:v1.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Ad Exchange Buyer* crate version *5.0.2+20210330*, where *20210330* is the exact revision of the *adexchangebuyer:v1.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Ad Exchange Buyer* *v1d4* API can be found at the //! [official documentation site](https://developers.google.com/ad-exchange/buyer-rest). diff --git a/gen/adexchangebuyer2_v2_beta1-cli/Cargo.toml b/gen/adexchangebuyer2_v2_beta1-cli/Cargo.toml index f584cc7dcb..6eda3c1071 100644 --- a/gen/adexchangebuyer2_v2_beta1-cli/Cargo.toml +++ b/gen/adexchangebuyer2_v2_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-adexchangebuyer2_v2_beta1-cli" -version = "4.0.1+20220307" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with AdExchangeBuyerII (protocol v2beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer2_v2_beta1-cli" @@ -20,13 +20,13 @@ name = "adexchangebuyer2-v2-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-adexchangebuyer2_v2_beta1] path = "../adexchangebuyer2_v2_beta1" -version = "4.0.1+20220307" +version = "5.0.2+20230124" + diff --git a/gen/adexchangebuyer2_v2_beta1-cli/README.md 
b/gen/adexchangebuyer2_v2_beta1-cli/README.md index eed7d49fc5..1fa963bcb2 100644 --- a/gen/adexchangebuyer2_v2_beta1-cli/README.md +++ b/gen/adexchangebuyer2_v2_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *AdExchangeBuyerII* API at revision *20220307*. The CLI is at version *4.0.1*. +This documentation was generated from the *AdExchangeBuyerII* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash adexchangebuyer2-v2-beta1 [options] diff --git a/gen/adexchangebuyer2_v2_beta1-cli/mkdocs.yml b/gen/adexchangebuyer2_v2_beta1-cli/mkdocs.yml index e00aba5653..427e8c576c 100644 --- a/gen/adexchangebuyer2_v2_beta1-cli/mkdocs.yml +++ b/gen/adexchangebuyer2_v2_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: AdExchangeBuyerII v4.0.1+20220307 +site_name: AdExchangeBuyerII v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-adexchangebuyer2_v2_beta1-cli site_description: A complete library to interact with AdExchangeBuyerII (protocol v2beta1) @@ -7,72 +7,74 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer2 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_clients-create.md', 'Accounts', 'Clients Create'] -- ['accounts_clients-get.md', 'Accounts', 'Clients Get'] -- ['accounts_clients-invitations-create.md', 'Accounts', 'Clients Invitations Create'] -- ['accounts_clients-invitations-get.md', 'Accounts', 'Clients Invitations Get'] -- ['accounts_clients-invitations-list.md', 'Accounts', 'Clients Invitations List'] -- ['accounts_clients-list.md', 'Accounts', 'Clients List'] -- ['accounts_clients-update.md', 'Accounts', 'Clients Update'] -- ['accounts_clients-users-get.md', 'Accounts', 'Clients Users Get'] -- ['accounts_clients-users-list.md', 'Accounts', 'Clients Users List'] -- ['accounts_clients-users-update.md', 'Accounts', 'Clients Users Update'] -- 
['accounts_creatives-create.md', 'Accounts', 'Creatives Create'] -- ['accounts_creatives-deal-associations-add.md', 'Accounts', 'Creatives Deal Associations Add'] -- ['accounts_creatives-deal-associations-list.md', 'Accounts', 'Creatives Deal Associations List'] -- ['accounts_creatives-deal-associations-remove.md', 'Accounts', 'Creatives Deal Associations Remove'] -- ['accounts_creatives-get.md', 'Accounts', 'Creatives Get'] -- ['accounts_creatives-list.md', 'Accounts', 'Creatives List'] -- ['accounts_creatives-stop-watching.md', 'Accounts', 'Creatives Stop Watching'] -- ['accounts_creatives-update.md', 'Accounts', 'Creatives Update'] -- ['accounts_creatives-watch.md', 'Accounts', 'Creatives Watch'] -- ['accounts_finalized-proposals-list.md', 'Accounts', 'Finalized Proposals List'] -- ['accounts_finalized-proposals-pause.md', 'Accounts', 'Finalized Proposals Pause'] -- ['accounts_finalized-proposals-resume.md', 'Accounts', 'Finalized Proposals Resume'] -- ['accounts_products-get.md', 'Accounts', 'Products Get'] -- ['accounts_products-list.md', 'Accounts', 'Products List'] -- ['accounts_proposals-accept.md', 'Accounts', 'Proposals Accept'] -- ['accounts_proposals-add-note.md', 'Accounts', 'Proposals Add Note'] -- ['accounts_proposals-cancel-negotiation.md', 'Accounts', 'Proposals Cancel Negotiation'] -- ['accounts_proposals-complete-setup.md', 'Accounts', 'Proposals Complete Setup'] -- ['accounts_proposals-create.md', 'Accounts', 'Proposals Create'] -- ['accounts_proposals-get.md', 'Accounts', 'Proposals Get'] -- ['accounts_proposals-list.md', 'Accounts', 'Proposals List'] -- ['accounts_proposals-pause.md', 'Accounts', 'Proposals Pause'] -- ['accounts_proposals-resume.md', 'Accounts', 'Proposals Resume'] -- ['accounts_proposals-update.md', 'Accounts', 'Proposals Update'] -- ['accounts_publisher-profiles-get.md', 'Accounts', 'Publisher Profiles Get'] -- ['accounts_publisher-profiles-list.md', 'Accounts', 'Publisher Profiles List'] -- 
['bidders_accounts-filter-sets-bid-metrics-list.md', 'Bidders', 'Accounts Filter Sets Bid Metrics List'] -- ['bidders_accounts-filter-sets-bid-response-errors-list.md', 'Bidders', 'Accounts Filter Sets Bid Response Errors List'] -- ['bidders_accounts-filter-sets-bid-responses-without-bids-list.md', 'Bidders', 'Accounts Filter Sets Bid Responses Without Bids List'] -- ['bidders_accounts-filter-sets-create.md', 'Bidders', 'Accounts Filter Sets Create'] -- ['bidders_accounts-filter-sets-delete.md', 'Bidders', 'Accounts Filter Sets Delete'] -- ['bidders_accounts-filter-sets-filtered-bid-requests-list.md', 'Bidders', 'Accounts Filter Sets Filtered Bid Requests List'] -- ['bidders_accounts-filter-sets-filtered-bids-creatives-list.md', 'Bidders', 'Accounts Filter Sets Filtered Bids Creatives List'] -- ['bidders_accounts-filter-sets-filtered-bids-details-list.md', 'Bidders', 'Accounts Filter Sets Filtered Bids Details List'] -- ['bidders_accounts-filter-sets-filtered-bids-list.md', 'Bidders', 'Accounts Filter Sets Filtered Bids List'] -- ['bidders_accounts-filter-sets-get.md', 'Bidders', 'Accounts Filter Sets Get'] -- ['bidders_accounts-filter-sets-impression-metrics-list.md', 'Bidders', 'Accounts Filter Sets Impression Metrics List'] -- ['bidders_accounts-filter-sets-list.md', 'Bidders', 'Accounts Filter Sets List'] -- ['bidders_accounts-filter-sets-losing-bids-list.md', 'Bidders', 'Accounts Filter Sets Losing Bids List'] -- ['bidders_accounts-filter-sets-non-billable-winning-bids-list.md', 'Bidders', 'Accounts Filter Sets Non Billable Winning Bids List'] -- ['bidders_filter-sets-bid-metrics-list.md', 'Bidders', 'Filter Sets Bid Metrics List'] -- ['bidders_filter-sets-bid-response-errors-list.md', 'Bidders', 'Filter Sets Bid Response Errors List'] -- ['bidders_filter-sets-bid-responses-without-bids-list.md', 'Bidders', 'Filter Sets Bid Responses Without Bids List'] -- ['bidders_filter-sets-create.md', 'Bidders', 'Filter Sets Create'] -- ['bidders_filter-sets-delete.md', 
'Bidders', 'Filter Sets Delete'] -- ['bidders_filter-sets-filtered-bid-requests-list.md', 'Bidders', 'Filter Sets Filtered Bid Requests List'] -- ['bidders_filter-sets-filtered-bids-creatives-list.md', 'Bidders', 'Filter Sets Filtered Bids Creatives List'] -- ['bidders_filter-sets-filtered-bids-details-list.md', 'Bidders', 'Filter Sets Filtered Bids Details List'] -- ['bidders_filter-sets-filtered-bids-list.md', 'Bidders', 'Filter Sets Filtered Bids List'] -- ['bidders_filter-sets-get.md', 'Bidders', 'Filter Sets Get'] -- ['bidders_filter-sets-impression-metrics-list.md', 'Bidders', 'Filter Sets Impression Metrics List'] -- ['bidders_filter-sets-list.md', 'Bidders', 'Filter Sets List'] -- ['bidders_filter-sets-losing-bids-list.md', 'Bidders', 'Filter Sets Losing Bids List'] -- ['bidders_filter-sets-non-billable-winning-bids-list.md', 'Bidders', 'Filter Sets Non Billable Winning Bids List'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Clients Create': 'accounts_clients-create.md' + - 'Clients Get': 'accounts_clients-get.md' + - 'Clients Invitations Create': 'accounts_clients-invitations-create.md' + - 'Clients Invitations Get': 'accounts_clients-invitations-get.md' + - 'Clients Invitations List': 'accounts_clients-invitations-list.md' + - 'Clients List': 'accounts_clients-list.md' + - 'Clients Update': 'accounts_clients-update.md' + - 'Clients Users Get': 'accounts_clients-users-get.md' + - 'Clients Users List': 'accounts_clients-users-list.md' + - 'Clients Users Update': 'accounts_clients-users-update.md' + - 'Creatives Create': 'accounts_creatives-create.md' + - 'Creatives Deal Associations Add': 'accounts_creatives-deal-associations-add.md' + - 'Creatives Deal Associations List': 'accounts_creatives-deal-associations-list.md' + - 'Creatives Deal Associations Remove': 'accounts_creatives-deal-associations-remove.md' + - 'Creatives Get': 'accounts_creatives-get.md' + - 'Creatives List': 'accounts_creatives-list.md' + - 'Creatives Stop Watching': 
'accounts_creatives-stop-watching.md' + - 'Creatives Update': 'accounts_creatives-update.md' + - 'Creatives Watch': 'accounts_creatives-watch.md' + - 'Finalized Proposals List': 'accounts_finalized-proposals-list.md' + - 'Finalized Proposals Pause': 'accounts_finalized-proposals-pause.md' + - 'Finalized Proposals Resume': 'accounts_finalized-proposals-resume.md' + - 'Products Get': 'accounts_products-get.md' + - 'Products List': 'accounts_products-list.md' + - 'Proposals Accept': 'accounts_proposals-accept.md' + - 'Proposals Add Note': 'accounts_proposals-add-note.md' + - 'Proposals Cancel Negotiation': 'accounts_proposals-cancel-negotiation.md' + - 'Proposals Complete Setup': 'accounts_proposals-complete-setup.md' + - 'Proposals Create': 'accounts_proposals-create.md' + - 'Proposals Get': 'accounts_proposals-get.md' + - 'Proposals List': 'accounts_proposals-list.md' + - 'Proposals Pause': 'accounts_proposals-pause.md' + - 'Proposals Resume': 'accounts_proposals-resume.md' + - 'Proposals Update': 'accounts_proposals-update.md' + - 'Publisher Profiles Get': 'accounts_publisher-profiles-get.md' + - 'Publisher Profiles List': 'accounts_publisher-profiles-list.md' +- 'Bidders': + - 'Accounts Filter Sets Bid Metrics List': 'bidders_accounts-filter-sets-bid-metrics-list.md' + - 'Accounts Filter Sets Bid Response Errors List': 'bidders_accounts-filter-sets-bid-response-errors-list.md' + - 'Accounts Filter Sets Bid Responses Without Bids List': 'bidders_accounts-filter-sets-bid-responses-without-bids-list.md' + - 'Accounts Filter Sets Create': 'bidders_accounts-filter-sets-create.md' + - 'Accounts Filter Sets Delete': 'bidders_accounts-filter-sets-delete.md' + - 'Accounts Filter Sets Filtered Bid Requests List': 'bidders_accounts-filter-sets-filtered-bid-requests-list.md' + - 'Accounts Filter Sets Filtered Bids Creatives List': 'bidders_accounts-filter-sets-filtered-bids-creatives-list.md' + - 'Accounts Filter Sets Filtered Bids Details List': 
'bidders_accounts-filter-sets-filtered-bids-details-list.md' + - 'Accounts Filter Sets Filtered Bids List': 'bidders_accounts-filter-sets-filtered-bids-list.md' + - 'Accounts Filter Sets Get': 'bidders_accounts-filter-sets-get.md' + - 'Accounts Filter Sets Impression Metrics List': 'bidders_accounts-filter-sets-impression-metrics-list.md' + - 'Accounts Filter Sets List': 'bidders_accounts-filter-sets-list.md' + - 'Accounts Filter Sets Losing Bids List': 'bidders_accounts-filter-sets-losing-bids-list.md' + - 'Accounts Filter Sets Non Billable Winning Bids List': 'bidders_accounts-filter-sets-non-billable-winning-bids-list.md' + - 'Filter Sets Bid Metrics List': 'bidders_filter-sets-bid-metrics-list.md' + - 'Filter Sets Bid Response Errors List': 'bidders_filter-sets-bid-response-errors-list.md' + - 'Filter Sets Bid Responses Without Bids List': 'bidders_filter-sets-bid-responses-without-bids-list.md' + - 'Filter Sets Create': 'bidders_filter-sets-create.md' + - 'Filter Sets Delete': 'bidders_filter-sets-delete.md' + - 'Filter Sets Filtered Bid Requests List': 'bidders_filter-sets-filtered-bid-requests-list.md' + - 'Filter Sets Filtered Bids Creatives List': 'bidders_filter-sets-filtered-bids-creatives-list.md' + - 'Filter Sets Filtered Bids Details List': 'bidders_filter-sets-filtered-bids-details-list.md' + - 'Filter Sets Filtered Bids List': 'bidders_filter-sets-filtered-bids-list.md' + - 'Filter Sets Get': 'bidders_filter-sets-get.md' + - 'Filter Sets Impression Metrics List': 'bidders_filter-sets-impression-metrics-list.md' + - 'Filter Sets List': 'bidders_filter-sets-list.md' + - 'Filter Sets Losing Bids List': 'bidders_filter-sets-losing-bids-list.md' + - 'Filter Sets Non Billable Winning Bids List': 'bidders_filter-sets-non-billable-winning-bids-list.md' theme: readthedocs diff --git a/gen/adexchangebuyer2_v2_beta1-cli/src/client.rs b/gen/adexchangebuyer2_v2_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- 
a/gen/adexchangebuyer2_v2_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, 
candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - 
push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/adexchangebuyer2_v2_beta1-cli/src/main.rs b/gen/adexchangebuyer2_v2_beta1-cli/src/main.rs index 6a4325c14b..ec8067dda7 100644 --- a/gen/adexchangebuyer2_v2_beta1-cli/src/main.rs +++ b/gen/adexchangebuyer2_v2_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_adexchangebuyer2_v2_beta1::{api, Error, oauth2}; +use google_adexchangebuyer2_v2_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -345,7 +344,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -407,7 +406,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -611,7 +610,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -982,7 +981,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1183,7 +1182,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1541,7 +1540,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter-syntax" => { call = call.filter_syntax(value.unwrap_or("")); @@ -1829,7 +1828,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2385,7 +2384,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter-syntax" => { call = call.filter_syntax(value.unwrap_or("")); @@ -2771,7 +2770,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2830,7 +2829,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2889,7 +2888,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2948,7 +2947,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3056,7 +3055,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "is-transient" => { - call = call.is_transient(arg_from_str(value.unwrap_or("false"), err, "is-transient", "boolean")); + call = call.is_transient( value.map(|v| arg_from_str(v, err, "is-transient", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3167,7 +3166,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3227,7 +3226,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3287,7 +3286,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3346,7 +3345,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3457,7 +3456,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3516,7 +3515,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3575,7 +3574,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3634,7 +3633,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3693,7 +3692,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3752,7 +3751,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3811,7 +3810,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3919,7 +3918,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "is-transient" => { - call = call.is_transient(arg_from_str(value.unwrap_or("false"), err, "is-transient", "boolean")); + call = call.is_transient( value.map(|v| arg_from_str(v, err, "is-transient", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4030,7 +4029,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4090,7 +4089,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4150,7 +4149,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4209,7 +4208,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4320,7 +4319,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4379,7 +4378,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4438,7 +4437,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4497,7 +4496,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5442,7 +5441,7 @@ async fn main() { Some(false)), ]), ("finalized-proposals-pause", - Some(r##"Update given deals to pause serving. This method will set the `DealServingMetadata.DealPauseStatus.has_buyer_paused` bit to true for all listed deals in the request. Currently, this method only applies to PG and PD deals. 
For PA deals, please call accounts.proposals.pause endpoint. It is a no-op to pause already-paused deals. It is an error to call PauseProposalDeals for deals which are not part of the proposal of proposal_id or which are not finalized or renegotiating."##), + Some(r##"Update given deals to pause serving. This method will set the `DealServingMetadata.DealPauseStatus.has_buyer_paused` bit to true for all listed deals in the request. Currently, this method only applies to PG and PD deals. For PA deals, call accounts.proposals.pause endpoint. It is a no-op to pause already-paused deals. It is an error to call PauseProposalDeals for deals which are not part of the proposal of proposal_id or which are not finalized or renegotiating."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer2_v2_beta1_cli/accounts_finalized-proposals-pause", vec![ (Some(r##"account-id"##), @@ -5476,7 +5475,7 @@ async fn main() { Some(false)), ]), ("finalized-proposals-resume", - Some(r##"Update given deals to resume serving. This method will set the `DealServingMetadata.DealPauseStatus.has_buyer_paused` bit to false for all listed deals in the request. Currently, this method only applies to PG and PD deals. For PA deals, please call accounts.proposals.resume endpoint. It is a no-op to resume running deals or deals paused by the other party. It is an error to call ResumeProposalDeals for deals which are not part of the proposal of proposal_id or which are not finalized or renegotiating."##), + Some(r##"Update given deals to resume serving. This method will set the `DealServingMetadata.DealPauseStatus.has_buyer_paused` bit to false for all listed deals in the request. Currently, this method only applies to PG and PD deals. For PA deals, call accounts.proposals.resume endpoint. It is a no-op to resume running deals or deals paused by the other party. 
It is an error to call ResumeProposalDeals for deals which are not part of the proposal of proposal_id or which are not finalized or renegotiating."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer2_v2_beta1_cli/accounts_finalized-proposals-resume", vec![ (Some(r##"account-id"##), @@ -5662,7 +5661,7 @@ async fn main() { Some(false)), ]), ("proposals-complete-setup", - Some(r##"Update the given proposal to indicate that setup has been completed. This method is called by the buyer when the line items have been created on their end for a finalized proposal and all the required creatives have been uploaded using the creatives API. This call updates the `is_setup_completed` bit on the proposal and also notifies the seller. The server will advance the revision number of the most recent proposal."##), + Some(r##"You can opt-in to manually update proposals to indicate that setup is complete. By default, proposal setup is automatically completed after their deals are finalized. Contact your Technical Account Manager to opt in. Buyers can call this method when the proposal has been finalized, and all the required creatives have been uploaded using the Creatives API. This call updates the `is_setup_completed` field on the deals in the proposal, and notifies the seller. The server then advances the revision number of the most recent proposal. 
To mark an individual deal as ready to serve, call `buyers.finalizedDeals.setReadyToServe` in the Marketplace API."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer2_v2_beta1_cli/accounts_proposals-complete-setup", vec![ (Some(r##"account-id"##), @@ -6586,7 +6585,7 @@ async fn main() { let mut app = App::new("adexchangebuyer2-v2-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220307") + .version("5.0.2+20230124") .about("Accesses the latest features for managing Authorized Buyers accounts, Real-Time Bidding configurations and auction metrics, and Marketplace programmatic deals.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adexchangebuyer2_v2_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/adexchangebuyer2_v2_beta1/Cargo.toml b/gen/adexchangebuyer2_v2_beta1/Cargo.toml index a8d2591530..de690f396e 100644 --- a/gen/adexchangebuyer2_v2_beta1/Cargo.toml +++ b/gen/adexchangebuyer2_v2_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-adexchangebuyer2_v2_beta1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with AdExchangeBuyerII (protocol v2beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangebuyer2_v2_beta1" homepage = "https://developers.google.com/authorized-buyers/apis/reference/rest/" -documentation = "https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124" license = "MIT" keywords = ["adexchangebuyer2", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/adexchangebuyer2_v2_beta1/README.md b/gen/adexchangebuyer2_v2_beta1/README.md index a1cf367026..d3f1cd028a 100644 --- a/gen/adexchangebuyer2_v2_beta1/README.md +++ b/gen/adexchangebuyer2_v2_beta1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! 
--> The `google-adexchangebuyer2_v2_beta1` library allows access to all features of the *Google AdExchangeBuyerII* service. -This documentation was generated from *AdExchangeBuyerII* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *adexchangebuyer2:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *AdExchangeBuyerII* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *adexchangebuyer2:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *AdExchangeBuyerII* *v2_beta1* API can be found at the [official documentation site](https://developers.google.com/authorized-buyers/apis/reference/rest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/AdExchangeBuyerII) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/AdExchangeBuyerII) ... 
* accounts - * [*clients create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientCreateCall), [*clients get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientGetCall), [*clients invitations create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientInvitationCreateCall), [*clients invitations get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientInvitationGetCall), [*clients invitations list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientInvitationListCall), [*clients list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientListCall), [*clients update*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientUpdateCall), [*clients users get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientUserGetCall), [*clients users list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientUserListCall), [*clients users update*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientUserUpdateCall), [*creatives create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeCreateCall), [*creatives deal associations add*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeDealAssociationAddCall), [*creatives deal associations 
list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeDealAssociationListCall), [*creatives deal associations remove*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeDealAssociationRemoveCall), [*creatives get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeGetCall), [*creatives list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeListCall), [*creatives stop watching*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeStopWatchingCall), [*creatives update*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeUpdateCall), [*creatives watch*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeWatchCall), [*finalized proposals list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountFinalizedProposalListCall), [*finalized proposals pause*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountFinalizedProposalPauseCall), [*finalized proposals resume*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountFinalizedProposalResumeCall), [*products get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProductGetCall), [*products list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProductListCall), [*proposals 
accept*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalAcceptCall), [*proposals add note*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalAddNoteCall), [*proposals cancel negotiation*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalCancelNegotiationCall), [*proposals complete setup*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalCompleteSetupCall), [*proposals create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalCreateCall), [*proposals get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalGetCall), [*proposals list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalListCall), [*proposals pause*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalPauseCall), [*proposals resume*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalResumeCall), [*proposals update*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalUpdateCall), [*publisher profiles get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountPublisherProfileGetCall) and [*publisher profiles list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::AccountPublisherProfileListCall) + * [*clients 
create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientCreateCall), [*clients get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientGetCall), [*clients invitations create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientInvitationCreateCall), [*clients invitations get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientInvitationGetCall), [*clients invitations list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientInvitationListCall), [*clients list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientListCall), [*clients update*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientUpdateCall), [*clients users get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientUserGetCall), [*clients users list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientUserListCall), [*clients users update*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountClientUserUpdateCall), [*creatives create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeCreateCall), [*creatives deal associations add*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeDealAssociationAddCall), [*creatives deal associations 
list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeDealAssociationListCall), [*creatives deal associations remove*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeDealAssociationRemoveCall), [*creatives get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeGetCall), [*creatives list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeListCall), [*creatives stop watching*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeStopWatchingCall), [*creatives update*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeUpdateCall), [*creatives watch*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountCreativeWatchCall), [*finalized proposals list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountFinalizedProposalListCall), [*finalized proposals pause*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountFinalizedProposalPauseCall), [*finalized proposals resume*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountFinalizedProposalResumeCall), [*products get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProductGetCall), [*products list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProductListCall), [*proposals 
accept*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalAcceptCall), [*proposals add note*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalAddNoteCall), [*proposals cancel negotiation*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalCancelNegotiationCall), [*proposals complete setup*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalCompleteSetupCall), [*proposals create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalCreateCall), [*proposals get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalGetCall), [*proposals list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalListCall), [*proposals pause*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalPauseCall), [*proposals resume*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalResumeCall), [*proposals update*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountProposalUpdateCall), [*publisher profiles get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountPublisherProfileGetCall) and [*publisher profiles list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::AccountPublisherProfileListCall) * bidders - * [*accounts filter sets bid metrics 
list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetBidMetricListCall), [*accounts filter sets bid response errors list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetBidResponseErrorListCall), [*accounts filter sets bid responses without bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetBidResponsesWithoutBidListCall), [*accounts filter sets create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetCreateCall), [*accounts filter sets delete*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetDeleteCall), [*accounts filter sets filtered bid requests list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetFilteredBidRequestListCall), [*accounts filter sets filtered bids creatives list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetFilteredBidCreativeListCall), [*accounts filter sets filtered bids details list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetFilteredBidDetailListCall), [*accounts filter sets filtered bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetFilteredBidListCall), [*accounts filter sets get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetGetCall), [*accounts filter sets impression metrics 
list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetImpressionMetricListCall), [*accounts filter sets list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetListCall), [*accounts filter sets losing bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetLosingBidListCall), [*accounts filter sets non billable winning bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetNonBillableWinningBidListCall), [*filter sets bid metrics list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetBidMetricListCall), [*filter sets bid response errors list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetBidResponseErrorListCall), [*filter sets bid responses without bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetBidResponsesWithoutBidListCall), [*filter sets create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetCreateCall), [*filter sets delete*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetDeleteCall), [*filter sets filtered bid requests list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetFilteredBidRequestListCall), [*filter sets filtered bids creatives 
list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetFilteredBidCreativeListCall), [*filter sets filtered bids details list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetFilteredBidDetailListCall), [*filter sets filtered bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetFilteredBidListCall), [*filter sets get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetGetCall), [*filter sets impression metrics list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetImpressionMetricListCall), [*filter sets list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetListCall), [*filter sets losing bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetLosingBidListCall) and [*filter sets non billable winning bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetNonBillableWinningBidListCall) + * [*accounts filter sets bid metrics list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetBidMetricListCall), [*accounts filter sets bid response errors list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetBidResponseErrorListCall), [*accounts filter sets bid responses without bids 
list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetBidResponsesWithoutBidListCall), [*accounts filter sets create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetCreateCall), [*accounts filter sets delete*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetDeleteCall), [*accounts filter sets filtered bid requests list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetFilteredBidRequestListCall), [*accounts filter sets filtered bids creatives list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetFilteredBidCreativeListCall), [*accounts filter sets filtered bids details list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetFilteredBidDetailListCall), [*accounts filter sets filtered bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetFilteredBidListCall), [*accounts filter sets get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetGetCall), [*accounts filter sets impression metrics list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetImpressionMetricListCall), [*accounts filter sets list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetListCall), [*accounts filter sets losing bids 
list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetLosingBidListCall), [*accounts filter sets non billable winning bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderAccountFilterSetNonBillableWinningBidListCall), [*filter sets bid metrics list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetBidMetricListCall), [*filter sets bid response errors list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetBidResponseErrorListCall), [*filter sets bid responses without bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetBidResponsesWithoutBidListCall), [*filter sets create*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetCreateCall), [*filter sets delete*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetDeleteCall), [*filter sets filtered bid requests list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetFilteredBidRequestListCall), [*filter sets filtered bids creatives list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetFilteredBidCreativeListCall), [*filter sets filtered bids details list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetFilteredBidDetailListCall), [*filter sets filtered bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetFilteredBidListCall), [*filter sets 
get*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetGetCall), [*filter sets impression metrics list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetImpressionMetricListCall), [*filter sets list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetListCall), [*filter sets losing bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetLosingBidListCall) and [*filter sets non billable winning bids list*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/api::BidderFilterSetNonBillableWinningBidListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/AdExchangeBuyerII)** +* **[Hub](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/AdExchangeBuyerII)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::MethodsBuilder) which in turn + allow access 
to individual [*Call Builders*](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::Part)** + * **[Parts](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -135,17 +135,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -155,29 +155,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2-beta-1+20230124/google_adexchangebuyer2_v2_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-adexchangebuyer2_v2_beta1/5.0.2+20230124/google_adexchangebuyer2_v2_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/adexchangebuyer2_v2_beta1/src/api.rs b/gen/adexchangebuyer2_v2_beta1/src/api.rs index 77fe40e110..c2f47a21b4 100644 --- a/gen/adexchangebuyer2_v2_beta1/src/api.rs +++ b/gen/adexchangebuyer2_v2_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> AdExchangeBuyerII { AdExchangeBuyerII { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://adexchangebuyer.googleapis.com/".to_string(), _root_url: "https://adexchangebuyer.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> AdExchangeBuyerII { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/adexchangebuyer2_v2_beta1/src/client.rs b/gen/adexchangebuyer2_v2_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/adexchangebuyer2_v2_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - 
After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/adexchangebuyer2_v2_beta1/src/lib.rs b/gen/adexchangebuyer2_v2_beta1/src/lib.rs index 1c0cd1bce0..f16ef7771c 100644 --- a/gen/adexchangebuyer2_v2_beta1/src/lib.rs +++ b/gen/adexchangebuyer2_v2_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *AdExchangeBuyerII* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *adexchangebuyer2:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *AdExchangeBuyerII* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *adexchangebuyer2:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *AdExchangeBuyerII* *v2_beta1* API can be found at the //! [official documentation site](https://developers.google.com/authorized-buyers/apis/reference/rest/). diff --git a/gen/adexchangeseller2-cli/Cargo.toml b/gen/adexchangeseller2-cli/Cargo.toml index 2caf4a42a0..12a873b248 100644 --- a/gen/adexchangeseller2-cli/Cargo.toml +++ b/gen/adexchangeseller2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-adexchangeseller2-cli" -version = "4.0.1+20171101" +version = "5.0.2+20171101" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ad Exchange Seller (protocol v2.0)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangeseller2-cli" @@ -20,13 +20,13 @@ name = "adexchangeseller2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-adexchangeseller2] path = "../adexchangeseller2" -version = "4.0.1+20171101" +version = "5.0.2+20171101" + diff --git a/gen/adexchangeseller2-cli/README.md b/gen/adexchangeseller2-cli/README.md index 65b1009927..579a547c65 100644 --- 
a/gen/adexchangeseller2-cli/README.md +++ b/gen/adexchangeseller2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Ad Exchange Seller* API at revision *20171101*. The CLI is at version *4.0.1*. +This documentation was generated from the *Ad Exchange Seller* API at revision *20171101*. The CLI is at version *5.0.2*. ```bash adexchangeseller2 [options] diff --git a/gen/adexchangeseller2-cli/mkdocs.yml b/gen/adexchangeseller2-cli/mkdocs.yml index 233699ca14..b994cc349f 100644 --- a/gen/adexchangeseller2-cli/mkdocs.yml +++ b/gen/adexchangeseller2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Ad Exchange Seller v4.0.1+20171101 +site_name: Ad Exchange Seller v5.0.2+20171101 site_url: http://byron.github.io/google-apis-rs/google-adexchangeseller2-cli site_description: A complete library to interact with Ad Exchange Seller (protocol v2.0) @@ -7,22 +7,23 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangeseller docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_adclients-list.md', 'Accounts', 'Adclients List'] -- ['accounts_alerts-list.md', 'Accounts', 'Alerts List'] -- ['accounts_customchannels-get.md', 'Accounts', 'Customchannels Get'] -- ['accounts_customchannels-list.md', 'Accounts', 'Customchannels List'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_metadata-dimensions-list.md', 'Accounts', 'Metadata Dimensions List'] -- ['accounts_metadata-metrics-list.md', 'Accounts', 'Metadata Metrics List'] -- ['accounts_preferreddeals-get.md', 'Accounts', 'Preferreddeals Get'] -- ['accounts_preferreddeals-list.md', 'Accounts', 'Preferreddeals List'] -- ['accounts_reports-generate.md', 'Accounts', 'Reports Generate'] -- ['accounts_reports-saved-generate.md', 'Accounts', 'Reports Saved Generate'] -- ['accounts_reports-saved-list.md', 'Accounts', 'Reports Saved 
List'] -- ['accounts_urlchannels-list.md', 'Accounts', 'Urlchannels List'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Adclients List': 'accounts_adclients-list.md' + - 'Alerts List': 'accounts_alerts-list.md' + - 'Customchannels Get': 'accounts_customchannels-get.md' + - 'Customchannels List': 'accounts_customchannels-list.md' + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Metadata Dimensions List': 'accounts_metadata-dimensions-list.md' + - 'Metadata Metrics List': 'accounts_metadata-metrics-list.md' + - 'Preferreddeals Get': 'accounts_preferreddeals-get.md' + - 'Preferreddeals List': 'accounts_preferreddeals-list.md' + - 'Reports Generate': 'accounts_reports-generate.md' + - 'Reports Saved Generate': 'accounts_reports-saved-generate.md' + - 'Reports Saved List': 'accounts_reports-saved-list.md' + - 'Urlchannels List': 'accounts_urlchannels-list.md' theme: readthedocs diff --git a/gen/adexchangeseller2-cli/src/client.rs b/gen/adexchangeseller2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/adexchangeseller2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/adexchangeseller2-cli/src/main.rs b/gen/adexchangeseller2-cli/src/main.rs index 68d4efcc00..01eed0185c 100644 --- a/gen/adexchangeseller2-cli/src/main.rs +++ b/gen/adexchangeseller2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_adexchangeseller2::{api, Error, oauth2}; +use google_adexchangeseller2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -228,7 +227,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -339,7 +338,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -604,7 +603,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "sort" => { call = call.add_sort(value.unwrap_or("")); @@ -613,7 +612,7 @@ where call = call.add_metric(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -687,10 +686,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -752,7 +751,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -811,7 +810,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1342,7 +1341,7 @@ async fn main() { let mut app = App::new("adexchangeseller2") .author("Sebastian Thiel ") - 
.version("4.0.1+20171101") + .version("5.0.2+20171101") .about("Accesses the inventory of Ad Exchange seller users and generates reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adexchangeseller2_cli") .arg(Arg::with_name("url") diff --git a/gen/adexchangeseller2/Cargo.toml b/gen/adexchangeseller2/Cargo.toml index 0c9060c1aa..245e3b980d 100644 --- a/gen/adexchangeseller2/Cargo.toml +++ b/gen/adexchangeseller2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-adexchangeseller2" -version = "5.0.2-beta-1+20171101" +version = "5.0.2+20171101" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ad Exchange Seller (protocol v2.0)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexchangeseller2" homepage = "https://developers.google.com/ad-exchange/seller-rest/" -documentation = "https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101" +documentation = "https://docs.rs/google-adexchangeseller2/5.0.2+20171101" license = "MIT" keywords = ["adexchangeseller", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/adexchangeseller2/README.md b/gen/adexchangeseller2/README.md index 2567a008ba..9d005e28f2 100644 --- a/gen/adexchangeseller2/README.md +++ b/gen/adexchangeseller2/README.md @@ -5,21 +5,21 @@ DO NOT EDIT ! --> The `google-adexchangeseller2` library allows access to all features of the *Google Ad Exchange Seller* service. -This documentation was generated from *Ad Exchange Seller* crate version *5.0.2-beta-1+20171101*, where *20171101* is the exact revision of the *adexchangeseller:v2.0* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Ad Exchange Seller* crate version *5.0.2+20171101*, where *20171101* is the exact revision of the *adexchangeseller:v2.0* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Ad Exchange Seller* *v2* API can be found at the [official documentation site](https://developers.google.com/ad-exchange/seller-rest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/AdExchangeSeller) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/AdExchangeSeller) ... -* [accounts](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::Account) - * [*adclients list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountAdclientListCall), [*alerts list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountAlertListCall), [*customchannels get*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountCustomchannelGetCall), [*customchannels list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountCustomchannelListCall), [*get*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountGetCall), [*list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountListCall), [*metadata dimensions list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountMetadataDimensionListCall), [*metadata metrics list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountMetadataMetricListCall), [*preferreddeals get*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountPreferreddealGetCall), [*preferreddeals 
list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountPreferreddealListCall), [*reports generate*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountReportGenerateCall), [*reports saved generate*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountReportSavedGenerateCall), [*reports saved list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountReportSavedListCall) and [*urlchannels list*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountUrlchannelListCall) +* [accounts](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::Account) + * [*adclients list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountAdclientListCall), [*alerts list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountAlertListCall), [*customchannels get*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountCustomchannelGetCall), [*customchannels list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountCustomchannelListCall), [*get*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountGetCall), [*list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountListCall), [*metadata dimensions list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountMetadataDimensionListCall), [*metadata metrics list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountMetadataMetricListCall), [*preferreddeals 
get*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountPreferreddealGetCall), [*preferreddeals list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountPreferreddealListCall), [*reports generate*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountReportGenerateCall), [*reports saved generate*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountReportSavedGenerateCall), [*reports saved list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountReportSavedListCall) and [*urlchannels list*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountUrlchannelListCall) Download supported by ... -* [*reports generate accounts*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/api::AccountReportGenerateCall) +* [*reports generate accounts*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/api::AccountReportGenerateCall) @@ -27,17 +27,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/AdExchangeSeller)** +* **[Hub](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/AdExchangeSeller)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::CallBuilder) -* **[Resources](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::CallBuilder) +* **[Resources](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::Part)** + * **[Parts](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -142,17 +142,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -162,29 +162,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::Delegate) to the -[Method Builder](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::Delegate) to the +[Method Builder](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::RequestValue) and -[decodable](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::RequestValue) and +[decodable](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-adexchangeseller2/5.0.2-beta-1+20171101/google_adexchangeseller2/client::RequestValue) are moved +* [request values](https://docs.rs/google-adexchangeseller2/5.0.2+20171101/google_adexchangeseller2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/adexchangeseller2/src/api.rs b/gen/adexchangeseller2/src/api.rs index 672d8718e5..1c4b00f11b 100644 --- a/gen/adexchangeseller2/src/api.rs +++ b/gen/adexchangeseller2/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> AdExchangeSeller { AdExchangeSeller { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/adexchangeseller/v2.0/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> AdExchangeSeller { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/adexchangeseller2/src/client.rs b/gen/adexchangeseller2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/adexchangeseller2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/adexchangeseller2/src/lib.rs b/gen/adexchangeseller2/src/lib.rs index d94a69b8a6..dba465494b 100644 --- a/gen/adexchangeseller2/src/lib.rs +++ b/gen/adexchangeseller2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Ad Exchange Seller* crate version *5.0.2-beta-1+20171101*, where *20171101* is the exact revision of the *adexchangeseller:v2.0* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Ad Exchange Seller* crate version *5.0.2+20171101*, where *20171101* is the exact revision of the *adexchangeseller:v2.0* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Ad Exchange Seller* *v2* API can be found at the //! [official documentation site](https://developers.google.com/ad-exchange/seller-rest/). diff --git a/gen/adexperiencereport1-cli/Cargo.toml b/gen/adexperiencereport1-cli/Cargo.toml index 9ef2a07418..4d453e43f1 100644 --- a/gen/adexperiencereport1-cli/Cargo.toml +++ b/gen/adexperiencereport1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-adexperiencereport1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ad Experience Report (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexperiencereport1-cli" @@ -20,13 +20,13 @@ name = "adexperiencereport1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-adexperiencereport1] path = "../adexperiencereport1" -version = "4.0.1+20220303" +version = "5.0.2+20230114" + diff --git a/gen/adexperiencereport1-cli/README.md b/gen/adexperiencereport1-cli/README.md index 33ff49ee8b..051c041e73 100644 --- 
a/gen/adexperiencereport1-cli/README.md +++ b/gen/adexperiencereport1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Ad Experience Report* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *Ad Experience Report* API at revision *20230114*. The CLI is at version *5.0.2*. ```bash adexperiencereport1 [options] diff --git a/gen/adexperiencereport1-cli/mkdocs.yml b/gen/adexperiencereport1-cli/mkdocs.yml index 2b3b97318e..626eb2d8bb 100644 --- a/gen/adexperiencereport1-cli/mkdocs.yml +++ b/gen/adexperiencereport1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Ad Experience Report v4.0.1+20220303 +site_name: Ad Experience Report v5.0.2+20230114 site_url: http://byron.github.io/google-apis-rs/google-adexperiencereport1-cli site_description: A complete library to interact with Ad Experience Report (protocol v1) @@ -7,10 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/adexperiencerepo docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['sites_get.md', 'Sites', 'Get'] -- ['violating-sites_list.md', 'Violating Sites', 'List'] +nav: +- Home: 'index.md' +- 'Sites': + - 'Get': 'sites_get.md' +- 'Violating Sites': + - 'List': 'violating-sites_list.md' theme: readthedocs diff --git a/gen/adexperiencereport1-cli/src/client.rs b/gen/adexperiencereport1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/adexperiencereport1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, 
PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/adexperiencereport1-cli/src/main.rs b/gen/adexperiencereport1-cli/src/main.rs index 441b9e14c5..d85e0ee574 100644 --- a/gen/adexperiencereport1-cli/src/main.rs +++ b/gen/adexperiencereport1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_adexperiencereport1::{api, Error, oauth2}; +use google_adexperiencereport1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -297,7 +296,7 @@ async fn main() { let mut app = App::new("adexperiencereport1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230114") .about("Views Ad Experience Report data, and gets a list of sites that have a significant number of annoying ads.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adexperiencereport1_cli") .arg(Arg::with_name("folder") diff --git a/gen/adexperiencereport1/Cargo.toml b/gen/adexperiencereport1/Cargo.toml index 486d993520..46d7c67625 100644 --- a/gen/adexperiencereport1/Cargo.toml +++ b/gen/adexperiencereport1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-adexperiencereport1" -version = "5.0.2-beta-1+20230114" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ad Experience Report (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adexperiencereport1" homepage = "https://developers.google.com/ad-experience-report/" 
-documentation = "https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114" +documentation = "https://docs.rs/google-adexperiencereport1/5.0.2+20230114" license = "MIT" keywords = ["adexperiencereport", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/adexperiencereport1/README.md b/gen/adexperiencereport1/README.md index b8e67b8916..336e3de24c 100644 --- a/gen/adexperiencereport1/README.md +++ b/gen/adexperiencereport1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-adexperiencereport1` library allows access to all features of the *Google Ad Experience Report* service. -This documentation was generated from *Ad Experience Report* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *adexperiencereport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Ad Experience Report* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *adexperiencereport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Ad Experience Report* *v1* API can be found at the [official documentation site](https://developers.google.com/ad-experience-report/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/AdExperienceReport) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/AdExperienceReport) ... 
* sites - * [*get*](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/api::SiteGetCall) + * [*get*](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/api::SiteGetCall) * violating sites - * [*list*](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/api::ViolatingSiteListCall) + * [*list*](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/api::ViolatingSiteListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/AdExperienceReport)** +* **[Hub](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/AdExperienceReport)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::CallBuilder) -* **[Resources](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::CallBuilder) +* **[Resources](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::Part)** + * **[Parts](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -120,17 +120,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -140,29 +140,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::Delegate) to the -[Method Builder](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::Delegate) to the +[Method Builder](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::RequestValue) and -[decodable](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::RequestValue) and +[decodable](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-adexperiencereport1/5.0.2-beta-1+20230114/google_adexperiencereport1/client::RequestValue) are moved +* [request values](https://docs.rs/google-adexperiencereport1/5.0.2+20230114/google_adexperiencereport1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/adexperiencereport1/src/api.rs b/gen/adexperiencereport1/src/api.rs index a96913fa72..f5c03a98dc 100644 --- a/gen/adexperiencereport1/src/api.rs +++ b/gen/adexperiencereport1/src/api.rs @@ -97,7 +97,7 @@ impl<'a, S> AdExperienceReport { AdExperienceReport { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://adexperiencereport.googleapis.com/".to_string(), _root_url: "https://adexperiencereport.googleapis.com/".to_string(), } @@ -111,7 +111,7 @@ impl<'a, S> AdExperienceReport { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/adexperiencereport1/src/client.rs b/gen/adexperiencereport1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/adexperiencereport1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/adexperiencereport1/src/lib.rs b/gen/adexperiencereport1/src/lib.rs index 94f13d8a22..ef141504a5 100644 --- a/gen/adexperiencereport1/src/lib.rs +++ b/gen/adexperiencereport1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Ad Experience Report* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *adexperiencereport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Ad Experience Report* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *adexperiencereport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Ad Experience Report* *v1* API can be found at the //! [official documentation site](https://developers.google.com/ad-experience-report/). diff --git a/gen/admob1-cli/Cargo.toml b/gen/admob1-cli/Cargo.toml index ae9c0c8918..1b6ac316b7 100644 --- a/gen/admob1-cli/Cargo.toml +++ b/gen/admob1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-admob1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with AdMob (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/admob1-cli" @@ -20,13 +20,13 @@ name = "admob1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-admob1] path = "../admob1" -version = "4.0.1+20220303" +version = "5.0.2+20230124" + diff --git a/gen/admob1-cli/README.md b/gen/admob1-cli/README.md index f26a662485..bda7a3f1ea 100644 --- a/gen/admob1-cli/README.md +++ b/gen/admob1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage 
-This documentation was generated from the *AdMob* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *AdMob* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash admob1 [options] diff --git a/gen/admob1-cli/mkdocs.yml b/gen/admob1-cli/mkdocs.yml index 4e02bd1796..93a9622c8e 100644 --- a/gen/admob1-cli/mkdocs.yml +++ b/gen/admob1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: AdMob v4.0.1+20220303 +site_name: AdMob v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-admob1-cli site_description: A complete library to interact with AdMob (protocol v1) @@ -7,14 +7,15 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/admob1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_ad-units-list.md', 'Accounts', 'Ad Units List'] -- ['accounts_apps-list.md', 'Accounts', 'Apps List'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_mediation-report-generate.md', 'Accounts', 'Mediation Report Generate'] -- ['accounts_network-report-generate.md', 'Accounts', 'Network Report Generate'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Ad Units List': 'accounts_ad-units-list.md' + - 'Apps List': 'accounts_apps-list.md' + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Mediation Report Generate': 'accounts_mediation-report-generate.md' + - 'Network Report Generate': 'accounts_network-report-generate.md' theme: readthedocs diff --git a/gen/admob1-cli/src/client.rs b/gen/admob1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/admob1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use 
std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/admob1-cli/src/main.rs b/gen/admob1-cli/src/main.rs index 60c9c5fe19..a8f83b9038 100644 --- a/gen/admob1-cli/src/main.rs +++ b/gen/admob1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_admob1::{api, Error, oauth2}; +use google_admob1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -120,7 +119,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -231,7 +230,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -660,7 +659,7 @@ async fn main() { Some(false)), ]), ("mediation-report-generate", - Some(r##"Generates an AdMob Mediation report based on the provided report specification. 
Returns result of a server-side streaming RPC. The result is returned in a sequence of responses."##), + Some(r##"Generates an AdMob mediation report based on the provided report specification. Returns result of a server-side streaming RPC. The result is returned in a sequence of responses."##), "Details at http://byron.github.io/google-apis-rs/google_admob1_cli/accounts_mediation-report-generate", vec![ (Some(r##"parent"##), @@ -721,7 +720,7 @@ async fn main() { let mut app = App::new("admob1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230124") .about("The AdMob API allows publishers to programmatically get information about their AdMob account. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_admob1_cli") .arg(Arg::with_name("url") diff --git a/gen/admob1/Cargo.toml b/gen/admob1/Cargo.toml index 79cfa30626..6d5c02d50f 100644 --- a/gen/admob1/Cargo.toml +++ b/gen/admob1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-admob1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with AdMob (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/admob1" homepage = "https://developers.google.com/admob/api/" -documentation = "https://docs.rs/google-admob1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-admob1/5.0.2+20230124" license = "MIT" keywords = ["admob", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/admob1/README.md b/gen/admob1/README.md index b10ed11e99..a6435d7149 100644 --- a/gen/admob1/README.md +++ b/gen/admob1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-admob1` library allows access to all features of the *Google AdMob* service. 
-This documentation was generated from *AdMob* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *admob:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *AdMob* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *admob:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *AdMob* *v1* API can be found at the [official documentation site](https://developers.google.com/admob/api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/AdMob) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/AdMob) ... * accounts - * [*ad units list*](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/api::AccountAdUnitListCall), [*apps list*](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/api::AccountAppListCall), [*get*](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/api::AccountGetCall), [*list*](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/api::AccountListCall), [*mediation report generate*](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/api::AccountMediationReportGenerateCall) and [*network report generate*](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/api::AccountNetworkReportGenerateCall) + * [*ad units list*](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/api::AccountAdUnitListCall), [*apps list*](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/api::AccountAppListCall), [*get*](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/api::AccountGetCall), [*list*](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/api::AccountListCall), [*mediation report 
generate*](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/api::AccountMediationReportGenerateCall) and [*network report generate*](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/api::AccountNetworkReportGenerateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/AdMob)** +* **[Hub](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/AdMob)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::CallBuilder) -* **[Resources](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::CallBuilder) +* **[Resources](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::Part)** + * **[Parts](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable 
traits to further categorize them and ease browsing. @@ -118,17 +118,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -138,29 +138,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::Delegate) to the -[Method Builder](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::Delegate) to the +[Method Builder](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::RequestValue) and -[decodable](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::RequestValue) and +[decodable](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-admob1/5.0.2-beta-1+20230124/google_admob1/client::RequestValue) are moved +* [request values](https://docs.rs/google-admob1/5.0.2+20230124/google_admob1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/admob1/src/api.rs b/gen/admob1/src/api.rs index 146d539994..7c8edc17e8 100644 --- a/gen/admob1/src/api.rs +++ b/gen/admob1/src/api.rs @@ -123,7 +123,7 @@ impl<'a, S> AdMob { AdMob { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://admob.googleapis.com/".to_string(), _root_url: "https://admob.googleapis.com/".to_string(), } @@ -134,7 +134,7 @@ impl<'a, S> AdMob { } /// Set the user-agent header field to use in all requests to the server. 
- /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/admob1/src/client.rs b/gen/admob1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/admob1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/admob1/src/lib.rs b/gen/admob1/src/lib.rs index 6717d89c6f..c9b08094cf 100644 --- a/gen/admob1/src/lib.rs +++ b/gen/admob1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *AdMob* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *admob:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *AdMob* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *admob:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *AdMob* *v1* API can be found at the //! [official documentation site](https://developers.google.com/admob/api/). diff --git a/gen/adsense1d4-cli/Cargo.toml b/gen/adsense1d4-cli/Cargo.toml index 294cb9d9df..1f4ce94d62 100644 --- a/gen/adsense1d4-cli/Cargo.toml +++ b/gen/adsense1d4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-adsense1d4-cli" -version = "4.0.1+20201002" +version = "5.0.2+20201002" authors = ["Sebastian Thiel "] description = "A complete library to interact with AdSense (protocol v1.4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adsense1d4-cli" @@ -20,13 +20,13 @@ name = "adsense1d4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-adsense1d4] path = "../adsense1d4" -version = "4.0.1+20201002" +version = "5.0.2+20201002" + diff --git a/gen/adsense1d4-cli/README.md b/gen/adsense1d4-cli/README.md index b7d48485c5..adbbaf536d 100644 --- a/gen/adsense1d4-cli/README.md +++ b/gen/adsense1d4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *AdSense* API at revision *20201002*. The CLI is at version *4.0.1*. +This documentation was generated from the *AdSense* API at revision *20201002*. The CLI is at version *5.0.2*. ```bash adsense1d4 [options] diff --git a/gen/adsense1d4-cli/mkdocs.yml b/gen/adsense1d4-cli/mkdocs.yml index 8b22317e9f..f2426c33b4 100644 --- a/gen/adsense1d4-cli/mkdocs.yml +++ b/gen/adsense1d4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: AdSense v4.0.1+20201002 +site_name: AdSense v5.0.2+20201002 site_url: http://byron.github.io/google-apis-rs/google-adsense1d4-cli site_description: A complete library to interact with AdSense (protocol v1.4) @@ -7,47 +7,57 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/adsense1d4-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_adclients-get-ad-code.md', 'Accounts', 'Adclients Get Ad Code'] -- ['accounts_adclients-list.md', 'Accounts', 'Adclients List'] -- ['accounts_adunits-customchannels-list.md', 'Accounts', 'Adunits Customchannels List'] -- ['accounts_adunits-get.md', 'Accounts', 'Adunits Get'] -- ['accounts_adunits-get-ad-code.md', 'Accounts', 'Adunits Get Ad Code'] -- ['accounts_adunits-list.md', 'Accounts', 'Adunits List'] -- ['accounts_alerts-delete.md', 'Accounts', 'Alerts Delete'] -- ['accounts_alerts-list.md', 'Accounts', 'Alerts List'] -- ['accounts_customchannels-adunits-list.md', 'Accounts', 'Customchannels Adunits List'] -- ['accounts_customchannels-get.md', 'Accounts', 'Customchannels Get'] -- ['accounts_customchannels-list.md', 'Accounts', 'Customchannels List'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_payments-list.md', 'Accounts', 'Payments List'] -- ['accounts_reports-generate.md', 'Accounts', 'Reports Generate'] -- ['accounts_reports-saved-generate.md', 'Accounts', 'Reports Saved Generate'] -- ['accounts_reports-saved-list.md', 'Accounts', 'Reports Saved List'] -- ['accounts_savedadstyles-get.md', 
'Accounts', 'Savedadstyles Get'] -- ['accounts_savedadstyles-list.md', 'Accounts', 'Savedadstyles List'] -- ['accounts_urlchannels-list.md', 'Accounts', 'Urlchannels List'] -- ['adclients_list.md', 'Adclients', 'List'] -- ['adunits_customchannels-list.md', 'Adunits', 'Customchannels List'] -- ['adunits_get.md', 'Adunits', 'Get'] -- ['adunits_get-ad-code.md', 'Adunits', 'Get Ad Code'] -- ['adunits_list.md', 'Adunits', 'List'] -- ['alerts_delete.md', 'Alerts', 'Delete'] -- ['alerts_list.md', 'Alerts', 'List'] -- ['customchannels_adunits-list.md', 'Customchannels', 'Adunits List'] -- ['customchannels_get.md', 'Customchannels', 'Get'] -- ['customchannels_list.md', 'Customchannels', 'List'] -- ['metadata_dimensions-list.md', 'Metadata', 'Dimensions List'] -- ['metadata_metrics-list.md', 'Metadata', 'Metrics List'] -- ['payments_list.md', 'Payments', 'List'] -- ['reports_generate.md', 'Reports', 'Generate'] -- ['reports_saved-generate.md', 'Reports', 'Saved Generate'] -- ['reports_saved-list.md', 'Reports', 'Saved List'] -- ['savedadstyles_get.md', 'Savedadstyles', 'Get'] -- ['savedadstyles_list.md', 'Savedadstyles', 'List'] -- ['urlchannels_list.md', 'Urlchannels', 'List'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Adclients Get Ad Code': 'accounts_adclients-get-ad-code.md' + - 'Adclients List': 'accounts_adclients-list.md' + - 'Adunits Customchannels List': 'accounts_adunits-customchannels-list.md' + - 'Adunits Get': 'accounts_adunits-get.md' + - 'Adunits Get Ad Code': 'accounts_adunits-get-ad-code.md' + - 'Adunits List': 'accounts_adunits-list.md' + - 'Alerts Delete': 'accounts_alerts-delete.md' + - 'Alerts List': 'accounts_alerts-list.md' + - 'Customchannels Adunits List': 'accounts_customchannels-adunits-list.md' + - 'Customchannels Get': 'accounts_customchannels-get.md' + - 'Customchannels List': 'accounts_customchannels-list.md' + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Payments List': 'accounts_payments-list.md' + - 'Reports Generate': 
'accounts_reports-generate.md' + - 'Reports Saved Generate': 'accounts_reports-saved-generate.md' + - 'Reports Saved List': 'accounts_reports-saved-list.md' + - 'Savedadstyles Get': 'accounts_savedadstyles-get.md' + - 'Savedadstyles List': 'accounts_savedadstyles-list.md' + - 'Urlchannels List': 'accounts_urlchannels-list.md' +- 'Adclients': + - 'List': 'adclients_list.md' +- 'Adunits': + - 'Customchannels List': 'adunits_customchannels-list.md' + - 'Get': 'adunits_get.md' + - 'Get Ad Code': 'adunits_get-ad-code.md' + - 'List': 'adunits_list.md' +- 'Alerts': + - 'Delete': 'alerts_delete.md' + - 'List': 'alerts_list.md' +- 'Customchannels': + - 'Adunits List': 'customchannels_adunits-list.md' + - 'Get': 'customchannels_get.md' + - 'List': 'customchannels_list.md' +- 'Metadata': + - 'Dimensions List': 'metadata_dimensions-list.md' + - 'Metrics List': 'metadata_metrics-list.md' +- 'Payments': + - 'List': 'payments_list.md' +- 'Reports': + - 'Generate': 'reports_generate.md' + - 'Saved Generate': 'reports_saved-generate.md' + - 'Saved List': 'reports_saved-list.md' +- 'Savedadstyles': + - 'Get': 'savedadstyles_get.md' + - 'List': 'savedadstyles_list.md' +- 'Urlchannels': + - 'List': 'urlchannels_list.md' theme: readthedocs diff --git a/gen/adsense1d4-cli/src/client.rs b/gen/adsense1d4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/adsense1d4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - 
-// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/adsense1d4-cli/src/main.rs b/gen/adsense1d4-cli/src/main.rs index e1b1dcd937..520121bdba 100644 --- a/gen/adsense1d4-cli/src/main.rs +++ b/gen/adsense1d4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_adsense1d4::{api, Error, oauth2}; +use google_adsense1d4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -117,7 +116,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -176,7 +175,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -339,10 +338,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-inactive" => { - call = call.include_inactive(arg_from_str(value.unwrap_or("false"), err, "include-inactive", "boolean")); + call = call.include_inactive( 
value.map(|v| arg_from_str(v, err, "include-inactive", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -501,10 +500,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-inactive" => { - call = call.include_inactive(arg_from_str(value.unwrap_or("false"), err, "include-inactive", "boolean")); + call = call.include_inactive( value.map(|v| arg_from_str(v, err, "include-inactive", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -615,7 +614,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -671,7 +670,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "tree" => { - call = call.tree(arg_from_str(value.unwrap_or("false"), err, "tree", "boolean")); + call = call.tree( value.map(|v| arg_from_str(v, err, "tree", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -730,7 +729,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -839,10 +838,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-timezone-reporting" => { - call = call.use_timezone_reporting(arg_from_str(value.unwrap_or("false"), err, "use-timezone-reporting", "boolean")); + call = call.use_timezone_reporting( value.map(|v| arg_from_str(v, err, 
"use-timezone-reporting", "boolean")).unwrap_or(false)); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "sort" => { call = call.add_sort(value.unwrap_or("")); @@ -851,7 +850,7 @@ where call = call.add_metric(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -928,10 +927,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -993,7 +992,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1104,7 +1103,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1163,7 +1162,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1222,7 +1221,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1281,7 +1280,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1444,10 +1443,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-inactive" => { - call = call.include_inactive(arg_from_str(value.unwrap_or("false"), err, "include-inactive", "boolean")); + call = call.include_inactive( value.map(|v| arg_from_str(v, err, "include-inactive", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1606,10 +1605,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-inactive" => { - call = call.include_inactive(arg_from_str(value.unwrap_or("false"), err, "include-inactive", "boolean")); + call = call.include_inactive( value.map(|v| arg_from_str(v, err, "include-inactive", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1720,7 +1719,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1933,10 +1932,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-timezone-reporting" => { - call = call.use_timezone_reporting(arg_from_str(value.unwrap_or("false"), err, "use-timezone-reporting", "boolean")); + call = call.use_timezone_reporting( value.map(|v| arg_from_str(v, err, "use-timezone-reporting", "boolean")).unwrap_or(false)); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "sort" => { call = call.add_sort(value.unwrap_or("")); @@ -1945,7 +1944,7 @@ where call = call.add_metric(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -2025,10 +2024,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -2090,7 
+2089,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2201,7 +2200,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2260,7 +2259,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3563,7 +3562,7 @@ async fn main() { let mut app = App::new("adsense1d4") .author("Sebastian Thiel ") - .version("4.0.1+20201002") + .version("5.0.2+20201002") .about("Accesses AdSense publishers' inventory and generates performance reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adsense1d4_cli") .arg(Arg::with_name("url") diff --git a/gen/adsense1d4/Cargo.toml b/gen/adsense1d4/Cargo.toml index 7868af30a7..5db5942424 100644 --- a/gen/adsense1d4/Cargo.toml +++ b/gen/adsense1d4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-adsense1d4" -version = "5.0.2-beta-1+20201002" +version = "5.0.2+20201002" authors = ["Sebastian Thiel "] description = "A complete library to interact with AdSense (protocol v1.4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adsense1d4" homepage = "https://developers.google.com/adsense/management/" -documentation = "https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002" +documentation = 
"https://docs.rs/google-adsense1d4/5.0.2+20201002" license = "MIT" keywords = ["adsense", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/adsense1d4/README.md b/gen/adsense1d4/README.md index 9eb628c322..a02af12bb7 100644 --- a/gen/adsense1d4/README.md +++ b/gen/adsense1d4/README.md @@ -5,40 +5,40 @@ DO NOT EDIT ! --> The `google-adsense1d4` library allows access to all features of the *Google AdSense* service. -This documentation was generated from *AdSense* crate version *5.0.2-beta-1+20201002*, where *20201002* is the exact revision of the *adsense:v1.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *AdSense* crate version *5.0.2+20201002*, where *20201002* is the exact revision of the *adsense:v1.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *AdSense* *v1d4* API can be found at the [official documentation site](https://developers.google.com/adsense/management/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/AdSense) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/AdSense) ... 
-* [accounts](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::Account) - * [*adclients get ad code*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountAdclientGetAdCodeCall), [*adclients list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountAdclientListCall), [*adunits customchannels list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountAdunitCustomchannelListCall), [*adunits get*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountAdunitGetCall), [*adunits get ad code*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountAdunitGetAdCodeCall), [*adunits list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountAdunitListCall), [*alerts delete*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountAlertDeleteCall), [*alerts list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountAlertListCall), [*customchannels adunits list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountCustomchannelAdunitListCall), [*customchannels get*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountCustomchannelGetCall), [*customchannels list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountCustomchannelListCall), [*get*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountGetCall), [*list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountListCall), [*payments list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountPaymentListCall), [*reports generate*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountReportGenerateCall), 
[*reports saved generate*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountReportSavedGenerateCall), [*reports saved list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountReportSavedListCall), [*savedadstyles get*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountSavedadstyleGetCall), [*savedadstyles list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountSavedadstyleListCall) and [*urlchannels list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountUrlchannelListCall) +* [accounts](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::Account) + * [*adclients get ad code*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountAdclientGetAdCodeCall), [*adclients list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountAdclientListCall), [*adunits customchannels list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountAdunitCustomchannelListCall), [*adunits get*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountAdunitGetCall), [*adunits get ad code*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountAdunitGetAdCodeCall), [*adunits list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountAdunitListCall), [*alerts delete*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountAlertDeleteCall), [*alerts list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountAlertListCall), [*customchannels adunits list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountCustomchannelAdunitListCall), [*customchannels get*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountCustomchannelGetCall), 
[*customchannels list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountCustomchannelListCall), [*get*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountGetCall), [*list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountListCall), [*payments list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountPaymentListCall), [*reports generate*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountReportGenerateCall), [*reports saved generate*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountReportSavedGenerateCall), [*reports saved list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountReportSavedListCall), [*savedadstyles get*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountSavedadstyleGetCall), [*savedadstyles list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountSavedadstyleListCall) and [*urlchannels list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountUrlchannelListCall) * adclients - * [*list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AdclientListCall) + * [*list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AdclientListCall) * adunits - * [*customchannels list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AdunitCustomchannelListCall), [*get*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AdunitGetCall), [*get ad code*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AdunitGetAdCodeCall) and [*list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AdunitListCall) -* [alerts](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::Alert) - * 
[*delete*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AlertDeleteCall) and [*list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AlertListCall) + * [*customchannels list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AdunitCustomchannelListCall), [*get*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AdunitGetCall), [*get ad code*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AdunitGetAdCodeCall) and [*list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AdunitListCall) +* [alerts](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::Alert) + * [*delete*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AlertDeleteCall) and [*list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AlertListCall) * customchannels - * [*adunits list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::CustomchannelAdunitListCall), [*get*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::CustomchannelGetCall) and [*list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::CustomchannelListCall) -* [metadata](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::Metadata) - * [*dimensions list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::MetadataDimensionListCall) and [*metrics list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::MetadataMetricListCall) -* [payments](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::Payment) - * [*list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::PaymentListCall) + * [*adunits 
list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::CustomchannelAdunitListCall), [*get*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::CustomchannelGetCall) and [*list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::CustomchannelListCall) +* [metadata](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::Metadata) + * [*dimensions list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::MetadataDimensionListCall) and [*metrics list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::MetadataMetricListCall) +* [payments](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::Payment) + * [*list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::PaymentListCall) * reports - * [*generate*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::ReportGenerateCall), [*saved generate*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::ReportSavedGenerateCall) and [*saved list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::ReportSavedListCall) + * [*generate*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::ReportGenerateCall), [*saved generate*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::ReportSavedGenerateCall) and [*saved list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::ReportSavedListCall) * savedadstyles - * [*get*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::SavedadstyleGetCall) and [*list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::SavedadstyleListCall) + * [*get*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::SavedadstyleGetCall) and 
[*list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::SavedadstyleListCall) * urlchannels - * [*list*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::UrlchannelListCall) + * [*list*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::UrlchannelListCall) Download supported by ... -* [*reports generate accounts*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::AccountReportGenerateCall) -* [*generate reports*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/api::ReportGenerateCall) +* [*reports generate accounts*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::AccountReportGenerateCall) +* [*generate reports*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/api::ReportGenerateCall) @@ -46,17 +46,17 @@ Download supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/AdSense)** +* **[Hub](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/AdSense)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::CallBuilder) -* **[Resources](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::Part)** + * **[Parts](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -169,17 +169,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -189,29 +189,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::Delegate) to the -[Method Builder](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::Delegate) to the +[Method Builder](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::RequestValue) and -[decodable](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::RequestValue) and +[decodable](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-adsense1d4/5.0.2-beta-1+20201002/google_adsense1d4/client::RequestValue) are moved +* [request values](https://docs.rs/google-adsense1d4/5.0.2+20201002/google_adsense1d4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/adsense1d4/src/api.rs b/gen/adsense1d4/src/api.rs index 3a181cd990..c8c0bc1916 100644 --- a/gen/adsense1d4/src/api.rs +++ b/gen/adsense1d4/src/api.rs @@ -132,7 +132,7 @@ impl<'a, S> AdSense { AdSense { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/adsense/v1.4/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -170,7 +170,7 @@ impl<'a, S> AdSense { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/adsense1d4/src/client.rs b/gen/adsense1d4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/adsense1d4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/adsense1d4/src/lib.rs b/gen/adsense1d4/src/lib.rs index 6d9c217ffc..44e3863900 100644 --- a/gen/adsense1d4/src/lib.rs +++ b/gen/adsense1d4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *AdSense* crate version *5.0.2-beta-1+20201002*, where *20201002* is the exact revision of the *adsense:v1.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *AdSense* crate version *5.0.2+20201002*, where *20201002* is the exact revision of the *adsense:v1.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *AdSense* *v1d4* API can be found at the //! [official documentation site](https://developers.google.com/adsense/management/). diff --git a/gen/adsense2-cli/Cargo.toml b/gen/adsense2-cli/Cargo.toml index 306431d394..a8e2407f00 100644 --- a/gen/adsense2-cli/Cargo.toml +++ b/gen/adsense2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-adsense2-cli" -version = "4.0.1+20220304" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with adsense (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adsense2-cli" @@ -20,13 +20,13 @@ name = "adsense2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-adsense2] path = "../adsense2" -version = "4.0.1+20220304" +version = "5.0.2+20230124" + diff --git a/gen/adsense2-cli/README.md b/gen/adsense2-cli/README.md index b0204ed333..f5e86e24e3 100644 --- a/gen/adsense2-cli/README.md +++ b/gen/adsense2-cli/README.md @@ -25,28 +25,37 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *adsense* API at revision *20220304*. The CLI is at version *4.0.1*. +This documentation was generated from the *adsense* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash adsense2 [options] accounts + adclients-adunits-create (-r )... [-p ]... [-o ] adclients-adunits-get [-p ]... [-o ] adclients-adunits-get-adcode [-p ]... [-o ] adclients-adunits-list [-p ]... [-o ] adclients-adunits-list-linked-custom-channels [-p ]... [-o ] + adclients-adunits-patch (-r )... [-p ]... [-o ] + adclients-customchannels-create (-r )... [-p ]... [-o ] + adclients-customchannels-delete [-p ]... [-o ] adclients-customchannels-get [-p ]... [-o ] adclients-customchannels-list [-p ]... [-o ] adclients-customchannels-list-linked-ad-units [-p ]... [-o ] + adclients-customchannels-patch (-r )... [-p ]... [-o ] + adclients-get [-p ]... [-o ] adclients-get-adcode [-p ]... [-o ] adclients-list [-p ]... [-o ] + adclients-urlchannels-get [-p ]... [-o ] adclients-urlchannels-list [-p ]... [-o ] alerts-list [-p ]... [-o ] get [-p ]... [-o ] + get-ad-blocking-recovery-tag [-p ]... [-o ] list [-p ]... [-o ] list-child-accounts [-p ]... [-o ] payments-list [-p ]... [-o ] reports-generate [-p ]... [-o ] reports-generate-csv [-p ]... [-o ] + reports-get-saved [-p ]... [-o ] reports-saved-generate [-p ]... [-o ] reports-saved-generate-csv [-p ]... [-o ] reports-saved-list [-p ]... 
[-o ] diff --git a/gen/adsense2-cli/mkdocs.yml b/gen/adsense2-cli/mkdocs.yml index ce13a3e34f..5c0bb0cf13 100644 --- a/gen/adsense2-cli/mkdocs.yml +++ b/gen/adsense2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: adsense v4.0.1+20220304 +site_name: adsense v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-adsense2-cli site_description: A complete library to interact with adsense (protocol v2) @@ -7,30 +7,40 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/adsense2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_adclients-adunits-get.md', 'Accounts', 'Adclients Adunits Get'] -- ['accounts_adclients-adunits-get-adcode.md', 'Accounts', 'Adclients Adunits Get Adcode'] -- ['accounts_adclients-adunits-list.md', 'Accounts', 'Adclients Adunits List'] -- ['accounts_adclients-adunits-list-linked-custom-channels.md', 'Accounts', 'Adclients Adunits List Linked Custom Channels'] -- ['accounts_adclients-customchannels-get.md', 'Accounts', 'Adclients Customchannels Get'] -- ['accounts_adclients-customchannels-list.md', 'Accounts', 'Adclients Customchannels List'] -- ['accounts_adclients-customchannels-list-linked-ad-units.md', 'Accounts', 'Adclients Customchannels List Linked Ad Units'] -- ['accounts_adclients-get-adcode.md', 'Accounts', 'Adclients Get Adcode'] -- ['accounts_adclients-list.md', 'Accounts', 'Adclients List'] -- ['accounts_adclients-urlchannels-list.md', 'Accounts', 'Adclients Urlchannels List'] -- ['accounts_alerts-list.md', 'Accounts', 'Alerts List'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_list-child-accounts.md', 'Accounts', 'List Child Accounts'] -- ['accounts_payments-list.md', 'Accounts', 'Payments List'] -- ['accounts_reports-generate.md', 'Accounts', 'Reports Generate'] -- ['accounts_reports-generate-csv.md', 'Accounts', 'Reports Generate Csv'] -- ['accounts_reports-saved-generate.md', 'Accounts', 'Reports Saved Generate'] -- 
['accounts_reports-saved-generate-csv.md', 'Accounts', 'Reports Saved Generate Csv'] -- ['accounts_reports-saved-list.md', 'Accounts', 'Reports Saved List'] -- ['accounts_sites-get.md', 'Accounts', 'Sites Get'] -- ['accounts_sites-list.md', 'Accounts', 'Sites List'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Adclients Adunits Create': 'accounts_adclients-adunits-create.md' + - 'Adclients Adunits Get': 'accounts_adclients-adunits-get.md' + - 'Adclients Adunits Get Adcode': 'accounts_adclients-adunits-get-adcode.md' + - 'Adclients Adunits List': 'accounts_adclients-adunits-list.md' + - 'Adclients Adunits List Linked Custom Channels': 'accounts_adclients-adunits-list-linked-custom-channels.md' + - 'Adclients Adunits Patch': 'accounts_adclients-adunits-patch.md' + - 'Adclients Customchannels Create': 'accounts_adclients-customchannels-create.md' + - 'Adclients Customchannels Delete': 'accounts_adclients-customchannels-delete.md' + - 'Adclients Customchannels Get': 'accounts_adclients-customchannels-get.md' + - 'Adclients Customchannels List': 'accounts_adclients-customchannels-list.md' + - 'Adclients Customchannels List Linked Ad Units': 'accounts_adclients-customchannels-list-linked-ad-units.md' + - 'Adclients Customchannels Patch': 'accounts_adclients-customchannels-patch.md' + - 'Adclients Get': 'accounts_adclients-get.md' + - 'Adclients Get Adcode': 'accounts_adclients-get-adcode.md' + - 'Adclients List': 'accounts_adclients-list.md' + - 'Adclients Urlchannels Get': 'accounts_adclients-urlchannels-get.md' + - 'Adclients Urlchannels List': 'accounts_adclients-urlchannels-list.md' + - 'Alerts List': 'accounts_alerts-list.md' + - 'Get': 'accounts_get.md' + - 'Get Ad Blocking Recovery Tag': 'accounts_get-ad-blocking-recovery-tag.md' + - 'List': 'accounts_list.md' + - 'List Child Accounts': 'accounts_list-child-accounts.md' + - 'Payments List': 'accounts_payments-list.md' + - 'Reports Generate': 'accounts_reports-generate.md' + - 'Reports Generate Csv': 
'accounts_reports-generate-csv.md' + - 'Reports Get Saved': 'accounts_reports-get-saved.md' + - 'Reports Saved Generate': 'accounts_reports-saved-generate.md' + - 'Reports Saved Generate Csv': 'accounts_reports-saved-generate-csv.md' + - 'Reports Saved List': 'accounts_reports-saved-list.md' + - 'Sites Get': 'accounts_sites-get.md' + - 'Sites List': 'accounts_sites-list.md' theme: readthedocs diff --git a/gen/adsense2-cli/src/client.rs b/gen/adsense2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/adsense2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/adsense2-cli/src/main.rs b/gen/adsense2-cli/src/main.rs index 40c73cacf1..b524dfbad9 100644 --- a/gen/adsense2-cli/src/main.rs +++ b/gen/adsense2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_adsense2::{api, Error, oauth2}; +use google_adsense2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,96 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _accounts_adclients_adunits_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "content-ads-settings.size" => Some(("contentAdsSettings.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-ads-settings.type" => Some(("contentAdsSettings.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "reporting-dimension-id" => Some(("reportingDimensionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["content-ads-settings", "display-name", "name", "reporting-dimension-id", "size", "state", "type"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::AdUnit = json::value::from_value(object).unwrap(); + let mut call = self.hub.accounts().adclients_adunits_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_adclients_adunits_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adclients_adunits_get(opt.value_of("name").unwrap_or("")); @@ -165,7 +254,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -224,7 +313,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -273,6 +362,240 @@ where } } + async fn _accounts_adclients_adunits_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "content-ads-settings.size" => Some(("contentAdsSettings.size", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-ads-settings.type" => Some(("contentAdsSettings.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "reporting-dimension-id" => Some(("reportingDimensionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["content-ads-settings", "display-name", "name", "reporting-dimension-id", "size", "state", "type"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::AdUnit = json::value::from_value(object).unwrap(); + let mut call = self.hub.accounts().adclients_adunits_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let 
(key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_adclients_customchannels_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = 
field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "active" => Some(("active", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "reporting-dimension-id" => Some(("reportingDimensionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["active", "display-name", "name", "reporting-dimension-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CustomChannel = json::value::from_value(object).unwrap(); + let mut call = self.hub.accounts().adclients_customchannels_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_adclients_customchannels_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().adclients_customchannels_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_adclients_customchannels_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adclients_customchannels_get(opt.value_of("name").unwrap_or("")); @@ -335,7 +658,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -394,7 +717,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -443,6 +766,150 @@ where } } + async fn _accounts_adclients_customchannels_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); 
+ } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "active" => Some(("active", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "reporting-dimension-id" => Some(("reportingDimensionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["active", "display-name", "name", "reporting-dimension-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CustomChannel = json::value::from_value(object).unwrap(); + let mut call = self.hub.accounts().adclients_customchannels_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_adclients_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().adclients_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_adclients_get_adcode(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adclients_get_adcode(opt.value_of("name").unwrap_or("")); @@ -505,7 +972,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -554,6 +1021,58 @@ where } } + async fn _accounts_adclients_urlchannels_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().adclients_urlchannels_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_adclients_urlchannels_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().adclients_urlchannels_list(opt.value_of("parent").unwrap_or("")); @@ -564,7 +1083,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -721,6 +1240,58 @@ where } } + async fn _accounts_get_ad_blocking_recovery_tag(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().get_ad_blocking_recovery_tag(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().list(); @@ -731,7 +1302,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -790,7 +1361,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -898,13 +1469,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"start-date-year" => { - call = call.start_date_year(arg_from_str(value.unwrap_or("-0"), err, "start-date-year", "integer")); + call = call.start_date_year( value.map(|v| arg_from_str(v, err, "start-date-year", "int32")).unwrap_or(-0)); }, "start-date-month" => { - call = call.start_date_month(arg_from_str(value.unwrap_or("-0"), err, "start-date-month", "integer")); + call = call.start_date_month( value.map(|v| arg_from_str(v, err, "start-date-month", "int32")).unwrap_or(-0)); }, "start-date-day" => { - call = call.start_date_day(arg_from_str(value.unwrap_or("-0"), err, "start-date-day", "integer")); + call = call.start_date_day( value.map(|v| arg_from_str(v, err, "start-date-day", "int32")).unwrap_or(-0)); }, "reporting-time-zone" => { call = call.reporting_time_zone(value.unwrap_or("")); @@ -916,7 +1487,7 @@ where call = call.add_metrics(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -925,13 +1496,13 @@ where call = call.add_filters(value.unwrap_or("")); }, "end-date-year" => { - call = call.end_date_year(arg_from_str(value.unwrap_or("-0"), err, "end-date-year", "integer")); + call = call.end_date_year( value.map(|v| arg_from_str(v, err, "end-date-year", "int32")).unwrap_or(-0)); }, "end-date-month" => { - call = call.end_date_month(arg_from_str(value.unwrap_or("-0"), err, "end-date-month", "integer")); + call = call.end_date_month( value.map(|v| arg_from_str(v, err, "end-date-month", "int32")).unwrap_or(-0)); }, "end-date-day" => { - call = call.end_date_day(arg_from_str(value.unwrap_or("-0"), err, "end-date-day", "integer")); + call = call.end_date_day( value.map(|v| arg_from_str(v, err, "end-date-day", "int32")).unwrap_or(-0)); }, "dimensions" => { call = call.add_dimensions(value.unwrap_or("")); @@ -996,13 +1567,13 @@ where 
let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-date-year" => { - call = call.start_date_year(arg_from_str(value.unwrap_or("-0"), err, "start-date-year", "integer")); + call = call.start_date_year( value.map(|v| arg_from_str(v, err, "start-date-year", "int32")).unwrap_or(-0)); }, "start-date-month" => { - call = call.start_date_month(arg_from_str(value.unwrap_or("-0"), err, "start-date-month", "integer")); + call = call.start_date_month( value.map(|v| arg_from_str(v, err, "start-date-month", "int32")).unwrap_or(-0)); }, "start-date-day" => { - call = call.start_date_day(arg_from_str(value.unwrap_or("-0"), err, "start-date-day", "integer")); + call = call.start_date_day( value.map(|v| arg_from_str(v, err, "start-date-day", "int32")).unwrap_or(-0)); }, "reporting-time-zone" => { call = call.reporting_time_zone(value.unwrap_or("")); @@ -1014,7 +1585,7 @@ where call = call.add_metrics(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -1023,13 +1594,13 @@ where call = call.add_filters(value.unwrap_or("")); }, "end-date-year" => { - call = call.end_date_year(arg_from_str(value.unwrap_or("-0"), err, "end-date-year", "integer")); + call = call.end_date_year( value.map(|v| arg_from_str(v, err, "end-date-year", "int32")).unwrap_or(-0)); }, "end-date-month" => { - call = call.end_date_month(arg_from_str(value.unwrap_or("-0"), err, "end-date-month", "integer")); + call = call.end_date_month( value.map(|v| arg_from_str(v, err, "end-date-month", "int32")).unwrap_or(-0)); }, "end-date-day" => { - call = call.end_date_day(arg_from_str(value.unwrap_or("-0"), err, "end-date-day", "integer")); + call = call.end_date_day( value.map(|v| arg_from_str(v, err, "end-date-day", "int32")).unwrap_or(-0)); }, "dimensions" => { call = 
call.add_dimensions(value.unwrap_or("")); @@ -1087,6 +1658,58 @@ where } } + async fn _accounts_reports_get_saved(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().reports_get_saved(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_reports_saved_generate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().reports_saved_generate(opt.value_of("name").unwrap_or("")); @@ -1094,13 +1717,13 @@ where let (key, value) = 
parse_kv_arg(&*parg, err, false); match key { "start-date-year" => { - call = call.start_date_year(arg_from_str(value.unwrap_or("-0"), err, "start-date-year", "integer")); + call = call.start_date_year( value.map(|v| arg_from_str(v, err, "start-date-year", "int32")).unwrap_or(-0)); }, "start-date-month" => { - call = call.start_date_month(arg_from_str(value.unwrap_or("-0"), err, "start-date-month", "integer")); + call = call.start_date_month( value.map(|v| arg_from_str(v, err, "start-date-month", "int32")).unwrap_or(-0)); }, "start-date-day" => { - call = call.start_date_day(arg_from_str(value.unwrap_or("-0"), err, "start-date-day", "integer")); + call = call.start_date_day( value.map(|v| arg_from_str(v, err, "start-date-day", "int32")).unwrap_or(-0)); }, "reporting-time-zone" => { call = call.reporting_time_zone(value.unwrap_or("")); @@ -1109,13 +1732,13 @@ where call = call.language_code(value.unwrap_or("")); }, "end-date-year" => { - call = call.end_date_year(arg_from_str(value.unwrap_or("-0"), err, "end-date-year", "integer")); + call = call.end_date_year( value.map(|v| arg_from_str(v, err, "end-date-year", "int32")).unwrap_or(-0)); }, "end-date-month" => { - call = call.end_date_month(arg_from_str(value.unwrap_or("-0"), err, "end-date-month", "integer")); + call = call.end_date_month( value.map(|v| arg_from_str(v, err, "end-date-month", "int32")).unwrap_or(-0)); }, "end-date-day" => { - call = call.end_date_day(arg_from_str(value.unwrap_or("-0"), err, "end-date-day", "integer")); + call = call.end_date_day( value.map(|v| arg_from_str(v, err, "end-date-day", "int32")).unwrap_or(-0)); }, "date-range" => { call = call.date_range(value.unwrap_or("")); @@ -1177,13 +1800,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-date-year" => { - call = call.start_date_year(arg_from_str(value.unwrap_or("-0"), err, "start-date-year", "integer")); + call = call.start_date_year( value.map(|v| arg_from_str(v, err, "start-date-year", 
"int32")).unwrap_or(-0)); }, "start-date-month" => { - call = call.start_date_month(arg_from_str(value.unwrap_or("-0"), err, "start-date-month", "integer")); + call = call.start_date_month( value.map(|v| arg_from_str(v, err, "start-date-month", "int32")).unwrap_or(-0)); }, "start-date-day" => { - call = call.start_date_day(arg_from_str(value.unwrap_or("-0"), err, "start-date-day", "integer")); + call = call.start_date_day( value.map(|v| arg_from_str(v, err, "start-date-day", "int32")).unwrap_or(-0)); }, "reporting-time-zone" => { call = call.reporting_time_zone(value.unwrap_or("")); @@ -1192,13 +1815,13 @@ where call = call.language_code(value.unwrap_or("")); }, "end-date-year" => { - call = call.end_date_year(arg_from_str(value.unwrap_or("-0"), err, "end-date-year", "integer")); + call = call.end_date_year( value.map(|v| arg_from_str(v, err, "end-date-year", "int32")).unwrap_or(-0)); }, "end-date-month" => { - call = call.end_date_month(arg_from_str(value.unwrap_or("-0"), err, "end-date-month", "integer")); + call = call.end_date_month( value.map(|v| arg_from_str(v, err, "end-date-month", "int32")).unwrap_or(-0)); }, "end-date-day" => { - call = call.end_date_day(arg_from_str(value.unwrap_or("-0"), err, "end-date-day", "integer")); + call = call.end_date_day( value.map(|v| arg_from_str(v, err, "end-date-day", "int32")).unwrap_or(-0)); }, "date-range" => { call = call.date_range(value.unwrap_or("")); @@ -1263,7 +1886,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1374,7 +1997,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1430,6 +2053,9 @@ where match self.opt.subcommand() { ("accounts", Some(opt)) => { match opt.subcommand() { + ("adclients-adunits-create", Some(opt)) => { + call_result = self._accounts_adclients_adunits_create(opt, dry_run, &mut err).await; + }, ("adclients-adunits-get", Some(opt)) => { call_result = self._accounts_adclients_adunits_get(opt, dry_run, &mut err).await; }, @@ -1442,6 +2068,15 @@ where ("adclients-adunits-list-linked-custom-channels", Some(opt)) => { call_result = self._accounts_adclients_adunits_list_linked_custom_channels(opt, dry_run, &mut err).await; }, + ("adclients-adunits-patch", Some(opt)) => { + call_result = self._accounts_adclients_adunits_patch(opt, dry_run, &mut err).await; + }, + ("adclients-customchannels-create", Some(opt)) => { + call_result = self._accounts_adclients_customchannels_create(opt, dry_run, &mut err).await; + }, + ("adclients-customchannels-delete", Some(opt)) => { + call_result = self._accounts_adclients_customchannels_delete(opt, dry_run, &mut err).await; + }, ("adclients-customchannels-get", Some(opt)) => { call_result = self._accounts_adclients_customchannels_get(opt, dry_run, &mut err).await; }, @@ -1451,12 +2086,21 @@ where ("adclients-customchannels-list-linked-ad-units", Some(opt)) => { call_result = self._accounts_adclients_customchannels_list_linked_ad_units(opt, dry_run, &mut err).await; }, + ("adclients-customchannels-patch", Some(opt)) => { + call_result = self._accounts_adclients_customchannels_patch(opt, dry_run, &mut err).await; + }, + ("adclients-get", Some(opt)) => { + call_result = self._accounts_adclients_get(opt, dry_run, &mut err).await; + }, ("adclients-get-adcode", Some(opt)) => { call_result = self._accounts_adclients_get_adcode(opt, dry_run, &mut err).await; }, ("adclients-list", Some(opt)) => { call_result = self._accounts_adclients_list(opt, dry_run, &mut err).await; }, + ("adclients-urlchannels-get", Some(opt)) => { + call_result = 
self._accounts_adclients_urlchannels_get(opt, dry_run, &mut err).await; + }, ("adclients-urlchannels-list", Some(opt)) => { call_result = self._accounts_adclients_urlchannels_list(opt, dry_run, &mut err).await; }, @@ -1466,6 +2110,9 @@ where ("get", Some(opt)) => { call_result = self._accounts_get(opt, dry_run, &mut err).await; }, + ("get-ad-blocking-recovery-tag", Some(opt)) => { + call_result = self._accounts_get_ad_blocking_recovery_tag(opt, dry_run, &mut err).await; + }, ("list", Some(opt)) => { call_result = self._accounts_list(opt, dry_run, &mut err).await; }, @@ -1481,6 +2128,9 @@ where ("reports-generate-csv", Some(opt)) => { call_result = self._accounts_reports_generate_csv(opt, dry_run, &mut err).await; }, + ("reports-get-saved", Some(opt)) => { + call_result = self._accounts_reports_get_saved(opt, dry_run, &mut err).await; + }, ("reports-saved-generate", Some(opt)) => { call_result = self._accounts_reports_saved_generate(opt, dry_run, &mut err).await; }, @@ -1575,7 +2225,35 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("accounts", "methods: 'adclients-adunits-get', 'adclients-adunits-get-adcode', 'adclients-adunits-list', 'adclients-adunits-list-linked-custom-channels', 'adclients-customchannels-get', 'adclients-customchannels-list', 'adclients-customchannels-list-linked-ad-units', 'adclients-get-adcode', 'adclients-list', 'adclients-urlchannels-list', 'alerts-list', 'get', 'list', 'list-child-accounts', 'payments-list', 'reports-generate', 'reports-generate-csv', 'reports-saved-generate', 'reports-saved-generate-csv', 'reports-saved-list', 'sites-get' and 'sites-list'", vec![ + ("accounts", "methods: 'adclients-adunits-create', 'adclients-adunits-get', 'adclients-adunits-get-adcode', 'adclients-adunits-list', 'adclients-adunits-list-linked-custom-channels', 'adclients-adunits-patch', 'adclients-customchannels-create', 'adclients-customchannels-delete', 'adclients-customchannels-get', 'adclients-customchannels-list', 
'adclients-customchannels-list-linked-ad-units', 'adclients-customchannels-patch', 'adclients-get', 'adclients-get-adcode', 'adclients-list', 'adclients-urlchannels-get', 'adclients-urlchannels-list', 'alerts-list', 'get', 'get-ad-blocking-recovery-tag', 'list', 'list-child-accounts', 'payments-list', 'reports-generate', 'reports-generate-csv', 'reports-get-saved', 'reports-saved-generate', 'reports-saved-generate-csv', 'reports-saved-list', 'sites-get' and 'sites-list'", vec![ + ("adclients-adunits-create", + Some(r##"Creates an ad unit. This method can only be used by projects enabled for the [AdSense for Platforms](https://developers.google.com/adsense/platforms/) product. Note that ad units can only be created for ad clients with an "AFC" product code. For more info see the [AdClient resource](/adsense/management/reference/rest/v2/accounts.adclients). For now, this method can only be used to create `DISPLAY` ad units. See: https://support.google.com/adsense/answer/9183566"##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-adunits-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Ad client to create an ad unit under. 
Format: accounts/{account}/adclients/{adclient}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("adclients-adunits-get", Some(r##"Gets an ad unit from a specified account and ad client."##), "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-adunits-get", @@ -1599,7 +2277,7 @@ async fn main() { Some(false)), ]), ("adclients-adunits-get-adcode", - Some(r##"Gets the AdSense code for a given ad unit."##), + Some(r##"Gets the ad unit code for a given ad unit. For more information, see [About the AdSense code](https://support.google.com/adsense/answer/9274634) and [Where to place the ad code in your HTML](https://support.google.com/adsense/answer/9190028)."##), "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-adunits-get-adcode", vec![ (Some(r##"name"##), @@ -1658,6 +2336,84 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("adclients-adunits-patch", + Some(r##"Updates an ad unit. This method can only be used by projects enabled for the [AdSense for Platforms](https://developers.google.com/adsense/platforms/) product. For now, this method can only be used to update `DISPLAY` ad units. See: https://support.google.com/adsense/answer/9183566"##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-adunits-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. 
Resource name of the ad unit. Format: accounts/{account}/adclients/{adclient}/adunits/{adunit}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("adclients-customchannels-create", + Some(r##"Creates a custom channel. This method can only be used by projects enabled for the [AdSense for Platforms](https://developers.google.com/adsense/platforms/) product."##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-customchannels-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The ad client to create a custom channel under. Format: accounts/{account}/adclients/{adclient}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("adclients-customchannels-delete", + Some(r##"Deletes a custom channel. This method can only be used by projects enabled for the [AdSense for Platforms](https://developers.google.com/adsense/platforms/) product."##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-customchannels-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the custom channel to delete. Format: accounts/{account}/adclients/{adclient}/customchannels/{customchannel}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1724,6 +2480,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("adclients-customchannels-patch", + Some(r##"Updates a custom channel. This method can only be used by projects enabled for the [AdSense for Platforms](https://developers.google.com/adsense/platforms/) product."##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-customchannels-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. Resource name of the custom channel. Format: accounts/{account}/adclients/{adclient}/customchannels/{customchannel}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("adclients-get", + Some(r##"Gets the ad client from the given resource name."##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the ad client to retrieve. 
Format: accounts/{account}/adclients/{adclient}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1768,6 +2574,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("adclients-urlchannels-get", + Some(r##"Gets information about the selected url channel."##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_adclients-urlchannels-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the url channel to retrieve. Format: accounts/{account}/adclients/{adclient}/urlchannels/{urlchannel}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1834,6 +2662,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-ad-blocking-recovery-tag", + Some(r##"Gets the ad blocking recovery tag of an account."##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_get-ad-blocking-recovery-tag", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the account to get the tag for. 
Format: accounts/{account}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1938,6 +2788,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("reports-get-saved", + Some(r##"Gets the saved report from the given resource name."##), + "Details at http://byron.github.io/google-apis-rs/google_adsense2_cli/accounts_reports-get-saved", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the saved report to retrieve. Format: accounts/{account}/reports/{report}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2060,7 +2932,7 @@ async fn main() { let mut app = App::new("adsense2") .author("Sebastian Thiel ") - .version("4.0.1+20220304") + .version("5.0.2+20230124") .about("The AdSense Management API allows publishers to access their inventory and run earnings and performance reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adsense2_cli") .arg(Arg::with_name("url") diff --git a/gen/adsense2/Cargo.toml b/gen/adsense2/Cargo.toml index 6727015721..55282b957c 100644 --- a/gen/adsense2/Cargo.toml +++ b/gen/adsense2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-adsense2" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with adsense (protocol v2)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/adsense2" homepage = "https://developers.google.com/adsense/management/" -documentation = "https://docs.rs/google-adsense2/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-adsense2/5.0.2+20230124" license = "MIT" keywords = ["adsense", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/adsense2/README.md b/gen/adsense2/README.md index 4ef7aea3b0..590d32f6bb 100644 --- a/gen/adsense2/README.md +++ b/gen/adsense2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-adsense2` library allows access to all features of the *Google adsense* service. -This documentation was generated from *adsense* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *adsense:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *adsense* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *adsense:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *adsense* *v2* API can be found at the [official documentation site](https://developers.google.com/adsense/management/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/Adsense) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/Adsense) ... 
-* [accounts](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::Account) - * [*adclients adunits create*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientAdunitCreateCall), [*adclients adunits get*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientAdunitGetCall), [*adclients adunits get adcode*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientAdunitGetAdcodeCall), [*adclients adunits list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientAdunitListCall), [*adclients adunits list linked custom channels*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientAdunitListLinkedCustomChannelCall), [*adclients adunits patch*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientAdunitPatchCall), [*adclients customchannels create*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientCustomchannelCreateCall), [*adclients customchannels delete*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientCustomchannelDeleteCall), [*adclients customchannels get*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientCustomchannelGetCall), [*adclients customchannels list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientCustomchannelListCall), [*adclients customchannels list linked ad units*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientCustomchannelListLinkedAdUnitCall), [*adclients customchannels patch*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientCustomchannelPatchCall), [*adclients 
get*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientGetCall), [*adclients get adcode*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientGetAdcodeCall), [*adclients list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientListCall), [*adclients urlchannels get*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientUrlchannelGetCall), [*adclients urlchannels list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAdclientUrlchannelListCall), [*alerts list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountAlertListCall), [*get*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountGetCall), [*get ad blocking recovery tag*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountGetAdBlockingRecoveryTagCall), [*list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountListCall), [*list child accounts*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountListChildAccountCall), [*payments list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountPaymentListCall), [*reports generate*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountReportGenerateCall), [*reports generate csv*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountReportGenerateCsvCall), [*reports get saved*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountReportGetSavedCall), [*reports saved generate*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountReportSavedGenerateCall), [*reports saved generate 
csv*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountReportSavedGenerateCsvCall), [*reports saved list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountReportSavedListCall), [*sites get*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountSiteGetCall) and [*sites list*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/api::AccountSiteListCall) +* [accounts](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::Account) + * [*adclients adunits create*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientAdunitCreateCall), [*adclients adunits get*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientAdunitGetCall), [*adclients adunits get adcode*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientAdunitGetAdcodeCall), [*adclients adunits list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientAdunitListCall), [*adclients adunits list linked custom channels*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientAdunitListLinkedCustomChannelCall), [*adclients adunits patch*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientAdunitPatchCall), [*adclients customchannels create*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientCustomchannelCreateCall), [*adclients customchannels delete*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientCustomchannelDeleteCall), [*adclients customchannels get*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientCustomchannelGetCall), [*adclients customchannels list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientCustomchannelListCall), [*adclients customchannels list linked ad 
units*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientCustomchannelListLinkedAdUnitCall), [*adclients customchannels patch*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientCustomchannelPatchCall), [*adclients get*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientGetCall), [*adclients get adcode*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientGetAdcodeCall), [*adclients list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientListCall), [*adclients urlchannels get*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientUrlchannelGetCall), [*adclients urlchannels list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAdclientUrlchannelListCall), [*alerts list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountAlertListCall), [*get*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountGetCall), [*get ad blocking recovery tag*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountGetAdBlockingRecoveryTagCall), [*list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountListCall), [*list child accounts*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountListChildAccountCall), [*payments list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountPaymentListCall), [*reports generate*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountReportGenerateCall), [*reports generate csv*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountReportGenerateCsvCall), [*reports get saved*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountReportGetSavedCall), [*reports saved 
generate*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountReportSavedGenerateCall), [*reports saved generate csv*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountReportSavedGenerateCsvCall), [*reports saved list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountReportSavedListCall), [*sites get*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountSiteGetCall) and [*sites list*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/api::AccountSiteListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/Adsense)** +* **[Hub](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/Adsense)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::CallBuilder) -* **[Resources](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::CallBuilder) +* **[Resources](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::Part)** + * 
**[Parts](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -163,17 +163,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -183,29 +183,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::Delegate) to the -[Method Builder](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::Delegate) to the +[Method Builder](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::RequestValue) and -[decodable](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::RequestValue) and +[decodable](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-adsense2/5.0.2-beta-1+20230124/google_adsense2/client::RequestValue) are moved +* [request values](https://docs.rs/google-adsense2/5.0.2+20230124/google_adsense2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/adsense2/src/api.rs b/gen/adsense2/src/api.rs index 0e760974de..ea9d9d7337 100644 --- a/gen/adsense2/src/api.rs +++ b/gen/adsense2/src/api.rs @@ -138,7 +138,7 @@ impl<'a, S> Adsense { Adsense { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://adsense.googleapis.com/".to_string(), _root_url: "https://adsense.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> Adsense { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/adsense2/src/client.rs b/gen/adsense2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/adsense2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/adsense2/src/lib.rs b/gen/adsense2/src/lib.rs index 2ed84971c5..b1f758c7f9 100644 --- a/gen/adsense2/src/lib.rs +++ b/gen/adsense2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *adsense* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *adsense:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *adsense* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *adsense:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *adsense* *v2* API can be found at the //! [official documentation site](https://developers.google.com/adsense/management/). diff --git a/gen/adsensehost4d1-cli/Cargo.toml b/gen/adsensehost4d1-cli/Cargo.toml index 035ef9d180..b34041915f 100644 --- a/gen/adsensehost4d1-cli/Cargo.toml +++ b/gen/adsensehost4d1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-adsensehost4d1-cli" -version = "4.0.1+20200930" +version = "5.0.2+20200930" authors = ["Sebastian Thiel "] description = "A complete library to interact with AdSense Host (protocol v4.1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adsensehost4d1-cli" @@ -20,13 +20,13 @@ name = "adsensehost4d1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-adsensehost4d1] path = "../adsensehost4d1" -version = "4.0.1+20200930" +version = "5.0.2+20200930" + diff --git a/gen/adsensehost4d1-cli/README.md b/gen/adsensehost4d1-cli/README.md index e7ee3ba954..427de2dec2 100644 --- a/gen/adsensehost4d1-cli/README.md +++ b/gen/adsensehost4d1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *AdSense Host* API at revision *20200930*. The CLI is at version *4.0.1*. +This documentation was generated from the *AdSense Host* API at revision *20200930*. The CLI is at version *5.0.2*. ```bash adsensehost4d1 [options] diff --git a/gen/adsensehost4d1-cli/mkdocs.yml b/gen/adsensehost4d1-cli/mkdocs.yml index 960c5a33e9..2350c67465 100644 --- a/gen/adsensehost4d1-cli/mkdocs.yml +++ b/gen/adsensehost4d1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: AdSense Host v4.0.1+20200930 +site_name: AdSense Host v5.0.2+20200930 site_url: http://byron.github.io/google-apis-rs/google-adsensehost4d1-cli site_description: A complete library to interact with AdSense Host (protocol v4.1) @@ -7,34 +7,40 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/adsensehost4d1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_adclients-get.md', 'Accounts', 'Adclients Get'] -- ['accounts_adclients-list.md', 'Accounts', 'Adclients List'] -- ['accounts_adunits-delete.md', 'Accounts', 'Adunits Delete'] -- ['accounts_adunits-get.md', 'Accounts', 'Adunits Get'] -- ['accounts_adunits-get-ad-code.md', 'Accounts', 'Adunits Get Ad Code'] -- ['accounts_adunits-insert.md', 'Accounts', 'Adunits Insert'] -- ['accounts_adunits-list.md', 'Accounts', 'Adunits List'] -- ['accounts_adunits-patch.md', 'Accounts', 'Adunits Patch'] -- ['accounts_adunits-update.md', 'Accounts', 'Adunits Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_reports-generate.md', 'Accounts', 'Reports Generate'] -- ['adclients_get.md', 'Adclients', 'Get'] -- ['adclients_list.md', 'Adclients', 'List'] -- ['associationsessions_start.md', 'Associationsessions', 'Start'] -- ['associationsessions_verify.md', 'Associationsessions', 'Verify'] -- ['customchannels_delete.md', 'Customchannels', 'Delete'] -- ['customchannels_get.md', 
'Customchannels', 'Get'] -- ['customchannels_insert.md', 'Customchannels', 'Insert'] -- ['customchannels_list.md', 'Customchannels', 'List'] -- ['customchannels_patch.md', 'Customchannels', 'Patch'] -- ['customchannels_update.md', 'Customchannels', 'Update'] -- ['reports_generate.md', 'Reports', 'Generate'] -- ['urlchannels_delete.md', 'Urlchannels', 'Delete'] -- ['urlchannels_insert.md', 'Urlchannels', 'Insert'] -- ['urlchannels_list.md', 'Urlchannels', 'List'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Adclients Get': 'accounts_adclients-get.md' + - 'Adclients List': 'accounts_adclients-list.md' + - 'Adunits Delete': 'accounts_adunits-delete.md' + - 'Adunits Get': 'accounts_adunits-get.md' + - 'Adunits Get Ad Code': 'accounts_adunits-get-ad-code.md' + - 'Adunits Insert': 'accounts_adunits-insert.md' + - 'Adunits List': 'accounts_adunits-list.md' + - 'Adunits Patch': 'accounts_adunits-patch.md' + - 'Adunits Update': 'accounts_adunits-update.md' + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Reports Generate': 'accounts_reports-generate.md' +- 'Adclients': + - 'Get': 'adclients_get.md' + - 'List': 'adclients_list.md' +- 'Associationsessions': + - 'Start': 'associationsessions_start.md' + - 'Verify': 'associationsessions_verify.md' +- 'Customchannels': + - 'Delete': 'customchannels_delete.md' + - 'Get': 'customchannels_get.md' + - 'Insert': 'customchannels_insert.md' + - 'List': 'customchannels_list.md' + - 'Patch': 'customchannels_patch.md' + - 'Update': 'customchannels_update.md' +- 'Reports': + - 'Generate': 'reports_generate.md' +- 'Urlchannels': + - 'Delete': 'urlchannels_delete.md' + - 'Insert': 'urlchannels_insert.md' + - 'List': 'urlchannels_list.md' theme: readthedocs diff --git a/gen/adsensehost4d1-cli/src/client.rs b/gen/adsensehost4d1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/adsensehost4d1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT 
-use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/adsensehost4d1-cli/src/main.rs b/gen/adsensehost4d1-cli/src/main.rs index 9f54b4d989..64330bcc16 100644 --- a/gen/adsensehost4d1-cli/src/main.rs +++ b/gen/adsensehost4d1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_adsensehost4d1::{api, Error, oauth2}; +use google_adsensehost4d1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -439,10 +438,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-inactive" => { - call = call.include_inactive(arg_from_str(value.unwrap_or("false"), err, "include-inactive", "boolean")); + call = call.include_inactive( value.map(|v| arg_from_str(v, err, "include-inactive", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -816,7 +815,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, 
"start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "sort" => { call = call.add_sort(value.unwrap_or("")); @@ -825,7 +824,7 @@ where call = call.add_metric(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -945,7 +944,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1310,7 +1309,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1542,7 +1541,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "sort" => { call = call.add_sort(value.unwrap_or("")); @@ -1551,7 +1550,7 @@ where call = call.add_metric(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -1758,7 +1757,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => 
{ - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2758,7 +2757,7 @@ async fn main() { let mut app = App::new("adsensehost4d1") .author("Sebastian Thiel ") - .version("4.0.1+20200930") + .version("5.0.2+20200930") .about("Generates performance reports, generates ad codes, and provides publisher management capabilities for AdSense Hosts.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adsensehost4d1_cli") .arg(Arg::with_name("url") diff --git a/gen/adsensehost4d1/Cargo.toml b/gen/adsensehost4d1/Cargo.toml index a7c8e7438f..20ee61e3b2 100644 --- a/gen/adsensehost4d1/Cargo.toml +++ b/gen/adsensehost4d1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-adsensehost4d1" -version = "5.0.2-beta-1+20200930" +version = "5.0.2+20200930" authors = ["Sebastian Thiel "] description = "A complete library to interact with AdSense Host (protocol v4.1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/adsensehost4d1" homepage = "https://developers.google.com/adsense/host/" -documentation = "https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930" +documentation = "https://docs.rs/google-adsensehost4d1/5.0.2+20200930" license = "MIT" keywords = ["adsensehost", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/adsensehost4d1/README.md b/gen/adsensehost4d1/README.md index af393a60be..7a70b0aeeb 100644 --- a/gen/adsensehost4d1/README.md +++ b/gen/adsensehost4d1/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-adsensehost4d1` library allows access to all features of the *Google AdSense Host* service. 
-This documentation was generated from *AdSense Host* crate version *5.0.2-beta-1+20200930*, where *20200930* is the exact revision of the *adsensehost:v4.1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *AdSense Host* crate version *5.0.2+20200930*, where *20200930* is the exact revision of the *adsensehost:v4.1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *AdSense Host* *v4d1* API can be found at the [official documentation site](https://developers.google.com/adsense/host/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/AdSenseHost) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/AdSenseHost) ... -* [accounts](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::Account) - * [*adclients get*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdclientGetCall), [*adclients list*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdclientListCall), [*adunits delete*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdunitDeleteCall), [*adunits get*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdunitGetCall), [*adunits get ad code*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdunitGetAdCodeCall), [*adunits insert*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdunitInsertCall), [*adunits list*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdunitListCall), 
[*adunits patch*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdunitPatchCall), [*adunits update*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountAdunitUpdateCall), [*get*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountGetCall), [*list*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountListCall) and [*reports generate*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AccountReportGenerateCall) +* [accounts](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::Account) + * [*adclients get*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdclientGetCall), [*adclients list*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdclientListCall), [*adunits delete*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdunitDeleteCall), [*adunits get*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdunitGetCall), [*adunits get ad code*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdunitGetAdCodeCall), [*adunits insert*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdunitInsertCall), [*adunits list*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdunitListCall), [*adunits patch*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdunitPatchCall), [*adunits update*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountAdunitUpdateCall), [*get*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountGetCall), 
[*list*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountListCall) and [*reports generate*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AccountReportGenerateCall) * adclients - * [*get*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AdclientGetCall) and [*list*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AdclientListCall) + * [*get*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AdclientGetCall) and [*list*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AdclientListCall) * associationsessions - * [*start*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AssociationsessionStartCall) and [*verify*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::AssociationsessionVerifyCall) + * [*start*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AssociationsessionStartCall) and [*verify*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::AssociationsessionVerifyCall) * customchannels - * [*delete*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::CustomchannelDeleteCall), [*get*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::CustomchannelGetCall), [*insert*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::CustomchannelInsertCall), [*list*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::CustomchannelListCall), [*patch*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::CustomchannelPatchCall) and [*update*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::CustomchannelUpdateCall) -* 
[reports](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::Report) - * [*generate*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::ReportGenerateCall) + * [*delete*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::CustomchannelDeleteCall), [*get*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::CustomchannelGetCall), [*insert*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::CustomchannelInsertCall), [*list*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::CustomchannelListCall), [*patch*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::CustomchannelPatchCall) and [*update*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::CustomchannelUpdateCall) +* [reports](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::Report) + * [*generate*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::ReportGenerateCall) * urlchannels - * [*delete*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::UrlchannelDeleteCall), [*insert*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::UrlchannelInsertCall) and [*list*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/api::UrlchannelListCall) + * [*delete*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::UrlchannelDeleteCall), [*insert*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::UrlchannelInsertCall) and [*list*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/api::UrlchannelListCall) @@ -33,17 +33,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the 
following primary items: -* **[Hub](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/AdSenseHost)** +* **[Hub](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/AdSenseHost)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::CallBuilder) -* **[Resources](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::CallBuilder) +* **[Resources](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::Part)** + * **[Parts](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -146,17 +146,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -166,29 +166,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::Delegate) to the -[Method Builder](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::Delegate) to the +[Method Builder](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::RequestValue) and -[decodable](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::RequestValue) and +[decodable](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-adsensehost4d1/5.0.2-beta-1+20200930/google_adsensehost4d1/client::RequestValue) are moved +* [request values](https://docs.rs/google-adsensehost4d1/5.0.2+20200930/google_adsensehost4d1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/adsensehost4d1/src/api.rs b/gen/adsensehost4d1/src/api.rs index b2ed466e8e..d8c9af4d38 100644 --- a/gen/adsensehost4d1/src/api.rs +++ b/gen/adsensehost4d1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> AdSenseHost { AdSenseHost { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/adsensehost/v4.1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -152,7 +152,7 @@ impl<'a, S> AdSenseHost { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/adsensehost4d1/src/client.rs b/gen/adsensehost4d1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/adsensehost4d1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/adsensehost4d1/src/lib.rs b/gen/adsensehost4d1/src/lib.rs index 672b61d69a..c18bb03c53 100644 --- a/gen/adsensehost4d1/src/lib.rs +++ b/gen/adsensehost4d1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *AdSense Host* crate version *5.0.2-beta-1+20200930*, where *20200930* is the exact revision of the *adsensehost:v4.1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *AdSense Host* crate version *5.0.2+20200930*, where *20200930* is the exact revision of the *adsensehost:v4.1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *AdSense Host* *v4d1* API can be found at the //! [official documentation site](https://developers.google.com/adsense/host/). diff --git a/gen/alertcenter1_beta1-cli/Cargo.toml b/gen/alertcenter1_beta1-cli/Cargo.toml index 88c2f5e46d..77466159c3 100644 --- a/gen/alertcenter1_beta1-cli/Cargo.toml +++ b/gen/alertcenter1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-alertcenter1_beta1-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with AlertCenter (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/alertcenter1_beta1-cli" @@ -20,13 +20,13 @@ name = "alertcenter1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-alertcenter1_beta1] path = "../alertcenter1_beta1" -version = "4.0.1+20220221" +version = "5.0.2+20230123" + diff --git a/gen/alertcenter1_beta1-cli/README.md b/gen/alertcenter1_beta1-cli/README.md index 59adb1dc5a..034f4556cc 100644 --- a/gen/alertcenter1_beta1-cli/README.md +++ 
b/gen/alertcenter1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *AlertCenter* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *AlertCenter* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash alertcenter1-beta1 [options] diff --git a/gen/alertcenter1_beta1-cli/mkdocs.yml b/gen/alertcenter1_beta1-cli/mkdocs.yml index 841ee6d887..5a2a7ad194 100644 --- a/gen/alertcenter1_beta1-cli/mkdocs.yml +++ b/gen/alertcenter1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: AlertCenter v4.0.1+20220221 +site_name: AlertCenter v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-alertcenter1_beta1-cli site_description: A complete library to interact with AlertCenter (protocol v1beta1) @@ -7,19 +7,21 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/alertcenter1_bet docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['alerts_batch-delete.md', 'Alerts', 'Batch Delete'] -- ['alerts_batch-undelete.md', 'Alerts', 'Batch Undelete'] -- ['alerts_delete.md', 'Alerts', 'Delete'] -- ['alerts_feedback-create.md', 'Alerts', 'Feedback Create'] -- ['alerts_feedback-list.md', 'Alerts', 'Feedback List'] -- ['alerts_get.md', 'Alerts', 'Get'] -- ['alerts_get-metadata.md', 'Alerts', 'Get Metadata'] -- ['alerts_list.md', 'Alerts', 'List'] -- ['alerts_undelete.md', 'Alerts', 'Undelete'] -- ['methods_get-settings.md', 'Methods', 'Get Settings'] -- ['methods_update-settings.md', 'Methods', 'Update Settings'] +nav: +- Home: 'index.md' +- 'Alerts': + - 'Batch Delete': 'alerts_batch-delete.md' + - 'Batch Undelete': 'alerts_batch-undelete.md' + - 'Delete': 'alerts_delete.md' + - 'Feedback Create': 'alerts_feedback-create.md' + - 'Feedback List': 'alerts_feedback-list.md' + - 'Get': 'alerts_get.md' + - 'Get Metadata': 'alerts_get-metadata.md' + - 'List': 
'alerts_list.md' + - 'Undelete': 'alerts_undelete.md' +- 'Methods': + - 'Get Settings': 'methods_get-settings.md' + - 'Update Settings': 'methods_update-settings.md' theme: readthedocs diff --git a/gen/alertcenter1_beta1-cli/src/client.rs b/gen/alertcenter1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/alertcenter1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/alertcenter1_beta1-cli/src/main.rs b/gen/alertcenter1_beta1-cli/src/main.rs index 59635f97dc..1fe767769e 100644 --- a/gen/alertcenter1_beta1-cli/src/main.rs +++ b/gen/alertcenter1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_alertcenter1_beta1::{api, Error, oauth2}; +use google_alertcenter1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -554,7 +553,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1220,8 +1219,8 @@ async fn main() { let mut app = App::new("alertcenter1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220221") - .about("Manages alerts on issues affecting your domain.") + .version("5.0.2+20230123") + .about("Manages alerts on issues affecting your domain. Note: The current version of this API (v1beta1) is available to all Google Workspace customers. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_alertcenter1_beta1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/alertcenter1_beta1/Cargo.toml b/gen/alertcenter1_beta1/Cargo.toml index af141ac635..36ffa9f1aa 100644 --- a/gen/alertcenter1_beta1/Cargo.toml +++ b/gen/alertcenter1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-alertcenter1_beta1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with AlertCenter (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/alertcenter1_beta1" homepage = "https://developers.google.com/admin-sdk/alertcenter/" -documentation = "https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123" license = "MIT" keywords = ["alertcenter", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/alertcenter1_beta1/README.md b/gen/alertcenter1_beta1/README.md index 79cde2d76f..d03c69d05c 100644 --- a/gen/alertcenter1_beta1/README.md +++ b/gen/alertcenter1_beta1/README.md @@ -5,21 +5,21 @@ DO NOT EDIT ! --> The `google-alertcenter1_beta1` library allows access to all features of the *Google AlertCenter* service. -This documentation was generated from *AlertCenter* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *alertcenter:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *AlertCenter* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *alertcenter:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *AlertCenter* *v1_beta1* API can be found at the [official documentation site](https://developers.google.com/admin-sdk/alertcenter/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/AlertCenter) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/AlertCenter) ... -* [alerts](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::Alert) - * [*batch delete*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertBatchDeleteCall), [*batch undelete*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertBatchUndeleteCall), [*delete*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertDeleteCall), [*feedback create*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertFeedbackCreateCall), [*feedback list*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertFeedbackListCall), [*get*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertGetCall), [*get metadata*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertGetMetadataCall), [*list*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertListCall) and [*undelete*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::AlertUndeleteCall) +* [alerts](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::Alert) + * [*batch 
delete*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertBatchDeleteCall), [*batch undelete*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertBatchUndeleteCall), [*delete*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertDeleteCall), [*feedback create*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertFeedbackCreateCall), [*feedback list*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertFeedbackListCall), [*get*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertGetCall), [*get metadata*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertGetMetadataCall), [*list*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertListCall) and [*undelete*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::AlertUndeleteCall) Other activities are ... -* [get settings](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::MethodGetSettingCall) -* [update settings](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/api::MethodUpdateSettingCall) +* [get settings](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::MethodGetSettingCall) +* [update settings](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/api::MethodUpdateSettingCall) @@ -27,17 +27,17 @@ Other activities are ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/AlertCenter)** +* **[Hub](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/AlertCenter)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease 
browsing. @@ -135,17 +135,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -155,29 +155,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-alertcenter1_beta1/5.0.2-beta-1+20230123/google_alertcenter1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-alertcenter1_beta1/5.0.2+20230123/google_alertcenter1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/alertcenter1_beta1/src/api.rs b/gen/alertcenter1_beta1/src/api.rs index 0fd010836b..1979181948 100644 --- a/gen/alertcenter1_beta1/src/api.rs +++ b/gen/alertcenter1_beta1/src/api.rs @@ -124,7 +124,7 @@ impl<'a, S> AlertCenter { AlertCenter { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://alertcenter.googleapis.com/".to_string(), _root_url: "https://alertcenter.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> AlertCenter { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/alertcenter1_beta1/src/client.rs b/gen/alertcenter1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/alertcenter1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/alertcenter1_beta1/src/lib.rs b/gen/alertcenter1_beta1/src/lib.rs index bdd8e01227..1b929574fc 100644 --- a/gen/alertcenter1_beta1/src/lib.rs +++ b/gen/alertcenter1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *AlertCenter* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *alertcenter:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *AlertCenter* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *alertcenter:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *AlertCenter* *v1_beta1* API can be found at the //! [official documentation site](https://developers.google.com/admin-sdk/alertcenter/). diff --git a/gen/analytics3-cli/Cargo.toml b/gen/analytics3-cli/Cargo.toml index 304c0ff039..91011c1ea4 100644 --- a/gen/analytics3-cli/Cargo.toml +++ b/gen/analytics3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-analytics3-cli" -version = "4.0.1+20190807" +version = "5.0.2+20190807" authors = ["Sebastian Thiel "] description = "A complete library to interact with analytics (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/analytics3-cli" @@ -20,13 +20,13 @@ name = "analytics3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-analytics3] path = "../analytics3" -version = "4.0.1+20190807" +version = "5.0.2+20190807" + diff --git a/gen/analytics3-cli/README.md b/gen/analytics3-cli/README.md index 9cd6361847..104505116e 100644 --- a/gen/analytics3-cli/README.md +++ b/gen/analytics3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *analytics* API at revision *20190807*. The CLI is at version *4.0.1*. +This documentation was generated from the *analytics* API at revision *20190807*. The CLI is at version *5.0.2*. ```bash analytics3 [options] diff --git a/gen/analytics3-cli/mkdocs.yml b/gen/analytics3-cli/mkdocs.yml index 088a02c641..5cd29e9284 100644 --- a/gen/analytics3-cli/mkdocs.yml +++ b/gen/analytics3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: analytics v4.0.1+20190807 +site_name: analytics v5.0.2+20190807 site_url: http://byron.github.io/google-apis-rs/google-analytics3-cli site_description: A complete library to interact with analytics (protocol v3) @@ -7,96 +7,101 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/analytics3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['data_ga-get.md', 'Data', 'Ga Get'] -- ['data_mcf-get.md', 'Data', 'Mcf Get'] -- ['data_realtime-get.md', 'Data', 'Realtime Get'] -- ['management_account-summaries-list.md', 'Management', 'Account Summaries List'] -- ['management_account-user-links-delete.md', 'Management', 'Account User Links Delete'] -- ['management_account-user-links-insert.md', 'Management', 'Account User Links Insert'] -- ['management_account-user-links-list.md', 'Management', 'Account User Links List'] -- ['management_account-user-links-update.md', 'Management', 'Account User Links Update'] -- ['management_accounts-list.md', 'Management', 'Accounts List'] -- ['management_client-id-hash-client-id.md', 'Management', 'Client Id Hash Client Id'] -- ['management_custom-data-sources-list.md', 'Management', 'Custom Data Sources List'] -- ['management_custom-dimensions-get.md', 'Management', 'Custom Dimensions Get'] -- ['management_custom-dimensions-insert.md', 'Management', 'Custom Dimensions Insert'] -- ['management_custom-dimensions-list.md', 'Management', 'Custom Dimensions List'] -- 
['management_custom-dimensions-patch.md', 'Management', 'Custom Dimensions Patch'] -- ['management_custom-dimensions-update.md', 'Management', 'Custom Dimensions Update'] -- ['management_custom-metrics-get.md', 'Management', 'Custom Metrics Get'] -- ['management_custom-metrics-insert.md', 'Management', 'Custom Metrics Insert'] -- ['management_custom-metrics-list.md', 'Management', 'Custom Metrics List'] -- ['management_custom-metrics-patch.md', 'Management', 'Custom Metrics Patch'] -- ['management_custom-metrics-update.md', 'Management', 'Custom Metrics Update'] -- ['management_experiments-delete.md', 'Management', 'Experiments Delete'] -- ['management_experiments-get.md', 'Management', 'Experiments Get'] -- ['management_experiments-insert.md', 'Management', 'Experiments Insert'] -- ['management_experiments-list.md', 'Management', 'Experiments List'] -- ['management_experiments-patch.md', 'Management', 'Experiments Patch'] -- ['management_experiments-update.md', 'Management', 'Experiments Update'] -- ['management_filters-delete.md', 'Management', 'Filters Delete'] -- ['management_filters-get.md', 'Management', 'Filters Get'] -- ['management_filters-insert.md', 'Management', 'Filters Insert'] -- ['management_filters-list.md', 'Management', 'Filters List'] -- ['management_filters-patch.md', 'Management', 'Filters Patch'] -- ['management_filters-update.md', 'Management', 'Filters Update'] -- ['management_goals-get.md', 'Management', 'Goals Get'] -- ['management_goals-insert.md', 'Management', 'Goals Insert'] -- ['management_goals-list.md', 'Management', 'Goals List'] -- ['management_goals-patch.md', 'Management', 'Goals Patch'] -- ['management_goals-update.md', 'Management', 'Goals Update'] -- ['management_profile-filter-links-delete.md', 'Management', 'Profile Filter Links Delete'] -- ['management_profile-filter-links-get.md', 'Management', 'Profile Filter Links Get'] -- ['management_profile-filter-links-insert.md', 'Management', 'Profile Filter Links Insert'] -- 
['management_profile-filter-links-list.md', 'Management', 'Profile Filter Links List'] -- ['management_profile-filter-links-patch.md', 'Management', 'Profile Filter Links Patch'] -- ['management_profile-filter-links-update.md', 'Management', 'Profile Filter Links Update'] -- ['management_profile-user-links-delete.md', 'Management', 'Profile User Links Delete'] -- ['management_profile-user-links-insert.md', 'Management', 'Profile User Links Insert'] -- ['management_profile-user-links-list.md', 'Management', 'Profile User Links List'] -- ['management_profile-user-links-update.md', 'Management', 'Profile User Links Update'] -- ['management_profiles-delete.md', 'Management', 'Profiles Delete'] -- ['management_profiles-get.md', 'Management', 'Profiles Get'] -- ['management_profiles-insert.md', 'Management', 'Profiles Insert'] -- ['management_profiles-list.md', 'Management', 'Profiles List'] -- ['management_profiles-patch.md', 'Management', 'Profiles Patch'] -- ['management_profiles-update.md', 'Management', 'Profiles Update'] -- ['management_remarketing-audience-delete.md', 'Management', 'Remarketing Audience Delete'] -- ['management_remarketing-audience-get.md', 'Management', 'Remarketing Audience Get'] -- ['management_remarketing-audience-insert.md', 'Management', 'Remarketing Audience Insert'] -- ['management_remarketing-audience-list.md', 'Management', 'Remarketing Audience List'] -- ['management_remarketing-audience-patch.md', 'Management', 'Remarketing Audience Patch'] -- ['management_remarketing-audience-update.md', 'Management', 'Remarketing Audience Update'] -- ['management_segments-list.md', 'Management', 'Segments List'] -- ['management_unsampled-reports-delete.md', 'Management', 'Unsampled Reports Delete'] -- ['management_unsampled-reports-get.md', 'Management', 'Unsampled Reports Get'] -- ['management_unsampled-reports-insert.md', 'Management', 'Unsampled Reports Insert'] -- ['management_unsampled-reports-list.md', 'Management', 'Unsampled Reports List'] -- 
['management_uploads-delete-upload-data.md', 'Management', 'Uploads Delete Upload Data'] -- ['management_uploads-get.md', 'Management', 'Uploads Get'] -- ['management_uploads-list.md', 'Management', 'Uploads List'] -- ['management_uploads-upload-data.md', 'Management', 'Uploads Upload Data'] -- ['management_web-property-ad-words-links-delete.md', 'Management', 'Web Property Ad Words Links Delete'] -- ['management_web-property-ad-words-links-get.md', 'Management', 'Web Property Ad Words Links Get'] -- ['management_web-property-ad-words-links-insert.md', 'Management', 'Web Property Ad Words Links Insert'] -- ['management_web-property-ad-words-links-list.md', 'Management', 'Web Property Ad Words Links List'] -- ['management_web-property-ad-words-links-patch.md', 'Management', 'Web Property Ad Words Links Patch'] -- ['management_web-property-ad-words-links-update.md', 'Management', 'Web Property Ad Words Links Update'] -- ['management_webproperties-get.md', 'Management', 'Webproperties Get'] -- ['management_webproperties-insert.md', 'Management', 'Webproperties Insert'] -- ['management_webproperties-list.md', 'Management', 'Webproperties List'] -- ['management_webproperties-patch.md', 'Management', 'Webproperties Patch'] -- ['management_webproperties-update.md', 'Management', 'Webproperties Update'] -- ['management_webproperty-user-links-delete.md', 'Management', 'Webproperty User Links Delete'] -- ['management_webproperty-user-links-insert.md', 'Management', 'Webproperty User Links Insert'] -- ['management_webproperty-user-links-list.md', 'Management', 'Webproperty User Links List'] -- ['management_webproperty-user-links-update.md', 'Management', 'Webproperty User Links Update'] -- ['metadata_columns-list.md', 'Metadata', 'Columns List'] -- ['provisioning_create-account-ticket.md', 'Provisioning', 'Create Account Ticket'] -- ['provisioning_create-account-tree.md', 'Provisioning', 'Create Account Tree'] -- ['user-deletion_user-deletion-request-upsert.md', 'User 
Deletion', 'User Deletion Request Upsert'] +nav: +- Home: 'index.md' +- 'Data': + - 'Ga Get': 'data_ga-get.md' + - 'Mcf Get': 'data_mcf-get.md' + - 'Realtime Get': 'data_realtime-get.md' +- 'Management': + - 'Account Summaries List': 'management_account-summaries-list.md' + - 'Account User Links Delete': 'management_account-user-links-delete.md' + - 'Account User Links Insert': 'management_account-user-links-insert.md' + - 'Account User Links List': 'management_account-user-links-list.md' + - 'Account User Links Update': 'management_account-user-links-update.md' + - 'Accounts List': 'management_accounts-list.md' + - 'Client Id Hash Client Id': 'management_client-id-hash-client-id.md' + - 'Custom Data Sources List': 'management_custom-data-sources-list.md' + - 'Custom Dimensions Get': 'management_custom-dimensions-get.md' + - 'Custom Dimensions Insert': 'management_custom-dimensions-insert.md' + - 'Custom Dimensions List': 'management_custom-dimensions-list.md' + - 'Custom Dimensions Patch': 'management_custom-dimensions-patch.md' + - 'Custom Dimensions Update': 'management_custom-dimensions-update.md' + - 'Custom Metrics Get': 'management_custom-metrics-get.md' + - 'Custom Metrics Insert': 'management_custom-metrics-insert.md' + - 'Custom Metrics List': 'management_custom-metrics-list.md' + - 'Custom Metrics Patch': 'management_custom-metrics-patch.md' + - 'Custom Metrics Update': 'management_custom-metrics-update.md' + - 'Experiments Delete': 'management_experiments-delete.md' + - 'Experiments Get': 'management_experiments-get.md' + - 'Experiments Insert': 'management_experiments-insert.md' + - 'Experiments List': 'management_experiments-list.md' + - 'Experiments Patch': 'management_experiments-patch.md' + - 'Experiments Update': 'management_experiments-update.md' + - 'Filters Delete': 'management_filters-delete.md' + - 'Filters Get': 'management_filters-get.md' + - 'Filters Insert': 'management_filters-insert.md' + - 'Filters List': 'management_filters-list.md' + 
- 'Filters Patch': 'management_filters-patch.md' + - 'Filters Update': 'management_filters-update.md' + - 'Goals Get': 'management_goals-get.md' + - 'Goals Insert': 'management_goals-insert.md' + - 'Goals List': 'management_goals-list.md' + - 'Goals Patch': 'management_goals-patch.md' + - 'Goals Update': 'management_goals-update.md' + - 'Profile Filter Links Delete': 'management_profile-filter-links-delete.md' + - 'Profile Filter Links Get': 'management_profile-filter-links-get.md' + - 'Profile Filter Links Insert': 'management_profile-filter-links-insert.md' + - 'Profile Filter Links List': 'management_profile-filter-links-list.md' + - 'Profile Filter Links Patch': 'management_profile-filter-links-patch.md' + - 'Profile Filter Links Update': 'management_profile-filter-links-update.md' + - 'Profile User Links Delete': 'management_profile-user-links-delete.md' + - 'Profile User Links Insert': 'management_profile-user-links-insert.md' + - 'Profile User Links List': 'management_profile-user-links-list.md' + - 'Profile User Links Update': 'management_profile-user-links-update.md' + - 'Profiles Delete': 'management_profiles-delete.md' + - 'Profiles Get': 'management_profiles-get.md' + - 'Profiles Insert': 'management_profiles-insert.md' + - 'Profiles List': 'management_profiles-list.md' + - 'Profiles Patch': 'management_profiles-patch.md' + - 'Profiles Update': 'management_profiles-update.md' + - 'Remarketing Audience Delete': 'management_remarketing-audience-delete.md' + - 'Remarketing Audience Get': 'management_remarketing-audience-get.md' + - 'Remarketing Audience Insert': 'management_remarketing-audience-insert.md' + - 'Remarketing Audience List': 'management_remarketing-audience-list.md' + - 'Remarketing Audience Patch': 'management_remarketing-audience-patch.md' + - 'Remarketing Audience Update': 'management_remarketing-audience-update.md' + - 'Segments List': 'management_segments-list.md' + - 'Unsampled Reports Delete': 'management_unsampled-reports-delete.md' + 
- 'Unsampled Reports Get': 'management_unsampled-reports-get.md' + - 'Unsampled Reports Insert': 'management_unsampled-reports-insert.md' + - 'Unsampled Reports List': 'management_unsampled-reports-list.md' + - 'Uploads Delete Upload Data': 'management_uploads-delete-upload-data.md' + - 'Uploads Get': 'management_uploads-get.md' + - 'Uploads List': 'management_uploads-list.md' + - 'Uploads Upload Data': 'management_uploads-upload-data.md' + - 'Web Property Ad Words Links Delete': 'management_web-property-ad-words-links-delete.md' + - 'Web Property Ad Words Links Get': 'management_web-property-ad-words-links-get.md' + - 'Web Property Ad Words Links Insert': 'management_web-property-ad-words-links-insert.md' + - 'Web Property Ad Words Links List': 'management_web-property-ad-words-links-list.md' + - 'Web Property Ad Words Links Patch': 'management_web-property-ad-words-links-patch.md' + - 'Web Property Ad Words Links Update': 'management_web-property-ad-words-links-update.md' + - 'Webproperties Get': 'management_webproperties-get.md' + - 'Webproperties Insert': 'management_webproperties-insert.md' + - 'Webproperties List': 'management_webproperties-list.md' + - 'Webproperties Patch': 'management_webproperties-patch.md' + - 'Webproperties Update': 'management_webproperties-update.md' + - 'Webproperty User Links Delete': 'management_webproperty-user-links-delete.md' + - 'Webproperty User Links Insert': 'management_webproperty-user-links-insert.md' + - 'Webproperty User Links List': 'management_webproperty-user-links-list.md' + - 'Webproperty User Links Update': 'management_webproperty-user-links-update.md' +- 'Metadata': + - 'Columns List': 'metadata_columns-list.md' +- 'Provisioning': + - 'Create Account Ticket': 'provisioning_create-account-ticket.md' + - 'Create Account Tree': 'provisioning_create-account-tree.md' +- 'User Deletion': + - 'User Deletion Request Upsert': 'user-deletion_user-deletion-request-upsert.md' theme: readthedocs diff --git 
a/gen/analytics3-cli/src/client.rs b/gen/analytics3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/analytics3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/analytics3-cli/src/main.rs b/gen/analytics3-cli/src/main.rs index 8a19e1bc8a..80687306c2 100644 --- a/gen/analytics3-cli/src/main.rs +++ b/gen/analytics3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_analytics3::{api, Error, oauth2}; +use google_analytics3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,7 +57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "sort" => { call = call.sort(value.unwrap_or("")); @@ -73,10 +72,10 @@ where call = call.output(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-empty-rows" => { - call = call.include_empty_rows(arg_from_str(value.unwrap_or("false"), err, "include-empty-rows", "boolean")); + call = call.include_empty_rows( value.map(|v| arg_from_str(v, err, "include-empty-rows", "boolean")).unwrap_or(false)); }, "filters" => { call = call.filters(value.unwrap_or("")); @@ -138,7 +137,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = 
call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "sort" => { call = call.sort(value.unwrap_or("")); @@ -147,7 +146,7 @@ where call = call.sampling_level(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "filters" => { call = call.filters(value.unwrap_or("")); @@ -212,7 +211,7 @@ where call = call.sort(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "filters" => { call = call.filters(value.unwrap_or("")); @@ -274,10 +273,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -486,10 +485,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, 
"max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -654,10 +653,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -800,10 +799,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1008,10 +1007,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1112,7 +1111,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ignore-custom-data-source-links" => { - call = call.ignore_custom_data_source_links(arg_from_str(value.unwrap_or("false"), err, 
"ignore-custom-data-source-links", "boolean")); + call = call.ignore_custom_data_source_links( value.map(|v| arg_from_str(v, err, "ignore-custom-data-source-links", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1213,7 +1212,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ignore-custom-data-source-links" => { - call = call.ignore_custom_data_source_links(arg_from_str(value.unwrap_or("false"), err, "ignore-custom-data-source-links", "boolean")); + call = call.ignore_custom_data_source_links( value.map(|v| arg_from_str(v, err, "ignore-custom-data-source-links", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1421,10 +1420,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1528,7 +1527,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ignore-custom-data-source-links" => { - call = call.ignore_custom_data_source_links(arg_from_str(value.unwrap_or("false"), err, "ignore-custom-data-source-links", "boolean")); + call = call.ignore_custom_data_source_links( value.map(|v| arg_from_str(v, err, "ignore-custom-data-source-links", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1632,7 +1631,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ignore-custom-data-source-links" => { - call = call.ignore_custom_data_source_links(arg_from_str(value.unwrap_or("false"), err, "ignore-custom-data-source-links", "boolean")); + call = 
call.ignore_custom_data_source_links( value.map(|v| arg_from_str(v, err, "ignore-custom-data-source-links", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1896,10 +1895,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2411,10 +2410,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2886,10 +2885,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3357,10 +3356,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = 
call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3769,10 +3768,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4145,10 +4144,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4634,10 +4633,10 @@ where call = call.type_(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = 
call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4907,10 +4906,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5166,10 +5165,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5354,10 +5353,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5659,10 +5658,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), 
err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6065,10 +6064,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6487,10 +6486,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10376,7 +10375,7 @@ async fn main() { let mut app = App::new("analytics3") .author("Sebastian Thiel ") - .version("4.0.1+20190807") + .version("5.0.2+20190807") .about("Views and manages your Google Analytics data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_analytics3_cli") .arg(Arg::with_name("url") diff --git a/gen/analytics3/Cargo.toml b/gen/analytics3/Cargo.toml index 6bd24e4029..3321309422 100644 --- 
a/gen/analytics3/Cargo.toml +++ b/gen/analytics3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-analytics3" -version = "5.0.2-beta-1+20190807" +version = "5.0.2+20190807" authors = ["Sebastian Thiel "] description = "A complete library to interact with analytics (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/analytics3" homepage = "https://developers.google.com/analytics/" -documentation = "https://docs.rs/google-analytics3/5.0.2-beta-1+20190807" +documentation = "https://docs.rs/google-analytics3/5.0.2+20190807" license = "MIT" keywords = ["analytics", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/analytics3/README.md b/gen/analytics3/README.md index 37701e4be0..547a186df8 100644 --- a/gen/analytics3/README.md +++ b/gen/analytics3/README.md @@ -5,29 +5,29 @@ DO NOT EDIT ! --> The `google-analytics3` library allows access to all features of the *Google analytics* service. -This documentation was generated from *analytics* crate version *5.0.2-beta-1+20190807*, where *20190807* is the exact revision of the *analytics:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *analytics* crate version *5.0.2+20190807*, where *20190807* is the exact revision of the *analytics:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *analytics* *v3* API can be found at the [official documentation site](https://developers.google.com/analytics/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/Analytics) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/Analytics) ... 
* data - * [*ga get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::DataGaGetCall), [*mcf get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::DataMcfGetCall) and [*realtime get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::DataRealtimeGetCall) + * [*ga get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::DataGaGetCall), [*mcf get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::DataMcfGetCall) and [*realtime get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::DataRealtimeGetCall) * management - * [*account summaries list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementAccountSummaryListCall), [*account user links delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementAccountUserLinkDeleteCall), [*account user links insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementAccountUserLinkInsertCall), [*account user links list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementAccountUserLinkListCall), [*account user links update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementAccountUserLinkUpdateCall), [*accounts list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementAccountListCall), [*client id hash client id*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementClientIdHashClientIdCall), [*custom data sources list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomDataSourceListCall), [*custom dimensions get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomDimensionGetCall), [*custom dimensions 
insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomDimensionInsertCall), [*custom dimensions list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomDimensionListCall), [*custom dimensions patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomDimensionPatchCall), [*custom dimensions update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomDimensionUpdateCall), [*custom metrics get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomMetricGetCall), [*custom metrics insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomMetricInsertCall), [*custom metrics list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomMetricListCall), [*custom metrics patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomMetricPatchCall), [*custom metrics update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementCustomMetricUpdateCall), [*experiments delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementExperimentDeleteCall), [*experiments get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementExperimentGetCall), [*experiments insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementExperimentInsertCall), [*experiments list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementExperimentListCall), [*experiments patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementExperimentPatchCall), [*experiments 
update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementExperimentUpdateCall), [*filters delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementFilterDeleteCall), [*filters get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementFilterGetCall), [*filters insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementFilterInsertCall), [*filters list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementFilterListCall), [*filters patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementFilterPatchCall), [*filters update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementFilterUpdateCall), [*goals get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementGoalGetCall), [*goals insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementGoalInsertCall), [*goals list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementGoalListCall), [*goals patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementGoalPatchCall), [*goals update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementGoalUpdateCall), [*profile filter links delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileFilterLinkDeleteCall), [*profile filter links get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileFilterLinkGetCall), [*profile filter links insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileFilterLinkInsertCall), [*profile filter links 
list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileFilterLinkListCall), [*profile filter links patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileFilterLinkPatchCall), [*profile filter links update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileFilterLinkUpdateCall), [*profile user links delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileUserLinkDeleteCall), [*profile user links insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileUserLinkInsertCall), [*profile user links list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileUserLinkListCall), [*profile user links update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileUserLinkUpdateCall), [*profiles delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileDeleteCall), [*profiles get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileGetCall), [*profiles insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileInsertCall), [*profiles list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileListCall), [*profiles patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfilePatchCall), [*profiles update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementProfileUpdateCall), [*remarketing audience delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementRemarketingAudienceDeleteCall), [*remarketing audience 
get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementRemarketingAudienceGetCall), [*remarketing audience insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementRemarketingAudienceInsertCall), [*remarketing audience list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementRemarketingAudienceListCall), [*remarketing audience patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementRemarketingAudiencePatchCall), [*remarketing audience update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementRemarketingAudienceUpdateCall), [*segments list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementSegmentListCall), [*unsampled reports delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUnsampledReportDeleteCall), [*unsampled reports get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUnsampledReportGetCall), [*unsampled reports insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUnsampledReportInsertCall), [*unsampled reports list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUnsampledReportListCall), [*uploads delete upload data*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUploadDeleteUploadDataCall), [*uploads get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUploadGetCall), [*uploads list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUploadListCall), [*uploads upload data*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUploadUploadDataCall), [*web property ad words links 
delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkDeleteCall), [*web property ad words links get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkGetCall), [*web property ad words links insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkInsertCall), [*web property ad words links list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkListCall), [*web property ad words links patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkPatchCall), [*web property ad words links update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkUpdateCall), [*webproperties get*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyGetCall), [*webproperties insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyInsertCall), [*webproperties list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyListCall), [*webproperties patch*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyPatchCall), [*webproperties update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyUpdateCall), [*webproperty user links delete*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyUserLinkDeleteCall), [*webproperty user links insert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyUserLinkInsertCall), [*webproperty user links 
list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyUserLinkListCall) and [*webproperty user links update*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementWebpropertyUserLinkUpdateCall) + * [*account summaries list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementAccountSummaryListCall), [*account user links delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementAccountUserLinkDeleteCall), [*account user links insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementAccountUserLinkInsertCall), [*account user links list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementAccountUserLinkListCall), [*account user links update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementAccountUserLinkUpdateCall), [*accounts list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementAccountListCall), [*client id hash client id*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementClientIdHashClientIdCall), [*custom data sources list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomDataSourceListCall), [*custom dimensions get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomDimensionGetCall), [*custom dimensions insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomDimensionInsertCall), [*custom dimensions list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomDimensionListCall), [*custom dimensions patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomDimensionPatchCall), [*custom dimensions 
update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomDimensionUpdateCall), [*custom metrics get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomMetricGetCall), [*custom metrics insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomMetricInsertCall), [*custom metrics list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomMetricListCall), [*custom metrics patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomMetricPatchCall), [*custom metrics update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementCustomMetricUpdateCall), [*experiments delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementExperimentDeleteCall), [*experiments get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementExperimentGetCall), [*experiments insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementExperimentInsertCall), [*experiments list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementExperimentListCall), [*experiments patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementExperimentPatchCall), [*experiments update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementExperimentUpdateCall), [*filters delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementFilterDeleteCall), [*filters get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementFilterGetCall), [*filters insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementFilterInsertCall), [*filters list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementFilterListCall), 
[*filters patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementFilterPatchCall), [*filters update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementFilterUpdateCall), [*goals get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementGoalGetCall), [*goals insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementGoalInsertCall), [*goals list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementGoalListCall), [*goals patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementGoalPatchCall), [*goals update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementGoalUpdateCall), [*profile filter links delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileFilterLinkDeleteCall), [*profile filter links get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileFilterLinkGetCall), [*profile filter links insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileFilterLinkInsertCall), [*profile filter links list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileFilterLinkListCall), [*profile filter links patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileFilterLinkPatchCall), [*profile filter links update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileFilterLinkUpdateCall), [*profile user links delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileUserLinkDeleteCall), [*profile user links insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileUserLinkInsertCall), [*profile user links 
list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileUserLinkListCall), [*profile user links update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileUserLinkUpdateCall), [*profiles delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileDeleteCall), [*profiles get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileGetCall), [*profiles insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileInsertCall), [*profiles list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileListCall), [*profiles patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfilePatchCall), [*profiles update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementProfileUpdateCall), [*remarketing audience delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementRemarketingAudienceDeleteCall), [*remarketing audience get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementRemarketingAudienceGetCall), [*remarketing audience insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementRemarketingAudienceInsertCall), [*remarketing audience list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementRemarketingAudienceListCall), [*remarketing audience patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementRemarketingAudiencePatchCall), [*remarketing audience update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementRemarketingAudienceUpdateCall), [*segments list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementSegmentListCall), [*unsampled reports 
delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUnsampledReportDeleteCall), [*unsampled reports get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUnsampledReportGetCall), [*unsampled reports insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUnsampledReportInsertCall), [*unsampled reports list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUnsampledReportListCall), [*uploads delete upload data*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUploadDeleteUploadDataCall), [*uploads get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUploadGetCall), [*uploads list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUploadListCall), [*uploads upload data*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUploadUploadDataCall), [*web property ad words links delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkDeleteCall), [*web property ad words links get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkGetCall), [*web property ad words links insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkInsertCall), [*web property ad words links list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkListCall), [*web property ad words links patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkPatchCall), [*web property ad words links update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebPropertyAdWordsLinkUpdateCall), [*webproperties 
get*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyGetCall), [*webproperties insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyInsertCall), [*webproperties list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyListCall), [*webproperties patch*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyPatchCall), [*webproperties update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyUpdateCall), [*webproperty user links delete*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyUserLinkDeleteCall), [*webproperty user links insert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyUserLinkInsertCall), [*webproperty user links list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyUserLinkListCall) and [*webproperty user links update*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementWebpropertyUserLinkUpdateCall) * metadata - * [*columns list*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::MetadataColumnListCall) + * [*columns list*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::MetadataColumnListCall) * provisioning - * [*create account ticket*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ProvisioningCreateAccountTicketCall) and [*create account tree*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ProvisioningCreateAccountTreeCall) + * [*create account ticket*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ProvisioningCreateAccountTicketCall) and [*create account 
tree*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ProvisioningCreateAccountTreeCall) * user deletion - * [*user deletion request upsert*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::UserDeletionUserDeletionRequestUpsertCall) + * [*user deletion request upsert*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::UserDeletionUserDeletionRequestUpsertCall) Upload supported by ... -* [*uploads upload data management*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/api::ManagementUploadUploadDataCall) +* [*uploads upload data management*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/api::ManagementUploadUploadDataCall) @@ -35,17 +35,17 @@ Upload supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/Analytics)** +* **[Hub](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/Analytics)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::CallBuilder) -* **[Resources](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::CallBuilder) +* **[Resources](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::Part)** + * **[Parts](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -141,17 +141,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -161,29 +161,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::Delegate) to the -[Method Builder](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::Delegate) to the +[Method Builder](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::RequestValue) and -[decodable](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::RequestValue) and +[decodable](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-analytics3/5.0.2-beta-1+20190807/google_analytics3/client::RequestValue) are moved +* [request values](https://docs.rs/google-analytics3/5.0.2+20190807/google_analytics3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/analytics3/src/api.rs b/gen/analytics3/src/api.rs index 1539b89e1c..ce0f173108 100644 --- a/gen/analytics3/src/api.rs +++ b/gen/analytics3/src/api.rs @@ -149,7 +149,7 @@ impl<'a, S> Analytics { Analytics { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/analytics/v3/".to_string(), _root_url: "https://analytics.googleapis.com/".to_string(), } @@ -172,7 +172,7 @@ impl<'a, S> Analytics { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/analytics3/src/client.rs b/gen/analytics3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/analytics3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/analytics3/src/lib.rs b/gen/analytics3/src/lib.rs index ff2b81c0ad..1fe4e33b7d 100644 --- a/gen/analytics3/src/lib.rs +++ b/gen/analytics3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *analytics* crate version *5.0.2-beta-1+20190807*, where *20190807* is the exact revision of the *analytics:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *analytics* crate version *5.0.2+20190807*, where *20190807* is the exact revision of the *analytics:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *analytics* *v3* API can be found at the //! [official documentation site](https://developers.google.com/analytics/). diff --git a/gen/analyticsadmin1_alpha-cli/Cargo.toml b/gen/analyticsadmin1_alpha-cli/Cargo.toml index c15f3224db..b5a76341d1 100644 --- a/gen/analyticsadmin1_alpha-cli/Cargo.toml +++ b/gen/analyticsadmin1_alpha-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-analyticsadmin1_alpha-cli" -version = "4.0.1+20220307" +version = "5.0.2+20220307" authors = ["Sebastian Thiel "] description = "A complete library to interact with Google Analytics Admin (protocol v1alpha)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsadmin1_alpha-cli" @@ -20,13 +20,13 @@ name = "analyticsadmin1-alpha" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-analyticsadmin1_alpha] path = "../analyticsadmin1_alpha" -version = "4.0.1+20220307" +version = "5.0.2+20220307" + diff --git a/gen/analyticsadmin1_alpha-cli/README.md b/gen/analyticsadmin1_alpha-cli/README.md index e867c23bd3..665315aa78 100644 --- a/gen/analyticsadmin1_alpha-cli/README.md 
+++ b/gen/analyticsadmin1_alpha-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Google Analytics Admin* API at revision *20220307*. The CLI is at version *4.0.1*. +This documentation was generated from the *Google Analytics Admin* API at revision *20220307*. The CLI is at version *5.0.2*. ```bash analyticsadmin1-alpha [options] diff --git a/gen/analyticsadmin1_alpha-cli/mkdocs.yml b/gen/analyticsadmin1_alpha-cli/mkdocs.yml index 123e15a672..90322c18ab 100644 --- a/gen/analyticsadmin1_alpha-cli/mkdocs.yml +++ b/gen/analyticsadmin1_alpha-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Google Analytics Admin v4.0.1+20220307 +site_name: Google Analytics Admin v5.0.2+20220307 site_url: http://byron.github.io/google-apis-rs/google-analyticsadmin1_alpha-cli site_description: A complete library to interact with Google Analytics Admin (protocol v1alpha) @@ -7,89 +7,92 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsadmin1_ docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['account-summaries_list.md', 'Account Summaries', 'List'] -- ['accounts_delete.md', 'Accounts', 'Delete'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_get-data-sharing-settings.md', 'Accounts', 'Get Data Sharing Settings'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_provision-account-ticket.md', 'Accounts', 'Provision Account Ticket'] -- ['accounts_search-change-history-events.md', 'Accounts', 'Search Change History Events'] -- ['accounts_user-links-audit.md', 'Accounts', 'User Links Audit'] -- ['accounts_user-links-batch-create.md', 'Accounts', 'User Links Batch Create'] -- ['accounts_user-links-batch-delete.md', 'Accounts', 'User Links Batch Delete'] -- ['accounts_user-links-batch-get.md', 'Accounts', 'User Links Batch Get'] -- ['accounts_user-links-batch-update.md', 
'Accounts', 'User Links Batch Update'] -- ['accounts_user-links-create.md', 'Accounts', 'User Links Create'] -- ['accounts_user-links-delete.md', 'Accounts', 'User Links Delete'] -- ['accounts_user-links-get.md', 'Accounts', 'User Links Get'] -- ['accounts_user-links-list.md', 'Accounts', 'User Links List'] -- ['accounts_user-links-patch.md', 'Accounts', 'User Links Patch'] -- ['properties_acknowledge-user-data-collection.md', 'Properties', 'Acknowledge User Data Collection'] -- ['properties_conversion-events-create.md', 'Properties', 'Conversion Events Create'] -- ['properties_conversion-events-delete.md', 'Properties', 'Conversion Events Delete'] -- ['properties_conversion-events-get.md', 'Properties', 'Conversion Events Get'] -- ['properties_conversion-events-list.md', 'Properties', 'Conversion Events List'] -- ['properties_create.md', 'Properties', 'Create'] -- ['properties_custom-dimensions-archive.md', 'Properties', 'Custom Dimensions Archive'] -- ['properties_custom-dimensions-create.md', 'Properties', 'Custom Dimensions Create'] -- ['properties_custom-dimensions-get.md', 'Properties', 'Custom Dimensions Get'] -- ['properties_custom-dimensions-list.md', 'Properties', 'Custom Dimensions List'] -- ['properties_custom-dimensions-patch.md', 'Properties', 'Custom Dimensions Patch'] -- ['properties_custom-metrics-archive.md', 'Properties', 'Custom Metrics Archive'] -- ['properties_custom-metrics-create.md', 'Properties', 'Custom Metrics Create'] -- ['properties_custom-metrics-get.md', 'Properties', 'Custom Metrics Get'] -- ['properties_custom-metrics-list.md', 'Properties', 'Custom Metrics List'] -- ['properties_custom-metrics-patch.md', 'Properties', 'Custom Metrics Patch'] -- ['properties_data-streams-create.md', 'Properties', 'Data Streams Create'] -- ['properties_data-streams-delete.md', 'Properties', 'Data Streams Delete'] -- ['properties_data-streams-get.md', 'Properties', 'Data Streams Get'] -- ['properties_data-streams-get-global-site-tag.md', 
'Properties', 'Data Streams Get Global Site Tag'] -- ['properties_data-streams-list.md', 'Properties', 'Data Streams List'] -- ['properties_data-streams-measurement-protocol-secrets-create.md', 'Properties', 'Data Streams Measurement Protocol Secrets Create'] -- ['properties_data-streams-measurement-protocol-secrets-delete.md', 'Properties', 'Data Streams Measurement Protocol Secrets Delete'] -- ['properties_data-streams-measurement-protocol-secrets-get.md', 'Properties', 'Data Streams Measurement Protocol Secrets Get'] -- ['properties_data-streams-measurement-protocol-secrets-list.md', 'Properties', 'Data Streams Measurement Protocol Secrets List'] -- ['properties_data-streams-measurement-protocol-secrets-patch.md', 'Properties', 'Data Streams Measurement Protocol Secrets Patch'] -- ['properties_data-streams-patch.md', 'Properties', 'Data Streams Patch'] -- ['properties_delete.md', 'Properties', 'Delete'] -- ['properties_display-video360-advertiser-link-proposals-approve.md', 'Properties', 'Display Video360 Advertiser Link Proposals Approve'] -- ['properties_display-video360-advertiser-link-proposals-cancel.md', 'Properties', 'Display Video360 Advertiser Link Proposals Cancel'] -- ['properties_display-video360-advertiser-link-proposals-create.md', 'Properties', 'Display Video360 Advertiser Link Proposals Create'] -- ['properties_display-video360-advertiser-link-proposals-delete.md', 'Properties', 'Display Video360 Advertiser Link Proposals Delete'] -- ['properties_display-video360-advertiser-link-proposals-get.md', 'Properties', 'Display Video360 Advertiser Link Proposals Get'] -- ['properties_display-video360-advertiser-link-proposals-list.md', 'Properties', 'Display Video360 Advertiser Link Proposals List'] -- ['properties_display-video360-advertiser-links-create.md', 'Properties', 'Display Video360 Advertiser Links Create'] -- ['properties_display-video360-advertiser-links-delete.md', 'Properties', 'Display Video360 Advertiser Links Delete'] -- 
['properties_display-video360-advertiser-links-get.md', 'Properties', 'Display Video360 Advertiser Links Get'] -- ['properties_display-video360-advertiser-links-list.md', 'Properties', 'Display Video360 Advertiser Links List'] -- ['properties_display-video360-advertiser-links-patch.md', 'Properties', 'Display Video360 Advertiser Links Patch'] -- ['properties_firebase-links-create.md', 'Properties', 'Firebase Links Create'] -- ['properties_firebase-links-delete.md', 'Properties', 'Firebase Links Delete'] -- ['properties_firebase-links-list.md', 'Properties', 'Firebase Links List'] -- ['properties_get.md', 'Properties', 'Get'] -- ['properties_get-data-retention-settings.md', 'Properties', 'Get Data Retention Settings'] -- ['properties_get-google-signals-settings.md', 'Properties', 'Get Google Signals Settings'] -- ['properties_google-ads-links-create.md', 'Properties', 'Google Ads Links Create'] -- ['properties_google-ads-links-delete.md', 'Properties', 'Google Ads Links Delete'] -- ['properties_google-ads-links-list.md', 'Properties', 'Google Ads Links List'] -- ['properties_google-ads-links-patch.md', 'Properties', 'Google Ads Links Patch'] -- ['properties_list.md', 'Properties', 'List'] -- ['properties_patch.md', 'Properties', 'Patch'] -- ['properties_update-data-retention-settings.md', 'Properties', 'Update Data Retention Settings'] -- ['properties_update-google-signals-settings.md', 'Properties', 'Update Google Signals Settings'] -- ['properties_user-links-audit.md', 'Properties', 'User Links Audit'] -- ['properties_user-links-batch-create.md', 'Properties', 'User Links Batch Create'] -- ['properties_user-links-batch-delete.md', 'Properties', 'User Links Batch Delete'] -- ['properties_user-links-batch-get.md', 'Properties', 'User Links Batch Get'] -- ['properties_user-links-batch-update.md', 'Properties', 'User Links Batch Update'] -- ['properties_user-links-create.md', 'Properties', 'User Links Create'] -- ['properties_user-links-delete.md', 'Properties', 'User 
Links Delete'] -- ['properties_user-links-get.md', 'Properties', 'User Links Get'] -- ['properties_user-links-list.md', 'Properties', 'User Links List'] -- ['properties_user-links-patch.md', 'Properties', 'User Links Patch'] +nav: +- Home: 'index.md' +- 'Account Summaries': + - 'List': 'account-summaries_list.md' +- 'Accounts': + - 'Delete': 'accounts_delete.md' + - 'Get': 'accounts_get.md' + - 'Get Data Sharing Settings': 'accounts_get-data-sharing-settings.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' + - 'Provision Account Ticket': 'accounts_provision-account-ticket.md' + - 'Search Change History Events': 'accounts_search-change-history-events.md' + - 'User Links Audit': 'accounts_user-links-audit.md' + - 'User Links Batch Create': 'accounts_user-links-batch-create.md' + - 'User Links Batch Delete': 'accounts_user-links-batch-delete.md' + - 'User Links Batch Get': 'accounts_user-links-batch-get.md' + - 'User Links Batch Update': 'accounts_user-links-batch-update.md' + - 'User Links Create': 'accounts_user-links-create.md' + - 'User Links Delete': 'accounts_user-links-delete.md' + - 'User Links Get': 'accounts_user-links-get.md' + - 'User Links List': 'accounts_user-links-list.md' + - 'User Links Patch': 'accounts_user-links-patch.md' +- 'Properties': + - 'Acknowledge User Data Collection': 'properties_acknowledge-user-data-collection.md' + - 'Conversion Events Create': 'properties_conversion-events-create.md' + - 'Conversion Events Delete': 'properties_conversion-events-delete.md' + - 'Conversion Events Get': 'properties_conversion-events-get.md' + - 'Conversion Events List': 'properties_conversion-events-list.md' + - 'Create': 'properties_create.md' + - 'Custom Dimensions Archive': 'properties_custom-dimensions-archive.md' + - 'Custom Dimensions Create': 'properties_custom-dimensions-create.md' + - 'Custom Dimensions Get': 'properties_custom-dimensions-get.md' + - 'Custom Dimensions List': 'properties_custom-dimensions-list.md' + - 'Custom 
Dimensions Patch': 'properties_custom-dimensions-patch.md' + - 'Custom Metrics Archive': 'properties_custom-metrics-archive.md' + - 'Custom Metrics Create': 'properties_custom-metrics-create.md' + - 'Custom Metrics Get': 'properties_custom-metrics-get.md' + - 'Custom Metrics List': 'properties_custom-metrics-list.md' + - 'Custom Metrics Patch': 'properties_custom-metrics-patch.md' + - 'Data Streams Create': 'properties_data-streams-create.md' + - 'Data Streams Delete': 'properties_data-streams-delete.md' + - 'Data Streams Get': 'properties_data-streams-get.md' + - 'Data Streams Get Global Site Tag': 'properties_data-streams-get-global-site-tag.md' + - 'Data Streams List': 'properties_data-streams-list.md' + - 'Data Streams Measurement Protocol Secrets Create': 'properties_data-streams-measurement-protocol-secrets-create.md' + - 'Data Streams Measurement Protocol Secrets Delete': 'properties_data-streams-measurement-protocol-secrets-delete.md' + - 'Data Streams Measurement Protocol Secrets Get': 'properties_data-streams-measurement-protocol-secrets-get.md' + - 'Data Streams Measurement Protocol Secrets List': 'properties_data-streams-measurement-protocol-secrets-list.md' + - 'Data Streams Measurement Protocol Secrets Patch': 'properties_data-streams-measurement-protocol-secrets-patch.md' + - 'Data Streams Patch': 'properties_data-streams-patch.md' + - 'Delete': 'properties_delete.md' + - 'Display Video360 Advertiser Link Proposals Approve': 'properties_display-video360-advertiser-link-proposals-approve.md' + - 'Display Video360 Advertiser Link Proposals Cancel': 'properties_display-video360-advertiser-link-proposals-cancel.md' + - 'Display Video360 Advertiser Link Proposals Create': 'properties_display-video360-advertiser-link-proposals-create.md' + - 'Display Video360 Advertiser Link Proposals Delete': 'properties_display-video360-advertiser-link-proposals-delete.md' + - 'Display Video360 Advertiser Link Proposals Get': 
'properties_display-video360-advertiser-link-proposals-get.md' + - 'Display Video360 Advertiser Link Proposals List': 'properties_display-video360-advertiser-link-proposals-list.md' + - 'Display Video360 Advertiser Links Create': 'properties_display-video360-advertiser-links-create.md' + - 'Display Video360 Advertiser Links Delete': 'properties_display-video360-advertiser-links-delete.md' + - 'Display Video360 Advertiser Links Get': 'properties_display-video360-advertiser-links-get.md' + - 'Display Video360 Advertiser Links List': 'properties_display-video360-advertiser-links-list.md' + - 'Display Video360 Advertiser Links Patch': 'properties_display-video360-advertiser-links-patch.md' + - 'Firebase Links Create': 'properties_firebase-links-create.md' + - 'Firebase Links Delete': 'properties_firebase-links-delete.md' + - 'Firebase Links List': 'properties_firebase-links-list.md' + - 'Get': 'properties_get.md' + - 'Get Data Retention Settings': 'properties_get-data-retention-settings.md' + - 'Get Google Signals Settings': 'properties_get-google-signals-settings.md' + - 'Google Ads Links Create': 'properties_google-ads-links-create.md' + - 'Google Ads Links Delete': 'properties_google-ads-links-delete.md' + - 'Google Ads Links List': 'properties_google-ads-links-list.md' + - 'Google Ads Links Patch': 'properties_google-ads-links-patch.md' + - 'List': 'properties_list.md' + - 'Patch': 'properties_patch.md' + - 'Update Data Retention Settings': 'properties_update-data-retention-settings.md' + - 'Update Google Signals Settings': 'properties_update-google-signals-settings.md' + - 'User Links Audit': 'properties_user-links-audit.md' + - 'User Links Batch Create': 'properties_user-links-batch-create.md' + - 'User Links Batch Delete': 'properties_user-links-batch-delete.md' + - 'User Links Batch Get': 'properties_user-links-batch-get.md' + - 'User Links Batch Update': 'properties_user-links-batch-update.md' + - 'User Links Create': 'properties_user-links-create.md' + - 
'User Links Delete': 'properties_user-links-delete.md' + - 'User Links Get': 'properties_user-links-get.md' + - 'User Links List': 'properties_user-links-list.md' + - 'User Links Patch': 'properties_user-links-patch.md' theme: readthedocs diff --git a/gen/analyticsadmin1_alpha-cli/src/client.rs b/gen/analyticsadmin1_alpha-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/analyticsadmin1_alpha-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/analyticsadmin1_alpha-cli/src/main.rs b/gen/analyticsadmin1_alpha-cli/src/main.rs index e518284632..c7480bb8af 100644 --- a/gen/analyticsadmin1_alpha-cli/src/main.rs +++ b/gen/analyticsadmin1_alpha-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_analyticsadmin1_alpha::{api, Error, oauth2}; +use google_analyticsadmin1_alpha::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -273,13 +272,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -373,7 +372,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1042,7 +1041,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "notify-new-user" => { - call = call.notify_new_user(arg_from_str(value.unwrap_or("false"), err, "notify-new-user", "boolean")); + call = call.notify_new_user( value.map(|v| arg_from_str(v, err, "notify-new-user", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1205,7 +1204,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1629,7 +1628,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2010,7 +2009,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2104,7 +2103,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2390,7 +2389,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2485,7 +2484,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2796,7 +2795,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3046,7 +3045,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3137,7 +3136,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3237,7 +3236,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3714,7 +3713,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", 
"integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3967,7 +3966,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4061,7 +4060,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4259,7 +4258,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4617,7 +4616,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4712,7 +4711,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4768,13 +4767,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| 
arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4877,7 +4876,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4968,7 +4967,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5059,7 +5058,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5545,7 +5544,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "notify-new-user" => { - call = call.notify_new_user(arg_from_str(value.unwrap_or("false"), err, "notify-new-user", "boolean")); + call = call.notify_new_user( value.map(|v| arg_from_str(v, err, "notify-new-user", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5708,7 +5707,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, 
"page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8188,7 +8187,7 @@ async fn main() { let mut app = App::new("analyticsadmin1-alpha") .author("Sebastian Thiel ") - .version("4.0.1+20220307") + .version("5.0.2+20220307") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_analyticsadmin1_alpha_cli") .arg(Arg::with_name("url") diff --git a/gen/analyticsadmin1_alpha/Cargo.toml b/gen/analyticsadmin1_alpha/Cargo.toml index 9914cc4aed..ca9847c9d8 100644 --- a/gen/analyticsadmin1_alpha/Cargo.toml +++ b/gen/analyticsadmin1_alpha/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-analyticsadmin1_alpha" -version = "5.0.2-beta-1+20220307" +version = "5.0.2+20220307" authors = ["Sebastian Thiel "] description = "A complete library to interact with Google Analytics Admin (protocol v1alpha)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsadmin1_alpha" homepage = "http://code.google.com/apis/analytics/docs/mgmt/home.html" -documentation = "https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307" +documentation = "https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307" license = "MIT" keywords = ["analyticsadmin", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/analyticsadmin1_alpha/README.md b/gen/analyticsadmin1_alpha/README.md index fe5234f802..2210106b9f 100644 --- a/gen/analyticsadmin1_alpha/README.md +++ b/gen/analyticsadmin1_alpha/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-analyticsadmin1_alpha` library allows access to all features of the *Google Google Analytics Admin* service. -This documentation was generated from *Google Analytics Admin* crate version *5.0.2-beta-1+20220307*, where *20220307* is the exact revision of the *analyticsadmin:v1alpha* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Google Analytics Admin* crate version *5.0.2+20220307*, where *20220307* is the exact revision of the *analyticsadmin:v1alpha* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Google Analytics Admin* *v1_alpha* API can be found at the [official documentation site](http://code.google.com/apis/analytics/docs/mgmt/home.html). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/GoogleAnalyticsAdmin) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/GoogleAnalyticsAdmin) ... * account summaries - * [*list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountSummaryListCall) + * [*list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountSummaryListCall) * accounts - * [*delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountDeleteCall), [*get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountGetCall), [*get data sharing settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountGetDataSharingSettingCall), [*list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountListCall), [*patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountPatchCall), [*provision account ticket*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountProvisionAccountTicketCall), [*search change history 
events*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountSearchChangeHistoryEventCall), [*user links audit*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkAuditCall), [*user links batch create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkBatchCreateCall), [*user links batch delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkBatchDeleteCall), [*user links batch get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkBatchGetCall), [*user links batch update*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkBatchUpdateCall), [*user links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkCreateCall), [*user links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkDeleteCall), [*user links get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkGetCall), [*user links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkListCall) and [*user links patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkPatchCall) + * [*delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountDeleteCall), [*get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountGetCall), [*get data sharing 
settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountGetDataSharingSettingCall), [*list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountListCall), [*patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountPatchCall), [*provision account ticket*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountProvisionAccountTicketCall), [*search change history events*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountSearchChangeHistoryEventCall), [*user links audit*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkAuditCall), [*user links batch create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkBatchCreateCall), [*user links batch delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkBatchDeleteCall), [*user links batch get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkBatchGetCall), [*user links batch update*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkBatchUpdateCall), [*user links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkCreateCall), [*user links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkDeleteCall), [*user links get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkGetCall), [*user links 
list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkListCall) and [*user links patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::AccountUserLinkPatchCall) * properties - * [*acknowledge user data collection*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyAcknowledgeUserDataCollectionCall), [*conversion events create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyConversionEventCreateCall), [*conversion events delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyConversionEventDeleteCall), [*conversion events get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyConversionEventGetCall), [*conversion events list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyConversionEventListCall), [*create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCreateCall), [*custom dimensions archive*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionArchiveCall), [*custom dimensions create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionCreateCall), [*custom dimensions get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionGetCall), [*custom dimensions list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionListCall), [*custom dimensions 
patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionPatchCall), [*custom metrics archive*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricArchiveCall), [*custom metrics create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricCreateCall), [*custom metrics get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricGetCall), [*custom metrics list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricListCall), [*custom metrics patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricPatchCall), [*data streams create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamCreateCall), [*data streams delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamDeleteCall), [*data streams get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamGetCall), [*data streams get global site tag*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamGetGlobalSiteTagCall), [*data streams list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamListCall), [*data streams measurement protocol secrets create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretCreateCall), [*data streams measurement protocol secrets 
delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretDeleteCall), [*data streams measurement protocol secrets get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretGetCall), [*data streams measurement protocol secrets list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretListCall), [*data streams measurement protocol secrets patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretPatchCall), [*data streams patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamPatchCall), [*delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDeleteCall), [*display video360 advertiser link proposals approve*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalApproveCall), [*display video360 advertiser link proposals cancel*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalCancelCall), [*display video360 advertiser link proposals create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalCreateCall), [*display video360 advertiser link proposals delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalDeleteCall), [*display video360 advertiser link proposals 
get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalGetCall), [*display video360 advertiser link proposals list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalListCall), [*display video360 advertiser links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkCreateCall), [*display video360 advertiser links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkDeleteCall), [*display video360 advertiser links get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkGetCall), [*display video360 advertiser links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkListCall), [*display video360 advertiser links patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkPatchCall), [*firebase links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyFirebaseLinkCreateCall), [*firebase links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyFirebaseLinkDeleteCall), [*firebase links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyFirebaseLinkListCall), [*get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyGetCall), [*get data retention 
settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyGetDataRetentionSettingCall), [*get google signals settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyGetGoogleSignalsSettingCall), [*google ads links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyGoogleAdsLinkCreateCall), [*google ads links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyGoogleAdsLinkDeleteCall), [*google ads links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyGoogleAdsLinkListCall), [*google ads links patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyGoogleAdsLinkPatchCall), [*list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyListCall), [*patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyPatchCall), [*update data retention settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUpdateDataRetentionSettingCall), [*update google signals settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUpdateGoogleSignalsSettingCall), [*user links audit*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkAuditCall), [*user links batch create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkBatchCreateCall), [*user links batch 
delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkBatchDeleteCall), [*user links batch get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkBatchGetCall), [*user links batch update*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkBatchUpdateCall), [*user links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkCreateCall), [*user links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkDeleteCall), [*user links get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkGetCall), [*user links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkListCall) and [*user links patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkPatchCall) + * [*acknowledge user data collection*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyAcknowledgeUserDataCollectionCall), [*conversion events create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyConversionEventCreateCall), [*conversion events delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyConversionEventDeleteCall), [*conversion events get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyConversionEventGetCall), [*conversion events 
list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyConversionEventListCall), [*create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCreateCall), [*custom dimensions archive*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionArchiveCall), [*custom dimensions create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionCreateCall), [*custom dimensions get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionGetCall), [*custom dimensions list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionListCall), [*custom dimensions patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomDimensionPatchCall), [*custom metrics archive*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricArchiveCall), [*custom metrics create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricCreateCall), [*custom metrics get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricGetCall), [*custom metrics list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricListCall), [*custom metrics patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyCustomMetricPatchCall), [*data streams create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamCreateCall), [*data streams 
delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamDeleteCall), [*data streams get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamGetCall), [*data streams get global site tag*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamGetGlobalSiteTagCall), [*data streams list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamListCall), [*data streams measurement protocol secrets create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretCreateCall), [*data streams measurement protocol secrets delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretDeleteCall), [*data streams measurement protocol secrets get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretGetCall), [*data streams measurement protocol secrets list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretListCall), [*data streams measurement protocol secrets patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamMeasurementProtocolSecretPatchCall), [*data streams patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDataStreamPatchCall), [*delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDeleteCall), [*display video360 advertiser link proposals 
approve*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalApproveCall), [*display video360 advertiser link proposals cancel*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalCancelCall), [*display video360 advertiser link proposals create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalCreateCall), [*display video360 advertiser link proposals delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalDeleteCall), [*display video360 advertiser link proposals get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalGetCall), [*display video360 advertiser link proposals list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkProposalListCall), [*display video360 advertiser links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkCreateCall), [*display video360 advertiser links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkDeleteCall), [*display video360 advertiser links get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkGetCall), [*display video360 advertiser links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkListCall), [*display video360 advertiser links 
patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyDisplayVideo360AdvertiserLinkPatchCall), [*firebase links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyFirebaseLinkCreateCall), [*firebase links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyFirebaseLinkDeleteCall), [*firebase links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyFirebaseLinkListCall), [*get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyGetCall), [*get data retention settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyGetDataRetentionSettingCall), [*get google signals settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyGetGoogleSignalsSettingCall), [*google ads links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyGoogleAdsLinkCreateCall), [*google ads links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyGoogleAdsLinkDeleteCall), [*google ads links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyGoogleAdsLinkListCall), [*google ads links patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyGoogleAdsLinkPatchCall), [*list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyListCall), [*patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyPatchCall), [*update data retention 
settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUpdateDataRetentionSettingCall), [*update google signals settings*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUpdateGoogleSignalsSettingCall), [*user links audit*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkAuditCall), [*user links batch create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkBatchCreateCall), [*user links batch delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkBatchDeleteCall), [*user links batch get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkBatchGetCall), [*user links batch update*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkBatchUpdateCall), [*user links create*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkCreateCall), [*user links delete*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkDeleteCall), [*user links get*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkGetCall), [*user links list*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkListCall) and [*user links patch*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/api::PropertyUserLinkPatchCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/GoogleAnalyticsAdmin)** +* **[Hub](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/GoogleAnalyticsAdmin)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::CallBuilder) -* **[Resources](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::CallBuilder) +* **[Resources](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::Part)** + * **[Parts](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::CallBuilder)** +* **[Activities](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to 
further categorize them and ease browsing. @@ -141,17 +141,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -161,29 +161,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::Delegate) to the -[Method Builder](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::Delegate) to the +[Method Builder](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::RequestValue) and -[decodable](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::RequestValue) and +[decodable](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-analyticsadmin1_alpha/5.0.2-beta-1+20220307/google_analyticsadmin1_alpha/client::RequestValue) are moved +* [request values](https://docs.rs/google-analyticsadmin1_alpha/5.0.2+20220307/google_analyticsadmin1_alpha/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/analyticsadmin1_alpha/src/api.rs b/gen/analyticsadmin1_alpha/src/api.rs index ecbcef5a84..5118a67112 100644 --- a/gen/analyticsadmin1_alpha/src/api.rs +++ b/gen/analyticsadmin1_alpha/src/api.rs @@ -137,7 +137,7 @@ impl<'a, S> GoogleAnalyticsAdmin { GoogleAnalyticsAdmin { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://analyticsadmin.googleapis.com/".to_string(), _root_url: "https://analyticsadmin.googleapis.com/".to_string(), } @@ -154,7 +154,7 @@ impl<'a, S> GoogleAnalyticsAdmin { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/analyticsadmin1_alpha/src/client.rs b/gen/analyticsadmin1_alpha/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/analyticsadmin1_alpha/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/analyticsadmin1_alpha/src/lib.rs b/gen/analyticsadmin1_alpha/src/lib.rs index d4862267c9..4ccb647135 100644 --- a/gen/analyticsadmin1_alpha/src/lib.rs +++ b/gen/analyticsadmin1_alpha/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Google Analytics Admin* crate version *5.0.2-beta-1+20220307*, where *20220307* is the exact revision of the *analyticsadmin:v1alpha* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Google Analytics Admin* crate version *5.0.2+20220307*, where *20220307* is the exact revision of the *analyticsadmin:v1alpha* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Google Analytics Admin* *v1_alpha* API can be found at the //! [official documentation site](http://code.google.com/apis/analytics/docs/mgmt/home.html). diff --git a/gen/analyticsdata1_beta-cli/Cargo.toml b/gen/analyticsdata1_beta-cli/Cargo.toml index a098aa5e3c..83d459b6ed 100644 --- a/gen/analyticsdata1_beta-cli/Cargo.toml +++ b/gen/analyticsdata1_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-analyticsdata1_beta-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with AnalyticsData (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsdata1_beta-cli" @@ -20,13 +20,13 @@ name = "analyticsdata1-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-analyticsdata1_beta] path = "../analyticsdata1_beta" -version = "4.0.1+20220303" +version = "5.0.2+20230123" + diff --git a/gen/analyticsdata1_beta-cli/README.md b/gen/analyticsdata1_beta-cli/README.md index ccefbf8246..54d75779d2 
100644 --- a/gen/analyticsdata1_beta-cli/README.md +++ b/gen/analyticsdata1_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *AnalyticsData* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *AnalyticsData* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash analyticsdata1-beta [options] diff --git a/gen/analyticsdata1_beta-cli/mkdocs.yml b/gen/analyticsdata1_beta-cli/mkdocs.yml index e1de103604..974bccf603 100644 --- a/gen/analyticsdata1_beta-cli/mkdocs.yml +++ b/gen/analyticsdata1_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: AnalyticsData v4.0.1+20220303 +site_name: AnalyticsData v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-analyticsdata1_beta-cli site_description: A complete library to interact with AnalyticsData (protocol v1beta) @@ -7,15 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsdata1_b docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['properties_batch-run-pivot-reports.md', 'Properties', 'Batch Run Pivot Reports'] -- ['properties_batch-run-reports.md', 'Properties', 'Batch Run Reports'] -- ['properties_check-compatibility.md', 'Properties', 'Check Compatibility'] -- ['properties_get-metadata.md', 'Properties', 'Get Metadata'] -- ['properties_run-pivot-report.md', 'Properties', 'Run Pivot Report'] -- ['properties_run-realtime-report.md', 'Properties', 'Run Realtime Report'] -- ['properties_run-report.md', 'Properties', 'Run Report'] +nav: +- Home: 'index.md' +- 'Properties': + - 'Batch Run Pivot Reports': 'properties_batch-run-pivot-reports.md' + - 'Batch Run Reports': 'properties_batch-run-reports.md' + - 'Check Compatibility': 'properties_check-compatibility.md' + - 'Get Metadata': 'properties_get-metadata.md' + - 'Run Pivot Report': 'properties_run-pivot-report.md' + - 'Run Realtime 
Report': 'properties_run-realtime-report.md' + - 'Run Report': 'properties_run-report.md' theme: readthedocs diff --git a/gen/analyticsdata1_beta-cli/src/client.rs b/gen/analyticsdata1_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/analyticsdata1_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/analyticsdata1_beta-cli/src/main.rs b/gen/analyticsdata1_beta-cli/src/main.rs index 8dea9633a9..b0ea4b32aa 100644 --- a/gen/analyticsdata1_beta-cli/src/main.rs +++ b/gen/analyticsdata1_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_analyticsdata1_beta::{api, Error, oauth2}; +use google_analyticsdata1_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -904,7 +903,7 @@ async fn main() { vec![ (Some(r##"property"##), None, - Some(r##"A Google Analytics GA4 property identifier whose events are tracked. To learn more, see [where to find your Property ID](https://developers.google.com/analytics/devguides/reporting/data/v1/property-id). `property` should be the same value as in your `runReport` request. Example: properties/1234 Set the Property ID to 0 for compatibility checking on dimensions and metrics common to all properties. In this special mode, this method will not return custom dimensions and metrics."##), + Some(r##"A Google Analytics GA4 property identifier whose events are tracked. To learn more, see [where to find your Property ID](https://developers.google.com/analytics/devguides/reporting/data/v1/property-id). `property` should be the same value as in your `runReport` request. 
Example: properties/1234"##), Some(true), Some(false)), @@ -977,7 +976,7 @@ async fn main() { Some(false)), ]), ("run-realtime-report", - Some(r##"The Google Analytics Realtime API returns a customized report of realtime event data for your property. These reports show events and usage from the last 30 minutes."##), + Some(r##"Returns a customized report of realtime event data for your property. Events appear in realtime reports seconds after they have been sent to the Google Analytics. Realtime reports show events and usage data for the periods of time ranging from the present moment to 30 minutes ago (up to 60 minutes for Google Analytics 360 properties). For a guide to constructing realtime requests & understanding responses, see [Creating a Realtime Report](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-basics)."##), "Details at http://byron.github.io/google-apis-rs/google_analyticsdata1_beta_cli/properties_run-realtime-report", vec![ (Some(r##"property"##), @@ -1005,7 +1004,7 @@ async fn main() { Some(false)), ]), ("run-report", - Some(r##"Returns a customized report of your Google Analytics event data. Reports contain statistics derived from data collected by the Google Analytics tracking code. The data returned from the API is as a table with columns for the requested dimensions and metrics. Metrics are individual measurements of user activity on your property, such as active users or event count. Dimensions break down metrics across some common criteria, such as country or event name."##), + Some(r##"Returns a customized report of your Google Analytics event data. Reports contain statistics derived from data collected by the Google Analytics tracking code. The data returned from the API is as a table with columns for the requested dimensions and metrics. Metrics are individual measurements of user activity on your property, such as active users or event count. 
Dimensions break down metrics across some common criteria, such as country or event name. For a guide to constructing requests & understanding responses, see [Creating a Report](https://developers.google.com/analytics/devguides/reporting/data/v1/basics)."##), "Details at http://byron.github.io/google-apis-rs/google_analyticsdata1_beta_cli/properties_run-report", vec![ (Some(r##"property"##), @@ -1038,7 +1037,7 @@ async fn main() { let mut app = App::new("analyticsdata1-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230123") .about("Accesses report data in Google Analytics.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_analyticsdata1_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/analyticsdata1_beta/Cargo.toml b/gen/analyticsdata1_beta/Cargo.toml index e96aa088d0..7e5add005a 100644 --- a/gen/analyticsdata1_beta/Cargo.toml +++ b/gen/analyticsdata1_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-analyticsdata1_beta" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with AnalyticsData (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsdata1_beta" homepage = "https://developers.google.com/analytics/devguides/reporting/data/v1/" -documentation = "https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123" license = "MIT" keywords = ["analyticsdata", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/analyticsdata1_beta/README.md b/gen/analyticsdata1_beta/README.md index c2d8b9fd0c..d13bedb185 100644 --- a/gen/analyticsdata1_beta/README.md +++ b/gen/analyticsdata1_beta/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-analyticsdata1_beta` library allows access to all features of the *Google AnalyticsData* service. 
-This documentation was generated from *AnalyticsData* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *analyticsdata:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *AnalyticsData* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *analyticsdata:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *AnalyticsData* *v1_beta* API can be found at the [official documentation site](https://developers.google.com/analytics/devguides/reporting/data/v1/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/AnalyticsData) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/AnalyticsData) ... 
* properties - * [*batch run pivot reports*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/api::PropertyBatchRunPivotReportCall), [*batch run reports*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/api::PropertyBatchRunReportCall), [*check compatibility*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/api::PropertyCheckCompatibilityCall), [*get metadata*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/api::PropertyGetMetadataCall), [*run pivot report*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/api::PropertyRunPivotReportCall), [*run realtime report*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/api::PropertyRunRealtimeReportCall) and [*run report*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/api::PropertyRunReportCall) + * [*batch run pivot reports*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/api::PropertyBatchRunPivotReportCall), [*batch run reports*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/api::PropertyBatchRunReportCall), [*check compatibility*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/api::PropertyCheckCompatibilityCall), [*get metadata*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/api::PropertyGetMetadataCall), [*run pivot report*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/api::PropertyRunPivotReportCall), [*run realtime report*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/api::PropertyRunRealtimeReportCall) and [*run 
report*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/api::PropertyRunReportCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/AnalyticsData)** +* **[Hub](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/AnalyticsData)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::Part)** + * **[Parts](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::RequestValue) and -[decodable](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::RequestValue) and +[decodable](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-analyticsdata1_beta/5.0.2-beta-1+20230123/google_analyticsdata1_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-analyticsdata1_beta/5.0.2+20230123/google_analyticsdata1_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/analyticsdata1_beta/src/api.rs b/gen/analyticsdata1_beta/src/api.rs index 6ea8e3e356..b5eefb9ce1 100644 --- a/gen/analyticsdata1_beta/src/api.rs +++ b/gen/analyticsdata1_beta/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> AnalyticsData { AnalyticsData { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://analyticsdata.googleapis.com/".to_string(), _root_url: "https://analyticsdata.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> AnalyticsData { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/analyticsdata1_beta/src/client.rs b/gen/analyticsdata1_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/analyticsdata1_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/analyticsdata1_beta/src/lib.rs b/gen/analyticsdata1_beta/src/lib.rs index 5ec07f441e..75ccbd4639 100644 --- a/gen/analyticsdata1_beta/src/lib.rs +++ b/gen/analyticsdata1_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *AnalyticsData* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *analyticsdata:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *AnalyticsData* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *analyticsdata:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *AnalyticsData* *v1_beta* API can be found at the //! [official documentation site](https://developers.google.com/analytics/devguides/reporting/data/v1/). diff --git a/gen/analyticsreporting4-cli/Cargo.toml b/gen/analyticsreporting4-cli/Cargo.toml index 5345f3036a..965667c210 100644 --- a/gen/analyticsreporting4-cli/Cargo.toml +++ b/gen/analyticsreporting4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-analyticsreporting4-cli" -version = "4.0.1+20220215" +version = "5.0.2+20221104" authors = ["Sebastian Thiel "] description = "A complete library to interact with AnalyticsReporting (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsreporting4-cli" @@ -20,13 +20,13 @@ name = "analyticsreporting4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-analyticsreporting4] path = "../analyticsreporting4" -version = "4.0.1+20220215" +version = "5.0.2+20221104" + diff --git a/gen/analyticsreporting4-cli/README.md b/gen/analyticsreporting4-cli/README.md index 89c75c0918..75692f2f8b 100644 --- 
a/gen/analyticsreporting4-cli/README.md +++ b/gen/analyticsreporting4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *AnalyticsReporting* API at revision *20220215*. The CLI is at version *4.0.1*. +This documentation was generated from the *AnalyticsReporting* API at revision *20221104*. The CLI is at version *5.0.2*. ```bash analyticsreporting4 [options] diff --git a/gen/analyticsreporting4-cli/mkdocs.yml b/gen/analyticsreporting4-cli/mkdocs.yml index c45d4b04dc..6f34cbce24 100644 --- a/gen/analyticsreporting4-cli/mkdocs.yml +++ b/gen/analyticsreporting4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: AnalyticsReporting v4.0.1+20220215 +site_name: AnalyticsReporting v5.0.2+20221104 site_url: http://byron.github.io/google-apis-rs/google-analyticsreporting4-cli site_description: A complete library to interact with AnalyticsReporting (protocol v4) @@ -7,10 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsreporti docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['reports_batch-get.md', 'Reports', 'Batch Get'] -- ['user-activity_search.md', 'User Activity', 'Search'] +nav: +- Home: 'index.md' +- 'Reports': + - 'Batch Get': 'reports_batch-get.md' +- 'User Activity': + - 'Search': 'user-activity_search.md' theme: readthedocs diff --git a/gen/analyticsreporting4-cli/src/client.rs b/gen/analyticsreporting4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/analyticsreporting4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use 
std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/analyticsreporting4-cli/src/main.rs b/gen/analyticsreporting4-cli/src/main.rs index 02f969e771..37a563c61e 100644 --- a/gen/analyticsreporting4-cli/src/main.rs +++ b/gen/analyticsreporting4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_analyticsreporting4::{api, Error, oauth2}; +use google_analyticsreporting4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -382,7 +381,7 @@ async fn main() { let mut app = App::new("analyticsreporting4") .author("Sebastian Thiel ") - .version("4.0.1+20220215") + .version("5.0.2+20221104") .about("Accesses Analytics report data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_analyticsreporting4_cli") .arg(Arg::with_name("url") diff --git a/gen/analyticsreporting4/Cargo.toml b/gen/analyticsreporting4/Cargo.toml index bec3ba5523..b2d81250c2 100644 --- a/gen/analyticsreporting4/Cargo.toml +++ b/gen/analyticsreporting4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-analyticsreporting4" -version = "5.0.2-beta-1+20221104" +version = "5.0.2+20221104" authors = ["Sebastian Thiel "] description = "A complete library to interact with AnalyticsReporting (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/analyticsreporting4" homepage = "https://developers.google.com/analytics/devguides/reporting/core/v4/" -documentation = 
"https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104" +documentation = "https://docs.rs/google-analyticsreporting4/5.0.2+20221104" license = "MIT" keywords = ["analyticsreporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/analyticsreporting4/README.md b/gen/analyticsreporting4/README.md index edeee329a7..fd52901df4 100644 --- a/gen/analyticsreporting4/README.md +++ b/gen/analyticsreporting4/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-analyticsreporting4` library allows access to all features of the *Google AnalyticsReporting* service. -This documentation was generated from *AnalyticsReporting* crate version *5.0.2-beta-1+20221104*, where *20221104* is the exact revision of the *analyticsreporting:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *AnalyticsReporting* crate version *5.0.2+20221104*, where *20221104* is the exact revision of the *analyticsreporting:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *AnalyticsReporting* *v4* API can be found at the [official documentation site](https://developers.google.com/analytics/devguides/reporting/core/v4/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/AnalyticsReporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/AnalyticsReporting) ... 
-* [reports](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/api::Report) - * [*batch get*](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/api::ReportBatchGetCall) +* [reports](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/api::Report) + * [*batch get*](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/api::ReportBatchGetCall) * user activity - * [*search*](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/api::UserActivitySearchCall) + * [*search*](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/api::UserActivitySearchCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/AnalyticsReporting)** +* **[Hub](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/AnalyticsReporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::CallBuilder) -* **[Resources](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::CallBuilder) +* **[Resources](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::Part)** + * **[Parts](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::Delegate) to the -[Method Builder](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::Delegate) to the +[Method Builder](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::RequestValue) and -[decodable](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::RequestValue) and +[decodable](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-analyticsreporting4/5.0.2-beta-1+20221104/google_analyticsreporting4/client::RequestValue) are moved +* [request values](https://docs.rs/google-analyticsreporting4/5.0.2+20221104/google_analyticsreporting4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/analyticsreporting4/src/api.rs b/gen/analyticsreporting4/src/api.rs index d144ae227b..b173db6321 100644 --- a/gen/analyticsreporting4/src/api.rs +++ b/gen/analyticsreporting4/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> AnalyticsReporting { AnalyticsReporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://analyticsreporting.googleapis.com/".to_string(), _root_url: "https://analyticsreporting.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> AnalyticsReporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/analyticsreporting4/src/client.rs b/gen/analyticsreporting4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/analyticsreporting4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// 
Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/analyticsreporting4/src/lib.rs b/gen/analyticsreporting4/src/lib.rs index 7423bd8bbf..4bfa767ea0 100644 --- a/gen/analyticsreporting4/src/lib.rs +++ b/gen/analyticsreporting4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *AnalyticsReporting* crate version *5.0.2-beta-1+20221104*, where *20221104* is the exact revision of the *analyticsreporting:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *AnalyticsReporting* crate version *5.0.2+20221104*, where *20221104* is the exact revision of the *analyticsreporting:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *AnalyticsReporting* *v4* API can be found at the //! [official documentation site](https://developers.google.com/analytics/devguides/reporting/core/v4/). diff --git a/gen/androiddeviceprovisioning1-cli/Cargo.toml b/gen/androiddeviceprovisioning1-cli/Cargo.toml index f74ee7dd19..002427dad0 100644 --- a/gen/androiddeviceprovisioning1-cli/Cargo.toml +++ b/gen/androiddeviceprovisioning1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-androiddeviceprovisioning1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Provisioning Partner (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androiddeviceprovisioning1-cli" @@ -20,13 +20,13 @@ name = "androiddeviceprovisioning1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-androiddeviceprovisioning1] path = "../androiddeviceprovisioning1" -version = "4.0.1+20220305" +version = "5.0.2+20230115" + diff --git a/gen/androiddeviceprovisioning1-cli/README.md 
b/gen/androiddeviceprovisioning1-cli/README.md index 13cccf0a5b..ef812351dc 100644 --- a/gen/androiddeviceprovisioning1-cli/README.md +++ b/gen/androiddeviceprovisioning1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Android Provisioning Partner* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Android Provisioning Partner* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash androiddeviceprovisioning1 [options] diff --git a/gen/androiddeviceprovisioning1-cli/mkdocs.yml b/gen/androiddeviceprovisioning1-cli/mkdocs.yml index d6c944ad03..4bc781e898 100644 --- a/gen/androiddeviceprovisioning1-cli/mkdocs.yml +++ b/gen/androiddeviceprovisioning1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Android Provisioning Partner v4.0.1+20220305 +site_name: Android Provisioning Partner v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-androiddeviceprovisioning1-cli site_description: A complete library to interact with Android Provisioning Partner (protocol v1) @@ -7,34 +7,37 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/androiddevicepro docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['customers_configurations-create.md', 'Customers', 'Configurations Create'] -- ['customers_configurations-delete.md', 'Customers', 'Configurations Delete'] -- ['customers_configurations-get.md', 'Customers', 'Configurations Get'] -- ['customers_configurations-list.md', 'Customers', 'Configurations List'] -- ['customers_configurations-patch.md', 'Customers', 'Configurations Patch'] -- ['customers_devices-apply-configuration.md', 'Customers', 'Devices Apply Configuration'] -- ['customers_devices-get.md', 'Customers', 'Devices Get'] -- ['customers_devices-list.md', 'Customers', 'Devices List'] -- ['customers_devices-remove-configuration.md', 'Customers', 
'Devices Remove Configuration'] -- ['customers_devices-unclaim.md', 'Customers', 'Devices Unclaim'] -- ['customers_dpcs-list.md', 'Customers', 'Dpcs List'] -- ['customers_list.md', 'Customers', 'List'] -- ['operations_get.md', 'Operations', 'Get'] -- ['partners_customers-create.md', 'Partners', 'Customers Create'] -- ['partners_customers-list.md', 'Partners', 'Customers List'] -- ['partners_devices-claim.md', 'Partners', 'Devices Claim'] -- ['partners_devices-claim-async.md', 'Partners', 'Devices Claim Async'] -- ['partners_devices-find-by-identifier.md', 'Partners', 'Devices Find By Identifier'] -- ['partners_devices-find-by-owner.md', 'Partners', 'Devices Find By Owner'] -- ['partners_devices-get.md', 'Partners', 'Devices Get'] -- ['partners_devices-metadata.md', 'Partners', 'Devices Metadata'] -- ['partners_devices-unclaim.md', 'Partners', 'Devices Unclaim'] -- ['partners_devices-unclaim-async.md', 'Partners', 'Devices Unclaim Async'] -- ['partners_devices-update-metadata-async.md', 'Partners', 'Devices Update Metadata Async'] -- ['partners_vendors-customers-list.md', 'Partners', 'Vendors Customers List'] -- ['partners_vendors-list.md', 'Partners', 'Vendors List'] +nav: +- Home: 'index.md' +- 'Customers': + - 'Configurations Create': 'customers_configurations-create.md' + - 'Configurations Delete': 'customers_configurations-delete.md' + - 'Configurations Get': 'customers_configurations-get.md' + - 'Configurations List': 'customers_configurations-list.md' + - 'Configurations Patch': 'customers_configurations-patch.md' + - 'Devices Apply Configuration': 'customers_devices-apply-configuration.md' + - 'Devices Get': 'customers_devices-get.md' + - 'Devices List': 'customers_devices-list.md' + - 'Devices Remove Configuration': 'customers_devices-remove-configuration.md' + - 'Devices Unclaim': 'customers_devices-unclaim.md' + - 'Dpcs List': 'customers_dpcs-list.md' + - 'List': 'customers_list.md' +- 'Operations': + - 'Get': 'operations_get.md' +- 'Partners': + - 
'Customers Create': 'partners_customers-create.md' + - 'Customers List': 'partners_customers-list.md' + - 'Devices Claim': 'partners_devices-claim.md' + - 'Devices Claim Async': 'partners_devices-claim-async.md' + - 'Devices Find By Identifier': 'partners_devices-find-by-identifier.md' + - 'Devices Find By Owner': 'partners_devices-find-by-owner.md' + - 'Devices Get': 'partners_devices-get.md' + - 'Devices Metadata': 'partners_devices-metadata.md' + - 'Devices Unclaim': 'partners_devices-unclaim.md' + - 'Devices Unclaim Async': 'partners_devices-unclaim-async.md' + - 'Devices Update Metadata Async': 'partners_devices-update-metadata-async.md' + - 'Vendors Customers List': 'partners_vendors-customers-list.md' + - 'Vendors List': 'partners_vendors-list.md' theme: readthedocs diff --git a/gen/androiddeviceprovisioning1-cli/src/client.rs b/gen/androiddeviceprovisioning1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/androiddeviceprovisioning1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/androiddeviceprovisioning1-cli/src/main.rs b/gen/androiddeviceprovisioning1-cli/src/main.rs index fa4635e2ff..32a04ac0a7 100644 --- a/gen/androiddeviceprovisioning1-cli/src/main.rs +++ b/gen/androiddeviceprovisioning1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_androiddeviceprovisioning1::{api, Error, oauth2}; +use google_androiddeviceprovisioning1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -338,7 +337,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -409,13 +408,15 @@ where match &temp_cursor.to_string()[..] 
{ "configuration" => Some(("configuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-id" => Some(("device.deviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device.device-identifier.chrome-os-attested-device-id" => Some(("device.deviceIdentifier.chromeOsAttestedDeviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device.device-identifier.device-type" => Some(("device.deviceIdentifier.deviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.imei" => Some(("device.deviceIdentifier.imei", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.manufacturer" => Some(("device.deviceIdentifier.manufacturer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.meid" => Some(("device.deviceIdentifier.meid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.model" => Some(("device.deviceIdentifier.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.serial-number" => Some(("device.deviceIdentifier.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["configuration", "device", "device-id", "device-identifier", "imei", "manufacturer", "meid", "model", "serial-number"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["chrome-os-attested-device-id", "configuration", "device", "device-id", "device-identifier", "device-type", "imei", "manufacturer", "meid", "model", "serial-number"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -531,7 +532,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(value.unwrap_or("")); + call 
= call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -601,13 +602,15 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "device.device-id" => Some(("device.deviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device.device-identifier.chrome-os-attested-device-id" => Some(("device.deviceIdentifier.chromeOsAttestedDeviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device.device-identifier.device-type" => Some(("device.deviceIdentifier.deviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.imei" => Some(("device.deviceIdentifier.imei", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.manufacturer" => Some(("device.deviceIdentifier.manufacturer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.meid" => Some(("device.deviceIdentifier.meid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.model" => Some(("device.deviceIdentifier.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.serial-number" => Some(("device.deviceIdentifier.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["device", "device-id", "device-identifier", "imei", "manufacturer", "meid", "model", "serial-number"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["chrome-os-attested-device-id", "device", "device-id", "device-identifier", "device-type", "imei", "manufacturer", "meid", "model", "serial-number"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -688,13 +691,15 @@ where let type_info: 
Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "device.device-id" => Some(("device.deviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device.device-identifier.chrome-os-attested-device-id" => Some(("device.deviceIdentifier.chromeOsAttestedDeviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device.device-identifier.device-type" => Some(("device.deviceIdentifier.deviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.imei" => Some(("device.deviceIdentifier.imei", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.manufacturer" => Some(("device.deviceIdentifier.manufacturer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.meid" => Some(("device.deviceIdentifier.meid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.model" => Some(("device.deviceIdentifier.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-identifier.serial-number" => Some(("device.deviceIdentifier.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["device", "device-id", "device-identifier", "imei", "manufacturer", "meid", "model", "serial-number"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["chrome-os-attested-device-id", "device", "device-id", "device-identifier", "device-type", "imei", "manufacturer", "meid", "model", "serial-number"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -810,7 +815,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( 
value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -931,13 +936,15 @@ where "customer.admin-emails" => Some(("customer.adminEmails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "customer.company-id" => Some(("customer.companyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "customer.company-name" => Some(("customer.companyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "customer.google-workspace-account.customer-id" => Some(("customer.googleWorkspaceAccount.customerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "customer.google-workspace-account.pre-provisioning-tokens" => Some(("customer.googleWorkspaceAccount.preProvisioningTokens", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "customer.language-code" => Some(("customer.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "customer.name" => Some(("customer.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "customer.owner-emails" => Some(("customer.ownerEmails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "customer.skip-welcome-email" => Some(("customer.skipWelcomeEmail", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "customer.terms-status" => Some(("customer.termsStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-emails", "company-id", "company-name", "customer", "language-code", "name", "owner-emails", "skip-welcome-email", "terms-status"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-emails", "company-id", "company-name", "customer", "customer-id", "google-workspace-account", "language-code", "name", "owner-emails", "pre-provisioning-tokens", "skip-welcome-email", "terms-status"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1004,7 +1011,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1074,15 +1081,20 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "customer-id" => Some(("customerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-identifier.chrome-os-attested-device-id" => Some(("deviceIdentifier.chromeOsAttestedDeviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-identifier.device-type" => Some(("deviceIdentifier.deviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.imei" => Some(("deviceIdentifier.imei", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.manufacturer" => Some(("deviceIdentifier.manufacturer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.meid" => Some(("deviceIdentifier.meid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.model" => Some(("deviceIdentifier.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.serial-number" => Some(("deviceIdentifier.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.entries" => Some(("deviceMetadata.entries", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "google-workspace-customer-id" => Some(("googleWorkspaceCustomerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pre-provisioning-token" => Some(("preProvisioningToken", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "section-type" => Some(("sectionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "simlock-profile-id" => Some(("simlockProfileId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["customer-id", "device-identifier", "device-metadata", "entries", "imei", "manufacturer", "meid", "model", "section-type", "serial-number"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["chrome-os-attested-device-id", "customer-id", "device-identifier", "device-metadata", "device-type", "entries", "google-workspace-customer-id", "imei", "manufacturer", "meid", "model", "pre-provisioning-token", "section-type", "serial-number", "simlock-profile-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1243,6 +1255,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "device-identifier.chrome-os-attested-device-id" => Some(("deviceIdentifier.chromeOsAttestedDeviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-identifier.device-type" => Some(("deviceIdentifier.deviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.imei" => Some(("deviceIdentifier.imei", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.manufacturer" => Some(("deviceIdentifier.manufacturer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.meid" => Some(("deviceIdentifier.meid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1251,7 +1265,7 @@ where "limit" => Some(("limit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["device-identifier", "imei", "limit", "manufacturer", "meid", "model", "page-token", "serial-number"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["chrome-os-attested-device-id", "device-identifier", "device-type", "imei", "limit", "manufacturer", "meid", "model", "page-token", "serial-number"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1332,11 +1346,12 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "customer-id" => Some(("customerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "google-workspace-customer-id" => Some(("googleWorkspaceCustomerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "limit" => Some(("limit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "section-type" => Some(("sectionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["customer-id", "limit", "page-token", "section-type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["customer-id", "google-workspace-customer-id", "limit", "page-token", "section-type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1548,6 +1563,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "device-id" => Some(("deviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-identifier.chrome-os-attested-device-id" => Some(("deviceIdentifier.chromeOsAttestedDeviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-identifier.device-type" => Some(("deviceIdentifier.deviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.imei" => Some(("deviceIdentifier.imei", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.manufacturer" => Some(("deviceIdentifier.manufacturer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-identifier.meid" => Some(("deviceIdentifier.meid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1557,7 +1574,7 @@ where "vacation-mode-days" => Some(("vacationModeDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "vacation-mode-expire-time" => Some(("vacationModeExpireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["device-id", "device-identifier", "imei", "manufacturer", "meid", "model", "section-type", "serial-number", "vacation-mode-days", "vacation-mode-expire-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["chrome-os-attested-device-id", "device-id", "device-identifier", "device-type", "imei", "manufacturer", "meid", "model", "section-type", "serial-number", "vacation-mode-days", "vacation-mode-expire-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1786,7 +1803,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, 
_ => { let mut found = false; @@ -1842,7 +1859,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2570,7 +2587,7 @@ async fn main() { Some(false)), ]), ("devices-metadata", - Some(r##"Updates reseller metadata associated with the device."##), + Some(r##"Updates reseller metadata associated with the device. Android devices only."##), "Details at http://byron.github.io/google-apis-rs/google_androiddeviceprovisioning1_cli/partners_devices-metadata", vec![ (Some(r##"metadata-owner-id"##), @@ -2660,7 +2677,7 @@ async fn main() { Some(false)), ]), ("devices-update-metadata-async", - Some(r##"Updates the reseller metadata attached to a batch of devices. This method updates devices asynchronously and returns an `Operation` that can be used to track progress. Read [Long‑running batch operations](/zero-touch/guides/how-it-works#operations)."##), + Some(r##"Updates the reseller metadata attached to a batch of devices. This method updates devices asynchronously and returns an `Operation` that can be used to track progress. Read [Long‑running batch operations](/zero-touch/guides/how-it-works#operations). 
Android Devices only."##), "Details at http://byron.github.io/google-apis-rs/google_androiddeviceprovisioning1_cli/partners_devices-update-metadata-async", vec![ (Some(r##"partner-id"##), @@ -2737,7 +2754,7 @@ async fn main() { let mut app = App::new("androiddeviceprovisioning1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230115") .about("Automates Android zero-touch enrollment for device resellers, customers, and EMMs.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_androiddeviceprovisioning1_cli") .arg(Arg::with_name("folder") diff --git a/gen/androiddeviceprovisioning1/Cargo.toml b/gen/androiddeviceprovisioning1/Cargo.toml index 6c8e9c3d57..3df957180a 100644 --- a/gen/androiddeviceprovisioning1/Cargo.toml +++ b/gen/androiddeviceprovisioning1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-androiddeviceprovisioning1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Provisioning Partner (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androiddeviceprovisioning1" homepage = "https://developers.google.com/zero-touch/" -documentation = "https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115" license = "MIT" keywords = ["androiddeviceprovisi", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/androiddeviceprovisioning1/README.md b/gen/androiddeviceprovisioning1/README.md index 751a0d1caa..dfd8776b58 100644 --- a/gen/androiddeviceprovisioning1/README.md +++ b/gen/androiddeviceprovisioning1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-androiddeviceprovisioning1` library allows access to all features of the *Google Android Provisioning Partner* service. 
-This documentation was generated from *Android Provisioning Partner* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *androiddeviceprovisioning:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Android Provisioning Partner* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *androiddeviceprovisioning:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Android Provisioning Partner* *v1* API can be found at the [official documentation site](https://developers.google.com/zero-touch/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/AndroidProvisioningPartner) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/AndroidProvisioningPartner) ... 
* customers - * [*configurations create*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationCreateCall), [*configurations delete*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationDeleteCall), [*configurations get*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationGetCall), [*configurations list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationListCall), [*configurations patch*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationPatchCall), [*devices apply configuration*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceApplyConfigurationCall), [*devices get*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceGetCall), [*devices list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceListCall), [*devices remove configuration*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceRemoveConfigurationCall), [*devices unclaim*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceUnclaimCall), [*dpcs list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerDpcListCall) and [*list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::CustomerListCall) -* 
[operations](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::Operation) - * [*get*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::OperationGetCall) + * [*configurations create*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationCreateCall), [*configurations delete*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationDeleteCall), [*configurations get*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationGetCall), [*configurations list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationListCall), [*configurations patch*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerConfigurationPatchCall), [*devices apply configuration*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceApplyConfigurationCall), [*devices get*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceGetCall), [*devices list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceListCall), [*devices remove configuration*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceRemoveConfigurationCall), [*devices unclaim*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerDeviceUnclaimCall), [*dpcs 
list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerDpcListCall) and [*list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::CustomerListCall) +* [operations](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::Operation) + * [*get*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::OperationGetCall) * partners - * [*customers create*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerCustomerCreateCall), [*customers list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerCustomerListCall), [*devices claim*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceClaimCall), [*devices claim async*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceClaimAsyncCall), [*devices find by identifier*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceFindByIdentifierCall), [*devices find by owner*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceFindByOwnerCall), [*devices get*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceGetCall), [*devices metadata*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceMetadataCall), [*devices unclaim*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceUnclaimCall), 
[*devices unclaim async*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceUnclaimAsyncCall), [*devices update metadata async*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceUpdateMetadataAsyncCall), [*vendors customers list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerVendorCustomerListCall) and [*vendors list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/api::PartnerVendorListCall) + * [*customers create*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerCustomerCreateCall), [*customers list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerCustomerListCall), [*devices claim*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceClaimCall), [*devices claim async*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceClaimAsyncCall), [*devices find by identifier*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceFindByIdentifierCall), [*devices find by owner*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceFindByOwnerCall), [*devices get*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceGetCall), [*devices metadata*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceMetadataCall), [*devices 
unclaim*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceUnclaimCall), [*devices unclaim async*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceUnclaimAsyncCall), [*devices update metadata async*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerDeviceUpdateMetadataAsyncCall), [*vendors customers list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerVendorCustomerListCall) and [*vendors list*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/api::PartnerVendorListCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/AndroidProvisioningPartner)** +* **[Hub](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/AndroidProvisioningPartner)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::CallBuilder) -* **[Resources](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::MethodsBuilder) which in turn + allow access to individual 
[*Call Builders*](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::CallBuilder) +* **[Resources](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::Part)** + * **[Parts](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::Delegate) to the -[Method Builder](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::Delegate) to the +[Method Builder](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::RequestValue) and -[decodable](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::RequestValue) and +[decodable](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-androiddeviceprovisioning1/5.0.2-beta-1+20230115/google_androiddeviceprovisioning1/client::RequestValue) are moved +* [request values](https://docs.rs/google-androiddeviceprovisioning1/5.0.2+20230115/google_androiddeviceprovisioning1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/androiddeviceprovisioning1/src/api.rs b/gen/androiddeviceprovisioning1/src/api.rs index c936808a18..42a227128a 100644 --- a/gen/androiddeviceprovisioning1/src/api.rs +++ b/gen/androiddeviceprovisioning1/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> AndroidProvisioningPartner { AndroidProvisioningPartner { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://androiddeviceprovisioning.googleapis.com/".to_string(), _root_url: "https://androiddeviceprovisioning.googleapis.com/".to_string(), } @@ -120,7 +120,7 @@ impl<'a, S> AndroidProvisioningPartner { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/androiddeviceprovisioning1/src/client.rs b/gen/androiddeviceprovisioning1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/androiddeviceprovisioning1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/androiddeviceprovisioning1/src/lib.rs b/gen/androiddeviceprovisioning1/src/lib.rs index 288646c3a7..e9d0bceb61 100644 --- a/gen/androiddeviceprovisioning1/src/lib.rs +++ b/gen/androiddeviceprovisioning1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Android Provisioning Partner* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *androiddeviceprovisioning:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Android Provisioning Partner* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *androiddeviceprovisioning:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Android Provisioning Partner* *v1* API can be found at the //! [official documentation site](https://developers.google.com/zero-touch/). diff --git a/gen/androidenterprise1-cli/Cargo.toml b/gen/androidenterprise1-cli/Cargo.toml index ec628db586..9646681ef5 100644 --- a/gen/androidenterprise1-cli/Cargo.toml +++ b/gen/androidenterprise1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-androidenterprise1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Enterprise (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androidenterprise1-cli" @@ -20,13 +20,13 @@ name = "androidenterprise1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-androidenterprise1] path = "../androidenterprise1" -version = "4.0.1+20220303" +version = "5.0.2+20230123" + diff --git a/gen/androidenterprise1-cli/README.md b/gen/androidenterprise1-cli/README.md index 57226c5dc6..8d0e092a2d 100644 
--- a/gen/androidenterprise1-cli/README.md +++ b/gen/androidenterprise1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Android Enterprise* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *Android Enterprise* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash androidenterprise1 [options] @@ -39,6 +39,7 @@ androidenterprise1 [options] enterprises acknowledge-notification-set [-p ]... complete-signup [-p ]... [-o ] + create-enrollment-token [-p ]... [-o ] create-web-token (-r )... [-p ]... [-o ] enroll (-r )... [-p ]... [-o ] generate-signup-url [-p ]... [-o ] diff --git a/gen/androidenterprise1-cli/mkdocs.yml b/gen/androidenterprise1-cli/mkdocs.yml index 0ba0e3f68e..86ec9a7701 100644 --- a/gen/androidenterprise1-cli/mkdocs.yml +++ b/gen/androidenterprise1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Android Enterprise v4.0.1+20220303 +site_name: Android Enterprise v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-androidenterprise1-cli site_description: A complete library to interact with Android Enterprise (protocol v1) @@ -7,83 +7,100 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/androidenterpris docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['devices_force-report-upload.md', 'Devices', 'Force Report Upload'] -- ['devices_get.md', 'Devices', 'Get'] -- ['devices_get-state.md', 'Devices', 'Get State'] -- ['devices_list.md', 'Devices', 'List'] -- ['devices_set-state.md', 'Devices', 'Set State'] -- ['devices_update.md', 'Devices', 'Update'] -- ['enterprises_acknowledge-notification-set.md', 'Enterprises', 'Acknowledge Notification Set'] -- ['enterprises_complete-signup.md', 'Enterprises', 'Complete Signup'] -- ['enterprises_create-web-token.md', 'Enterprises', 'Create Web Token'] -- ['enterprises_enroll.md', 
'Enterprises', 'Enroll'] -- ['enterprises_generate-signup-url.md', 'Enterprises', 'Generate Signup Url'] -- ['enterprises_get.md', 'Enterprises', 'Get'] -- ['enterprises_get-service-account.md', 'Enterprises', 'Get Service Account'] -- ['enterprises_get-store-layout.md', 'Enterprises', 'Get Store Layout'] -- ['enterprises_list.md', 'Enterprises', 'List'] -- ['enterprises_pull-notification-set.md', 'Enterprises', 'Pull Notification Set'] -- ['enterprises_send-test-push-notification.md', 'Enterprises', 'Send Test Push Notification'] -- ['enterprises_set-account.md', 'Enterprises', 'Set Account'] -- ['enterprises_set-store-layout.md', 'Enterprises', 'Set Store Layout'] -- ['enterprises_unenroll.md', 'Enterprises', 'Unenroll'] -- ['entitlements_delete.md', 'Entitlements', 'Delete'] -- ['entitlements_get.md', 'Entitlements', 'Get'] -- ['entitlements_list.md', 'Entitlements', 'List'] -- ['entitlements_update.md', 'Entitlements', 'Update'] -- ['grouplicenses_get.md', 'Grouplicenses', 'Get'] -- ['grouplicenses_list.md', 'Grouplicenses', 'List'] -- ['grouplicenseusers_list.md', 'Grouplicenseusers', 'List'] -- ['installs_delete.md', 'Installs', 'Delete'] -- ['installs_get.md', 'Installs', 'Get'] -- ['installs_list.md', 'Installs', 'List'] -- ['installs_update.md', 'Installs', 'Update'] -- ['managedconfigurationsfordevice_delete.md', 'Managedconfigurationsfordevice', 'Delete'] -- ['managedconfigurationsfordevice_get.md', 'Managedconfigurationsfordevice', 'Get'] -- ['managedconfigurationsfordevice_list.md', 'Managedconfigurationsfordevice', 'List'] -- ['managedconfigurationsfordevice_update.md', 'Managedconfigurationsfordevice', 'Update'] -- ['managedconfigurationsforuser_delete.md', 'Managedconfigurationsforuser', 'Delete'] -- ['managedconfigurationsforuser_get.md', 'Managedconfigurationsforuser', 'Get'] -- ['managedconfigurationsforuser_list.md', 'Managedconfigurationsforuser', 'List'] -- ['managedconfigurationsforuser_update.md', 'Managedconfigurationsforuser', 'Update'] -- 
['managedconfigurationssettings_list.md', 'Managedconfigurationssettings', 'List'] -- ['permissions_get.md', 'Permissions', 'Get'] -- ['products_approve.md', 'Products', 'Approve'] -- ['products_generate-approval-url.md', 'Products', 'Generate Approval Url'] -- ['products_get.md', 'Products', 'Get'] -- ['products_get-app-restrictions-schema.md', 'Products', 'Get App Restrictions Schema'] -- ['products_get-permissions.md', 'Products', 'Get Permissions'] -- ['products_list.md', 'Products', 'List'] -- ['products_unapprove.md', 'Products', 'Unapprove'] -- ['serviceaccountkeys_delete.md', 'Serviceaccountkeys', 'Delete'] -- ['serviceaccountkeys_insert.md', 'Serviceaccountkeys', 'Insert'] -- ['serviceaccountkeys_list.md', 'Serviceaccountkeys', 'List'] -- ['storelayoutclusters_delete.md', 'Storelayoutclusters', 'Delete'] -- ['storelayoutclusters_get.md', 'Storelayoutclusters', 'Get'] -- ['storelayoutclusters_insert.md', 'Storelayoutclusters', 'Insert'] -- ['storelayoutclusters_list.md', 'Storelayoutclusters', 'List'] -- ['storelayoutclusters_update.md', 'Storelayoutclusters', 'Update'] -- ['storelayoutpages_delete.md', 'Storelayoutpages', 'Delete'] -- ['storelayoutpages_get.md', 'Storelayoutpages', 'Get'] -- ['storelayoutpages_insert.md', 'Storelayoutpages', 'Insert'] -- ['storelayoutpages_list.md', 'Storelayoutpages', 'List'] -- ['storelayoutpages_update.md', 'Storelayoutpages', 'Update'] -- ['users_delete.md', 'Users', 'Delete'] -- ['users_generate-authentication-token.md', 'Users', 'Generate Authentication Token'] -- ['users_get.md', 'Users', 'Get'] -- ['users_get-available-product-set.md', 'Users', 'Get Available Product Set'] -- ['users_insert.md', 'Users', 'Insert'] -- ['users_list.md', 'Users', 'List'] -- ['users_revoke-device-access.md', 'Users', 'Revoke Device Access'] -- ['users_set-available-product-set.md', 'Users', 'Set Available Product Set'] -- ['users_update.md', 'Users', 'Update'] -- ['webapps_delete.md', 'Webapps', 'Delete'] -- ['webapps_get.md', 
'Webapps', 'Get'] -- ['webapps_insert.md', 'Webapps', 'Insert'] -- ['webapps_list.md', 'Webapps', 'List'] -- ['webapps_update.md', 'Webapps', 'Update'] +nav: +- Home: 'index.md' +- 'Devices': + - 'Force Report Upload': 'devices_force-report-upload.md' + - 'Get': 'devices_get.md' + - 'Get State': 'devices_get-state.md' + - 'List': 'devices_list.md' + - 'Set State': 'devices_set-state.md' + - 'Update': 'devices_update.md' +- 'Enterprises': + - 'Acknowledge Notification Set': 'enterprises_acknowledge-notification-set.md' + - 'Complete Signup': 'enterprises_complete-signup.md' + - 'Create Enrollment Token': 'enterprises_create-enrollment-token.md' + - 'Create Web Token': 'enterprises_create-web-token.md' + - 'Enroll': 'enterprises_enroll.md' + - 'Generate Signup Url': 'enterprises_generate-signup-url.md' + - 'Get': 'enterprises_get.md' + - 'Get Service Account': 'enterprises_get-service-account.md' + - 'Get Store Layout': 'enterprises_get-store-layout.md' + - 'List': 'enterprises_list.md' + - 'Pull Notification Set': 'enterprises_pull-notification-set.md' + - 'Send Test Push Notification': 'enterprises_send-test-push-notification.md' + - 'Set Account': 'enterprises_set-account.md' + - 'Set Store Layout': 'enterprises_set-store-layout.md' + - 'Unenroll': 'enterprises_unenroll.md' +- 'Entitlements': + - 'Delete': 'entitlements_delete.md' + - 'Get': 'entitlements_get.md' + - 'List': 'entitlements_list.md' + - 'Update': 'entitlements_update.md' +- 'Grouplicenses': + - 'Get': 'grouplicenses_get.md' + - 'List': 'grouplicenses_list.md' +- 'Grouplicenseusers': + - 'List': 'grouplicenseusers_list.md' +- 'Installs': + - 'Delete': 'installs_delete.md' + - 'Get': 'installs_get.md' + - 'List': 'installs_list.md' + - 'Update': 'installs_update.md' +- 'Managedconfigurationsfordevice': + - 'Delete': 'managedconfigurationsfordevice_delete.md' + - 'Get': 'managedconfigurationsfordevice_get.md' + - 'List': 'managedconfigurationsfordevice_list.md' + - 'Update': 
'managedconfigurationsfordevice_update.md' +- 'Managedconfigurationsforuser': + - 'Delete': 'managedconfigurationsforuser_delete.md' + - 'Get': 'managedconfigurationsforuser_get.md' + - 'List': 'managedconfigurationsforuser_list.md' + - 'Update': 'managedconfigurationsforuser_update.md' +- 'Managedconfigurationssettings': + - 'List': 'managedconfigurationssettings_list.md' +- 'Permissions': + - 'Get': 'permissions_get.md' +- 'Products': + - 'Approve': 'products_approve.md' + - 'Generate Approval Url': 'products_generate-approval-url.md' + - 'Get': 'products_get.md' + - 'Get App Restrictions Schema': 'products_get-app-restrictions-schema.md' + - 'Get Permissions': 'products_get-permissions.md' + - 'List': 'products_list.md' + - 'Unapprove': 'products_unapprove.md' +- 'Serviceaccountkeys': + - 'Delete': 'serviceaccountkeys_delete.md' + - 'Insert': 'serviceaccountkeys_insert.md' + - 'List': 'serviceaccountkeys_list.md' +- 'Storelayoutclusters': + - 'Delete': 'storelayoutclusters_delete.md' + - 'Get': 'storelayoutclusters_get.md' + - 'Insert': 'storelayoutclusters_insert.md' + - 'List': 'storelayoutclusters_list.md' + - 'Update': 'storelayoutclusters_update.md' +- 'Storelayoutpages': + - 'Delete': 'storelayoutpages_delete.md' + - 'Get': 'storelayoutpages_get.md' + - 'Insert': 'storelayoutpages_insert.md' + - 'List': 'storelayoutpages_list.md' + - 'Update': 'storelayoutpages_update.md' +- 'Users': + - 'Delete': 'users_delete.md' + - 'Generate Authentication Token': 'users_generate-authentication-token.md' + - 'Get': 'users_get.md' + - 'Get Available Product Set': 'users_get-available-product-set.md' + - 'Insert': 'users_insert.md' + - 'List': 'users_list.md' + - 'Revoke Device Access': 'users_revoke-device-access.md' + - 'Set Available Product Set': 'users_set-available-product-set.md' + - 'Update': 'users_update.md' +- 'Webapps': + - 'Delete': 'webapps_delete.md' + - 'Get': 'webapps_get.md' + - 'Insert': 'webapps_insert.md' + - 'List': 'webapps_list.md' + - 'Update': 
'webapps_update.md' theme: readthedocs diff --git a/gen/androidenterprise1-cli/src/client.rs b/gen/androidenterprise1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/androidenterprise1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/androidenterprise1-cli/src/main.rs b/gen/androidenterprise1-cli/src/main.rs index e0631214af..a50c9894c7 100644 --- a/gen/androidenterprise1-cli/src/main.rs +++ b/gen/androidenterprise1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_androidenterprise1::{api, Error, oauth2}; +use google_androidenterprise1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -539,6 +538,62 @@ where } } + async fn _enterprises_create_enrollment_token(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.enterprises().create_enrollment_token(opt.value_of("enterprise-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "device-type" => { + call = call.device_type(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["device-type"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 
0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _enterprises_create_web_token(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -655,11 +710,13 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "google-authentication-settings.dedicated-devices-allowed" => Some(("googleAuthenticationSettings.dedicatedDevicesAllowed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "google-authentication-settings.google-authentication-required" => Some(("googleAuthenticationSettings.googleAuthenticationRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "primary-domain" => Some(("primaryDomain", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["id", "name", "primary-domain"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["dedicated-devices-allowed", "google-authentication-required", "google-authentication-settings", "id", "name", "primary-domain"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1499,7 +1556,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "install" => { - call = call.install(arg_from_str(value.unwrap_or("false"), err, "install", "boolean")); + call = call.install( value.map(|v| arg_from_str(v, err, "install", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2828,13 +2885,13 @@ where call = call.query(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "language" => { call = call.language(value.unwrap_or("")); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4682,6 +4739,9 @@ where ("complete-signup", Some(opt)) => { call_result = self._enterprises_complete_signup(opt, dry_run, &mut err).await; }, + ("create-enrollment-token", Some(opt)) => { + call_result = self._enterprises_create_enrollment_token(opt, dry_run, &mut err).await; + }, ("create-web-token", Some(opt)) => { call_result = self._enterprises_create_web_token(opt, dry_run, &mut err).await; }, @@ -5281,7 +5341,7 @@ async fn main() { ]), ]), - ("enterprises", "methods: 'acknowledge-notification-set', 'complete-signup', 'create-web-token', 'enroll', 'generate-signup-url', 'get', 'get-service-account', 'get-store-layout', 'list', 'pull-notification-set', 'send-test-push-notification', 'set-account', 'set-store-layout' and 'unenroll'", vec![ + ("enterprises", "methods: 'acknowledge-notification-set', 'complete-signup', 'create-enrollment-token', 'create-web-token', 'enroll', 'generate-signup-url', 'get', 'get-service-account', 'get-store-layout', 'list', 'pull-notification-set', 'send-test-push-notification', 'set-account', 'set-store-layout' and 'unenroll'", vec![ ("acknowledge-notification-set", Some(r##"Acknowledges notifications that were received from Enterprises.PullNotificationSet to prevent subsequent calls from returning the same notifications."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/enterprises_acknowledge-notification-set", @@ -5302,6 +5362,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("create-enrollment-token", + Some(r##"Returns a token for device enrollment. The DPC can encode this token within the QR/NFC/zero-touch enrollment payload or fetch it before calling the on-device API to authenticate the user. 
The token can be generated for each device or reused across multiple devices."##), + "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/enterprises_create-enrollment-token", + vec![ + (Some(r##"enterprise-id"##), + None, + Some(r##"The ID of the enterprise."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5582,7 +5664,7 @@ async fn main() { ("entitlements", "methods: 'delete', 'get', 'list' and 'update'", vec![ ("delete", - Some(r##"Removes an entitlement to an app for a user."##), + Some(r##"Removes an entitlement to an app for a user. **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/entitlements_delete", vec![ (Some(r##"enterprise-id"##), @@ -5610,7 +5692,7 @@ async fn main() { Some(true)), ]), ("get", - Some(r##"Retrieves details of an entitlement."##), + Some(r##"Retrieves details of an entitlement. **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/entitlements_get", vec![ (Some(r##"enterprise-id"##), @@ -5644,7 +5726,7 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"Lists all entitlements for the specified user. Only the ID is set."##), + Some(r##"Lists all entitlements for the specified user. Only the ID is set. **Note:** This item has been deprecated. 
New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/entitlements_list", vec![ (Some(r##"enterprise-id"##), @@ -5672,7 +5754,7 @@ async fn main() { Some(false)), ]), ("update", - Some(r##"Adds or updates an entitlement to an app for a user."##), + Some(r##"Adds or updates an entitlement to an app for a user. **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/entitlements_update", vec![ (Some(r##"enterprise-id"##), @@ -5715,7 +5797,7 @@ async fn main() { ("grouplicenses", "methods: 'get' and 'list'", vec![ ("get", - Some(r##"Retrieves details of an enterprise's group license for a product."##), + Some(r##"Retrieves details of an enterprise's group license for a product. **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/grouplicenses_get", vec![ (Some(r##"enterprise-id"##), @@ -5743,7 +5825,7 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"Retrieves IDs of all products for which the enterprise has a group license."##), + Some(r##"Retrieves IDs of all products for which the enterprise has a group license. **Note:** This item has been deprecated. 
New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/grouplicenses_list", vec![ (Some(r##"enterprise-id"##), @@ -5768,7 +5850,7 @@ async fn main() { ("grouplicenseusers", "methods: 'list'", vec![ ("list", - Some(r##"Retrieves the IDs of the users who have been granted entitlements under the license."##), + Some(r##"Retrieves the IDs of the users who have been granted entitlements under the license. **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/grouplicenseusers_list", vec![ (Some(r##"enterprise-id"##), @@ -6302,7 +6384,7 @@ async fn main() { ("products", "methods: 'approve', 'generate-approval-url', 'get', 'get-app-restrictions-schema', 'get-permissions', 'list' and 'unapprove'", vec![ ("approve", - Some(r##" Approves the specified product and the relevant app permissions, if any. The maximum number of products that you can approve per enterprise customer is 1,000. To learn how to use managed Google Play to design and create a store layout to display approved products to your users, see Store Layout Design. "##), + Some(r##" Approves the specified product and the relevant app permissions, if any. The maximum number of products that you can approve per enterprise customer is 1,000. To learn how to use managed Google Play to design and create a store layout to display approved products to your users, see Store Layout Design. **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations. 
"##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/products_approve", vec![ (Some(r##"enterprise-id"##), @@ -6330,7 +6412,7 @@ async fn main() { Some(true)), ]), ("generate-approval-url", - Some(r##"Generates a URL that can be rendered in an iframe to display the permissions (if any) of a product. An enterprise admin must view these permissions and accept them on behalf of their organization in order to approve that product. Admins should accept the displayed permissions by interacting with a separate UI element in the EMM console, which in turn should trigger the use of this URL as the approvalUrlInfo.approvalUrl property in a Products.approve call to approve the product. This URL can only be used to display permissions for up to 1 day."##), + Some(r##"Generates a URL that can be rendered in an iframe to display the permissions (if any) of a product. An enterprise admin must view these permissions and accept them on behalf of their organization in order to approve that product. Admins should accept the displayed permissions by interacting with a separate UI element in the EMM console, which in turn should trigger the use of this URL as the approvalUrlInfo.approvalUrl property in a Products.approve call to approve the product. This URL can only be used to display permissions for up to 1 day. **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations. "##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/products_generate-approval-url", vec![ (Some(r##"enterprise-id"##), @@ -6442,7 +6524,7 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"Finds approved products that match a query, or all approved products if there is no query."##), + Some(r##"Finds approved products that match a query, or all approved products if there is no query. **Note:** This item has been deprecated. 
New integrations cannot use this method and can refer to our new recommendations. "##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/products_list", vec![ (Some(r##"enterprise-id"##), @@ -6464,7 +6546,7 @@ async fn main() { Some(false)), ]), ("unapprove", - Some(r##"Unapproves the specified product (and the relevant app permissions, if any)"##), + Some(r##"Unapproves the specified product (and the relevant app permissions, if any) **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/products_unapprove", vec![ (Some(r##"enterprise-id"##), @@ -6946,7 +7028,7 @@ async fn main() { Some(false)), ]), ("get-available-product-set", - Some(r##"Retrieves the set of products a user is entitled to access."##), + Some(r##"Retrieves the set of products a user is entitled to access. **Note:** This item has been deprecated. New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/users_get-available-product-set", vec![ (Some(r##"enterprise-id"##), @@ -7052,7 +7134,7 @@ async fn main() { Some(true)), ]), ("set-available-product-set", - Some(r##"Modifies the set of products that a user is entitled to access (referred to as *whitelisted* products). Only products that are approved or products that were previously approved (products with revoked approval) can be whitelisted."##), + Some(r##"Modifies the set of products that a user is entitled to access (referred to as *whitelisted* products). Only products that are approved or products that were previously approved (products with revoked approval) can be whitelisted. **Note:** This item has been deprecated. 
New integrations cannot use this method and can refer to our new recommendations."##), "Details at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli/users_set-available-product-set", vec![ (Some(r##"enterprise-id"##), @@ -7262,7 +7344,7 @@ async fn main() { let mut app = App::new("androidenterprise1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230123") .about("Manages the deployment of apps to Android Enterprise devices.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_androidenterprise1_cli") .arg(Arg::with_name("url") diff --git a/gen/androidenterprise1/Cargo.toml b/gen/androidenterprise1/Cargo.toml index 4b100af836..4d5ab726bf 100644 --- a/gen/androidenterprise1/Cargo.toml +++ b/gen/androidenterprise1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-androidenterprise1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Enterprise (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androidenterprise1" homepage = "https://developers.google.com/android/work/play/emm-api" -documentation = "https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-androidenterprise1/5.0.2+20230123" license = "MIT" keywords = ["androidenterprise", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/androidenterprise1/README.md b/gen/androidenterprise1/README.md index 922f8455d0..8c0b70c584 100644 --- a/gen/androidenterprise1/README.md +++ b/gen/androidenterprise1/README.md @@ -5,46 +5,46 @@ DO NOT EDIT ! --> The `google-androidenterprise1` library allows access to all features of the *Google Android Enterprise* service. 
-This documentation was generated from *Android Enterprise* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *androidenterprise:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Android Enterprise* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *androidenterprise:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Android Enterprise* *v1* API can be found at the [official documentation site](https://developers.google.com/android/work/play/emm-api). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/AndroidEnterprise) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/AndroidEnterprise) ... 
-* [devices](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::Device) - * [*force report upload*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::DeviceForceReportUploadCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::DeviceGetCall), [*get state*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::DeviceGetStateCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::DeviceListCall), [*set state*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::DeviceSetStateCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::DeviceUpdateCall) -* [enterprises](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::Enterprise) - * [*acknowledge notification set*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseAcknowledgeNotificationSetCall), [*complete signup*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseCompleteSignupCall), [*create enrollment token*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseCreateEnrollmentTokenCall), [*create web token*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseCreateWebTokenCall), [*enroll*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseEnrollCall), [*generate signup url*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseGenerateSignupUrlCall), 
[*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseGetCall), [*get service account*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseGetServiceAccountCall), [*get store layout*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseGetStoreLayoutCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseListCall), [*pull notification set*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterprisePullNotificationSetCall), [*send test push notification*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseSendTestPushNotificationCall), [*set account*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseSetAccountCall), [*set store layout*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseSetStoreLayoutCall) and [*unenroll*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EnterpriseUnenrollCall) -* [entitlements](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::Entitlement) - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EntitlementDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EntitlementGetCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EntitlementListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::EntitlementUpdateCall) +* 
[devices](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::Device) + * [*force report upload*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::DeviceForceReportUploadCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::DeviceGetCall), [*get state*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::DeviceGetStateCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::DeviceListCall), [*set state*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::DeviceSetStateCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::DeviceUpdateCall) +* [enterprises](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::Enterprise) + * [*acknowledge notification set*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseAcknowledgeNotificationSetCall), [*complete signup*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseCompleteSignupCall), [*create enrollment token*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseCreateEnrollmentTokenCall), [*create web token*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseCreateWebTokenCall), [*enroll*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseEnrollCall), [*generate signup url*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseGenerateSignupUrlCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseGetCall), [*get service 
account*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseGetServiceAccountCall), [*get store layout*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseGetStoreLayoutCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseListCall), [*pull notification set*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterprisePullNotificationSetCall), [*send test push notification*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseSendTestPushNotificationCall), [*set account*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseSetAccountCall), [*set store layout*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseSetStoreLayoutCall) and [*unenroll*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EnterpriseUnenrollCall) +* [entitlements](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::Entitlement) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EntitlementDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EntitlementGetCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EntitlementListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::EntitlementUpdateCall) * grouplicenses - * [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::GrouplicenseGetCall) and [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::GrouplicenseListCall) + * 
[*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::GrouplicenseGetCall) and [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::GrouplicenseListCall) * grouplicenseusers - * [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::GrouplicenseuserListCall) -* [installs](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::Install) - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::InstallDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::InstallGetCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::InstallListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::InstallUpdateCall) + * [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::GrouplicenseuserListCall) +* [installs](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::Install) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::InstallDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::InstallGetCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::InstallListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::InstallUpdateCall) * managedconfigurationsfordevice - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationsfordeviceDeleteCall), 
[*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationsfordeviceGetCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationsfordeviceListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationsfordeviceUpdateCall) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationsfordeviceDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationsfordeviceGetCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationsfordeviceListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationsfordeviceUpdateCall) * managedconfigurationsforuser - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationsforuserDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationsforuserGetCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationsforuserListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationsforuserUpdateCall) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationsforuserDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationsforuserGetCall), 
[*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationsforuserListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationsforuserUpdateCall) * managedconfigurationssettings - * [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ManagedconfigurationssettingListCall) -* [permissions](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::Permission) - * [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::PermissionGetCall) -* [products](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::Product) - * [*approve*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ProductApproveCall), [*generate approval url*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ProductGenerateApprovalUrlCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ProductGetCall), [*get app restrictions schema*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ProductGetAppRestrictionsSchemaCall), [*get permissions*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ProductGetPermissionCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ProductListCall) and [*unapprove*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ProductUnapproveCall) + * [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ManagedconfigurationssettingListCall) +* 
[permissions](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::Permission) + * [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::PermissionGetCall) +* [products](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::Product) + * [*approve*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ProductApproveCall), [*generate approval url*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ProductGenerateApprovalUrlCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ProductGetCall), [*get app restrictions schema*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ProductGetAppRestrictionsSchemaCall), [*get permissions*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ProductGetPermissionCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ProductListCall) and [*unapprove*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ProductUnapproveCall) * serviceaccountkeys - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ServiceaccountkeyDeleteCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ServiceaccountkeyInsertCall) and [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::ServiceaccountkeyListCall) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ServiceaccountkeyDeleteCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ServiceaccountkeyInsertCall) and 
[*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::ServiceaccountkeyListCall) * storelayoutclusters - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutclusterDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutclusterGetCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutclusterInsertCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutclusterListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutclusterUpdateCall) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutclusterDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutclusterGetCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutclusterInsertCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutclusterListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutclusterUpdateCall) * storelayoutpages - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutpageDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutpageGetCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutpageInsertCall), 
[*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutpageListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::StorelayoutpageUpdateCall) -* [users](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::User) - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserDeleteCall), [*generate authentication token*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserGenerateAuthenticationTokenCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserGetCall), [*get available product set*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserGetAvailableProductSetCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserInsertCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserListCall), [*revoke device access*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserRevokeDeviceAccesCall), [*set available product set*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserSetAvailableProductSetCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::UserUpdateCall) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutpageDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutpageGetCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutpageInsertCall), 
[*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutpageListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::StorelayoutpageUpdateCall) +* [users](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::User) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserDeleteCall), [*generate authentication token*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserGenerateAuthenticationTokenCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserGetCall), [*get available product set*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserGetAvailableProductSetCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserInsertCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserListCall), [*revoke device access*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserRevokeDeviceAccesCall), [*set available product set*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserSetAvailableProductSetCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::UserUpdateCall) * webapps - * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::WebappDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::WebappGetCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::WebappInsertCall), 
[*list*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::WebappListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/api::WebappUpdateCall) + * [*delete*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::WebappDeleteCall), [*get*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::WebappGetCall), [*insert*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::WebappInsertCall), [*list*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::WebappListCall) and [*update*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/api::WebappUpdateCall) @@ -53,17 +53,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/AndroidEnterprise)** +* **[Hub](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/AndroidEnterprise)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::CallBuilder) -* **[Resources](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::CallBuilder) +* **[Resources](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::Part)** + * **[Parts](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -164,17 +164,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -184,29 +184,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::Delegate) to the -[Method Builder](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::Delegate) to the +[Method Builder](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::RequestValue) and -[decodable](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::RequestValue) and +[decodable](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-androidenterprise1/5.0.2-beta-1+20230123/google_androidenterprise1/client::RequestValue) are moved +* [request values](https://docs.rs/google-androidenterprise1/5.0.2+20230123/google_androidenterprise1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/androidenterprise1/src/api.rs b/gen/androidenterprise1/src/api.rs index 574ee23a0f..e50288032e 100644 --- a/gen/androidenterprise1/src/api.rs +++ b/gen/androidenterprise1/src/api.rs @@ -121,7 +121,7 @@ impl<'a, S> AndroidEnterprise { AndroidEnterprise { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://androidenterprise.googleapis.com/".to_string(), _root_url: "https://androidenterprise.googleapis.com/".to_string(), } @@ -177,7 +177,7 @@ impl<'a, S> AndroidEnterprise { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/androidenterprise1/src/client.rs b/gen/androidenterprise1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/androidenterprise1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/androidenterprise1/src/lib.rs b/gen/androidenterprise1/src/lib.rs index ab0c64b3fe..45dd767835 100644 --- a/gen/androidenterprise1/src/lib.rs +++ b/gen/androidenterprise1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Android Enterprise* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *androidenterprise:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Android Enterprise* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *androidenterprise:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Android Enterprise* *v1* API can be found at the //! [official documentation site](https://developers.google.com/android/work/play/emm-api). diff --git a/gen/androidmanagement1-cli/Cargo.toml b/gen/androidmanagement1-cli/Cargo.toml index a7887de63f..9394480aed 100644 --- a/gen/androidmanagement1-cli/Cargo.toml +++ b/gen/androidmanagement1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-androidmanagement1-cli" -version = "4.0.1+20220302" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androidmanagement1-cli" @@ -20,13 +20,13 @@ name = "androidmanagement1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-androidmanagement1] path = "../androidmanagement1" -version = "4.0.1+20220302" +version = "5.0.2+20230119" + diff --git a/gen/androidmanagement1-cli/README.md b/gen/androidmanagement1-cli/README.md index 0735fbf5da..579f518785 100644 --- 
a/gen/androidmanagement1-cli/README.md +++ b/gen/androidmanagement1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Android Management* API at revision *20220302*. The CLI is at version *4.0.1*. +This documentation was generated from the *Android Management* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash androidmanagement1 [options] @@ -44,6 +44,8 @@ androidmanagement1 [options] devices-patch (-r )... [-p ]... [-o ] enrollment-tokens-create (-r )... [-p ]... [-o ] enrollment-tokens-delete [-p ]... [-o ] + enrollment-tokens-get [-p ]... [-o ] + enrollment-tokens-list [-p ]... [-o ] get [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] diff --git a/gen/androidmanagement1-cli/mkdocs.yml b/gen/androidmanagement1-cli/mkdocs.yml index a46a4ff3f4..6036d07385 100644 --- a/gen/androidmanagement1-cli/mkdocs.yml +++ b/gen/androidmanagement1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Android Management v4.0.1+20220302 +site_name: Android Management v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-androidmanagement1-cli site_description: A complete library to interact with Android Management (protocol v1) @@ -7,36 +7,40 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/androidmanagemen docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['enterprises_applications-get.md', 'Enterprises', 'Applications Get'] -- ['enterprises_create.md', 'Enterprises', 'Create'] -- ['enterprises_delete.md', 'Enterprises', 'Delete'] -- ['enterprises_devices-delete.md', 'Enterprises', 'Devices Delete'] -- ['enterprises_devices-get.md', 'Enterprises', 'Devices Get'] -- ['enterprises_devices-issue-command.md', 'Enterprises', 'Devices Issue Command'] -- ['enterprises_devices-list.md', 'Enterprises', 'Devices List'] -- ['enterprises_devices-operations-cancel.md', 'Enterprises', 'Devices Operations 
Cancel'] -- ['enterprises_devices-operations-delete.md', 'Enterprises', 'Devices Operations Delete'] -- ['enterprises_devices-operations-get.md', 'Enterprises', 'Devices Operations Get'] -- ['enterprises_devices-operations-list.md', 'Enterprises', 'Devices Operations List'] -- ['enterprises_devices-patch.md', 'Enterprises', 'Devices Patch'] -- ['enterprises_enrollment-tokens-create.md', 'Enterprises', 'Enrollment Tokens Create'] -- ['enterprises_enrollment-tokens-delete.md', 'Enterprises', 'Enrollment Tokens Delete'] -- ['enterprises_get.md', 'Enterprises', 'Get'] -- ['enterprises_list.md', 'Enterprises', 'List'] -- ['enterprises_patch.md', 'Enterprises', 'Patch'] -- ['enterprises_policies-delete.md', 'Enterprises', 'Policies Delete'] -- ['enterprises_policies-get.md', 'Enterprises', 'Policies Get'] -- ['enterprises_policies-list.md', 'Enterprises', 'Policies List'] -- ['enterprises_policies-patch.md', 'Enterprises', 'Policies Patch'] -- ['enterprises_web-apps-create.md', 'Enterprises', 'Web Apps Create'] -- ['enterprises_web-apps-delete.md', 'Enterprises', 'Web Apps Delete'] -- ['enterprises_web-apps-get.md', 'Enterprises', 'Web Apps Get'] -- ['enterprises_web-apps-list.md', 'Enterprises', 'Web Apps List'] -- ['enterprises_web-apps-patch.md', 'Enterprises', 'Web Apps Patch'] -- ['enterprises_web-tokens-create.md', 'Enterprises', 'Web Tokens Create'] -- ['signup-urls_create.md', 'Signup Urls', 'Create'] +nav: +- Home: 'index.md' +- 'Enterprises': + - 'Applications Get': 'enterprises_applications-get.md' + - 'Create': 'enterprises_create.md' + - 'Delete': 'enterprises_delete.md' + - 'Devices Delete': 'enterprises_devices-delete.md' + - 'Devices Get': 'enterprises_devices-get.md' + - 'Devices Issue Command': 'enterprises_devices-issue-command.md' + - 'Devices List': 'enterprises_devices-list.md' + - 'Devices Operations Cancel': 'enterprises_devices-operations-cancel.md' + - 'Devices Operations Delete': 'enterprises_devices-operations-delete.md' + - 'Devices 
Operations Get': 'enterprises_devices-operations-get.md' + - 'Devices Operations List': 'enterprises_devices-operations-list.md' + - 'Devices Patch': 'enterprises_devices-patch.md' + - 'Enrollment Tokens Create': 'enterprises_enrollment-tokens-create.md' + - 'Enrollment Tokens Delete': 'enterprises_enrollment-tokens-delete.md' + - 'Enrollment Tokens Get': 'enterprises_enrollment-tokens-get.md' + - 'Enrollment Tokens List': 'enterprises_enrollment-tokens-list.md' + - 'Get': 'enterprises_get.md' + - 'List': 'enterprises_list.md' + - 'Patch': 'enterprises_patch.md' + - 'Policies Delete': 'enterprises_policies-delete.md' + - 'Policies Get': 'enterprises_policies-get.md' + - 'Policies List': 'enterprises_policies-list.md' + - 'Policies Patch': 'enterprises_policies-patch.md' + - 'Web Apps Create': 'enterprises_web-apps-create.md' + - 'Web Apps Delete': 'enterprises_web-apps-delete.md' + - 'Web Apps Get': 'enterprises_web-apps-get.md' + - 'Web Apps List': 'enterprises_web-apps-list.md' + - 'Web Apps Patch': 'enterprises_web-apps-patch.md' + - 'Web Tokens Create': 'enterprises_web-tokens-create.md' +- 'Signup Urls': + - 'Create': 'signup-urls_create.md' theme: readthedocs diff --git a/gen/androidmanagement1-cli/src/client.rs b/gen/androidmanagement1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/androidmanagement1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// 
I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/androidmanagement1-cli/src/main.rs b/gen/androidmanagement1-cli/src/main.rs index 4a11cd7a38..4b92c6e4db 100644 --- a/gen/androidmanagement1-cli/src/main.rs +++ b/gen/androidmanagement1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_androidmanagement1::{api, Error, oauth2}; +use google_androidmanagement1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -170,7 +169,7 @@ where call = call.enterprise_token(value.unwrap_or("")); }, "agreement-accepted" => { - call = call.agreement_accepted(arg_from_str(value.unwrap_or("false"), err, "agreement-accepted", "boolean")); + call = call.agreement_accepted( value.map(|v| arg_from_str(v, err, "agreement-accepted", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -405,6 +404,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "clear-apps-data-params.package-names" => Some(("clearAppsDataParams.packageNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "duration" => Some(("duration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error-code" => Some(("errorCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -413,7 +413,7 @@ where "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-name" => Some(("userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "duration", "error-code", "new-password", "reset-password-flags", "type", "user-name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["clear-apps-data-params", "create-time", "duration", "error-code", "new-password", "package-names", "reset-password-flags", "type", "user-name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -483,7 +483,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -698,7 +698,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -853,7 +853,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); 
+ call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1048,6 +1048,117 @@ where } } + async fn _enterprises_enrollment_tokens_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.enterprises().enrollment_tokens_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _enterprises_enrollment_tokens_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let 
mut call = self.hub.enterprises().enrollment_tokens_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _enterprises_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.enterprises().get(opt.value_of("name").unwrap_or("")); @@ -1116,7 +1227,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1219,7 +1330,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1382,7 +1493,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1480,6 +1591,7 @@ where "cross-profile-policies.cross-profile-copy-paste" => Some(("crossProfilePolicies.crossProfileCopyPaste", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cross-profile-policies.cross-profile-data-sharing" => Some(("crossProfilePolicies.crossProfileDataSharing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cross-profile-policies.show-work-contacts-in-personal-profile" => Some(("crossProfilePolicies.showWorkContactsInPersonalProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cross-profile-policies.work-profile-widgets-default" => Some(("crossProfilePolicies.workProfileWidgetsDefault", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "data-roaming-disabled" => Some(("dataRoamingDisabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "debugging-features-allowed" => Some(("debuggingFeaturesAllowed", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "default-permission-policy" => 
Some(("defaultPermissionPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1571,6 +1683,8 @@ where "tethering-config-disabled" => Some(("tetheringConfigDisabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "uninstall-apps-disabled" => Some(("uninstallAppsDisabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "unmute-microphone-disabled" => Some(("unmuteMicrophoneDisabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "usage-log.enabled-log-types" => Some(("usageLog.enabledLogTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "usage-log.upload-on-cellular-allowed" => Some(("usageLog.uploadOnCellularAllowed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "usb-file-transfer-disabled" => Some(("usbFileTransferDisabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "usb-mass-storage-enabled" => Some(("usbMassStorageEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1578,7 +1692,7 @@ where "wifi-config-disabled" => Some(("wifiConfigDisabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "wifi-configs-lockdown-enabled" => Some(("wifiConfigsLockdownEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-types-with-management-disabled", "add-user-disabled", "adjust-volume-disabled", "advanced-security-overrides", "always-on-vpn-package", "android-device-policy-tracks", "app-auto-update-policy", "application-reporting-settings", "application-reports-enabled", "auto-date-and-time-zone", "auto-time-required", "block-applications-enabled", "bluetooth-config-disabled", "bluetooth-contact-sharing-disabled", "bluetooth-disabled", "camera-access", 
"camera-disabled", "cell-broadcasts-config-disabled", "common-criteria-mode", "common-criteria-mode-enabled", "create-windows-disabled", "credentials-config-disabled", "cross-profile-copy-paste", "cross-profile-data-sharing", "cross-profile-policies", "data-roaming-disabled", "debugging-features-allowed", "default-message", "default-permission-policy", "developer-settings", "device-owner-lock-screen-info", "device-settings", "device-settings-enabled", "display-info-enabled", "encryption-policy", "end-minutes", "ensure-verify-apps-enabled", "excluded-hosts", "factory-reset-disabled", "frp-admin-emails", "fun-disabled", "google-play-protect-verify-apps", "hardware-status-enabled", "host", "include-removed-apps", "install-apps-disabled", "install-unknown-sources-allowed", "keyguard-disabled", "keyguard-disabled-features", "kiosk-custom-launcher-enabled", "kiosk-customization", "localized-messages", "location-mode", "lockdown-enabled", "long-support-message", "max-days-with-work-off", "maximum-failed-passwords-for-wipe", "maximum-time-to-lock", "memory-info-enabled", "microphone-access", "minimum-api-level", "mobile-networks-config-disabled", "modify-accounts-disabled", "mount-physical-media-disabled", "name", "network-escape-hatch-enabled", "network-info-enabled", "network-reset-disabled", "outgoing-beam-disabled", "outgoing-calls-disabled", "pac-uri", "package-name", "package-names", "password-expiration-timeout", "password-history-length", "password-minimum-length", "password-minimum-letters", "password-minimum-lower-case", "password-minimum-non-letter", "password-minimum-numeric", "password-minimum-symbols", "password-minimum-upper-case", "password-quality", "password-requirements", "password-scope", "permitted-accessibility-services", "permitted-input-methods", "personal-apps-that-can-read-work-notifications", "personal-play-store-mode", "personal-usage-policies", "play-store-mode", "port", "power-button-actions", "power-management-events-enabled", 
"preferential-network-service", "private-key-selection-enabled", "recommended-global-proxy", "remove-user-disabled", "require-password-unlock", "safe-boot-disabled", "screen-capture-disabled", "set-user-icon-disabled", "set-wallpaper-disabled", "share-location-disabled", "short-support-message", "show-work-contacts-in-personal-profile", "skip-first-use-hints-enabled", "sms-disabled", "software-info-enabled", "start-minutes", "status-bar", "status-bar-disabled", "status-reporting-settings", "stay-on-plugged-modes", "system-error-warnings", "system-navigation", "system-properties-enabled", "system-update", "tethering-config-disabled", "type", "unified-lock-settings", "uninstall-apps-disabled", "unmute-microphone-disabled", "untrusted-apps-policy", "usb-file-transfer-disabled", "usb-mass-storage-enabled", "version", "vpn-config-disabled", "wifi-config-disabled", "wifi-configs-lockdown-enabled"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-types-with-management-disabled", "add-user-disabled", "adjust-volume-disabled", "advanced-security-overrides", "always-on-vpn-package", "android-device-policy-tracks", "app-auto-update-policy", "application-reporting-settings", "application-reports-enabled", "auto-date-and-time-zone", "auto-time-required", "block-applications-enabled", "bluetooth-config-disabled", "bluetooth-contact-sharing-disabled", "bluetooth-disabled", "camera-access", "camera-disabled", "cell-broadcasts-config-disabled", "common-criteria-mode", "common-criteria-mode-enabled", "create-windows-disabled", "credentials-config-disabled", "cross-profile-copy-paste", "cross-profile-data-sharing", "cross-profile-policies", "data-roaming-disabled", "debugging-features-allowed", "default-message", "default-permission-policy", "developer-settings", "device-owner-lock-screen-info", "device-settings", "device-settings-enabled", "display-info-enabled", "enabled-log-types", "encryption-policy", "end-minutes", "ensure-verify-apps-enabled", 
"excluded-hosts", "factory-reset-disabled", "frp-admin-emails", "fun-disabled", "google-play-protect-verify-apps", "hardware-status-enabled", "host", "include-removed-apps", "install-apps-disabled", "install-unknown-sources-allowed", "keyguard-disabled", "keyguard-disabled-features", "kiosk-custom-launcher-enabled", "kiosk-customization", "localized-messages", "location-mode", "lockdown-enabled", "long-support-message", "max-days-with-work-off", "maximum-failed-passwords-for-wipe", "maximum-time-to-lock", "memory-info-enabled", "microphone-access", "minimum-api-level", "mobile-networks-config-disabled", "modify-accounts-disabled", "mount-physical-media-disabled", "name", "network-escape-hatch-enabled", "network-info-enabled", "network-reset-disabled", "outgoing-beam-disabled", "outgoing-calls-disabled", "pac-uri", "package-name", "package-names", "password-expiration-timeout", "password-history-length", "password-minimum-length", "password-minimum-letters", "password-minimum-lower-case", "password-minimum-non-letter", "password-minimum-numeric", "password-minimum-symbols", "password-minimum-upper-case", "password-quality", "password-requirements", "password-scope", "permitted-accessibility-services", "permitted-input-methods", "personal-apps-that-can-read-work-notifications", "personal-play-store-mode", "personal-usage-policies", "play-store-mode", "port", "power-button-actions", "power-management-events-enabled", "preferential-network-service", "private-key-selection-enabled", "recommended-global-proxy", "remove-user-disabled", "require-password-unlock", "safe-boot-disabled", "screen-capture-disabled", "set-user-icon-disabled", "set-wallpaper-disabled", "share-location-disabled", "short-support-message", "show-work-contacts-in-personal-profile", "skip-first-use-hints-enabled", "sms-disabled", "software-info-enabled", "start-minutes", "status-bar", "status-bar-disabled", "status-reporting-settings", "stay-on-plugged-modes", "system-error-warnings", 
"system-navigation", "system-properties-enabled", "system-update", "tethering-config-disabled", "type", "unified-lock-settings", "uninstall-apps-disabled", "unmute-microphone-disabled", "untrusted-apps-policy", "upload-on-cellular-allowed", "usage-log", "usb-file-transfer-disabled", "usb-mass-storage-enabled", "version", "vpn-config-disabled", "wifi-config-disabled", "wifi-configs-lockdown-enabled", "work-profile-widgets-default"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1593,7 +1707,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1845,7 +1959,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1938,7 +2052,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2184,6 +2298,12 @@ where ("enrollment-tokens-delete", Some(opt)) => { call_result = self._enterprises_enrollment_tokens_delete(opt, dry_run, &mut err).await; }, + ("enrollment-tokens-get", Some(opt)) => { + call_result = self._enterprises_enrollment_tokens_get(opt, dry_run, &mut err).await; + }, + ("enrollment-tokens-list", Some(opt)) => { + call_result = self._enterprises_enrollment_tokens_list(opt, dry_run, &mut err).await; + }, ("get", Some(opt)) => { 
call_result = self._enterprises_get(opt, dry_run, &mut err).await; }, @@ -2313,7 +2433,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("enterprises", "methods: 'applications-get', 'create', 'delete', 'devices-delete', 'devices-get', 'devices-issue-command', 'devices-list', 'devices-operations-cancel', 'devices-operations-delete', 'devices-operations-get', 'devices-operations-list', 'devices-patch', 'enrollment-tokens-create', 'enrollment-tokens-delete', 'get', 'list', 'patch', 'policies-delete', 'policies-get', 'policies-list', 'policies-patch', 'web-apps-create', 'web-apps-delete', 'web-apps-get', 'web-apps-list', 'web-apps-patch' and 'web-tokens-create'", vec![ + ("enterprises", "methods: 'applications-get', 'create', 'delete', 'devices-delete', 'devices-get', 'devices-issue-command', 'devices-list', 'devices-operations-cancel', 'devices-operations-delete', 'devices-operations-get', 'devices-operations-list', 'devices-patch', 'enrollment-tokens-create', 'enrollment-tokens-delete', 'enrollment-tokens-get', 'enrollment-tokens-list', 'get', 'list', 'patch', 'policies-delete', 'policies-get', 'policies-list', 'policies-patch', 'web-apps-create', 'web-apps-delete', 'web-apps-get', 'web-apps-list', 'web-apps-patch' and 'web-tokens-create'", vec![ ("applications-get", Some(r##"Gets info about an application."##), "Details at http://byron.github.io/google-apis-rs/google_androidmanagement1_cli/enterprises_applications-get", @@ -2381,7 +2501,7 @@ async fn main() { Some(false)), ]), ("devices-delete", - Some(r##"Deletes a device. This operation wipes the device."##), + Some(r##"Deletes a device. This operation wipes the device. 
Deleted devices do not show up in enterprises.devices.list calls and a 404 is returned from enterprises.devices.get."##), "Details at http://byron.github.io/google-apis-rs/google_androidmanagement1_cli/enterprises_devices-delete", vec![ (Some(r##"name"##), @@ -2403,7 +2523,7 @@ async fn main() { Some(false)), ]), ("devices-get", - Some(r##"Gets a device."##), + Some(r##"Gets a device. Deleted devices will respond with a 404 error."##), "Details at http://byron.github.io/google-apis-rs/google_androidmanagement1_cli/enterprises_devices-get", vec![ (Some(r##"name"##), @@ -2453,7 +2573,7 @@ async fn main() { Some(false)), ]), ("devices-list", - Some(r##"Lists devices for a given enterprise."##), + Some(r##"Lists devices for a given enterprise. Deleted devices are not returned in the response."##), "Details at http://byron.github.io/google-apis-rs/google_androidmanagement1_cli/enterprises_devices-list", vec![ (Some(r##"parent"##), @@ -2591,7 +2711,7 @@ async fn main() { Some(false)), ]), ("enrollment-tokens-create", - Some(r##"Creates an enrollment token for a given enterprise."##), + Some(r##"Creates an enrollment token for a given enterprise. It's up to the caller's responsibility to manage the lifecycle of newly created tokens and deleting them when they're not intended to be used anymore. Once an enrollment token has been created, it's not possible to retrieve the token's content anymore using AM API. It is recommended for EMMs to securely store the token if it's intended to be reused."##), "Details at http://byron.github.io/google-apis-rs/google_androidmanagement1_cli/enterprises_enrollment-tokens-create", vec![ (Some(r##"parent"##), @@ -2634,6 +2754,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("enrollment-tokens-get", + Some(r##"Gets an active, unexpired enrollment token. 
Only a partial view of EnrollmentToken is returned: all the fields but name and expiration_timestamp are empty. This method is meant to help manage active enrollment tokens lifecycle. For security reasons, it's recommended to delete active enrollment tokens as soon as they're not intended to be used anymore."##), + "Details at http://byron.github.io/google-apis-rs/google_androidmanagement1_cli/enterprises_enrollment-tokens-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the enrollment token in the form enterprises/{enterpriseId}/enrollmentTokens/{enrollmentTokenId}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("enrollment-tokens-list", + Some(r##"Lists active, unexpired enrollment tokens for a given enterprise. The list items contain only a partial view of EnrollmentToken: all the fields but name and expiration_timestamp are empty. This method is meant to help manage active enrollment tokens lifecycle. For security reasons, it's recommended to delete active enrollment tokens as soon as they're not intended to be used anymore."##), + "Details at http://byron.github.io/google-apis-rs/google_androidmanagement1_cli/enterprises_enrollment-tokens-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The name of the enterprise in the form enterprises/{enterpriseId}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2975,7 +3139,7 @@ async fn main() { let mut app = App::new("androidmanagement1") .author("Sebastian Thiel ") - .version("4.0.1+20220302") + .version("5.0.2+20230119") .about("The Android Management API provides remote enterprise management of Android devices and apps.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_androidmanagement1_cli") .arg(Arg::with_name("url") diff --git a/gen/androidmanagement1/Cargo.toml b/gen/androidmanagement1/Cargo.toml index d244d4a677..fcc731571c 100644 --- a/gen/androidmanagement1/Cargo.toml +++ b/gen/androidmanagement1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-androidmanagement1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androidmanagement1" homepage = "https://developers.google.com/android/management" -documentation = "https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-androidmanagement1/5.0.2+20230119" license = "MIT" keywords = ["androidmanagement", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/androidmanagement1/README.md b/gen/androidmanagement1/README.md index 0888122475..377acd84f1 100644 --- a/gen/androidmanagement1/README.md +++ b/gen/androidmanagement1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-androidmanagement1` library allows access to all features of the *Google Android Management* service. 
-This documentation was generated from *Android Management* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *androidmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Android Management* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *androidmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Android Management* *v1* API can be found at the [official documentation site](https://developers.google.com/android/management). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/AndroidManagement) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/AndroidManagement) ... 
-* [enterprises](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::Enterprise) - * [*applications get*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseApplicationGetCall), [*create*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseCreateCall), [*delete*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeleteCall), [*devices delete*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeviceDeleteCall), [*devices get*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeviceGetCall), [*devices issue command*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeviceIssueCommandCall), [*devices list*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeviceListCall), [*devices operations cancel*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeviceOperationCancelCall), [*devices operations delete*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeviceOperationDeleteCall), [*devices operations get*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeviceOperationGetCall), [*devices operations list*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDeviceOperationListCall), [*devices patch*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseDevicePatchCall), [*enrollment tokens 
create*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseEnrollmentTokenCreateCall), [*enrollment tokens delete*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseEnrollmentTokenDeleteCall), [*enrollment tokens get*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseEnrollmentTokenGetCall), [*enrollment tokens list*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseEnrollmentTokenListCall), [*get*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseGetCall), [*list*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseListCall), [*patch*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterprisePatchCall), [*policies delete*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterprisePolicyDeleteCall), [*policies get*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterprisePolicyGetCall), [*policies list*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterprisePolicyListCall), [*policies patch*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterprisePolicyPatchCall), [*web apps create*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseWebAppCreateCall), [*web apps delete*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseWebAppDeleteCall), [*web apps get*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseWebAppGetCall), [*web apps 
list*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseWebAppListCall), [*web apps patch*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseWebAppPatchCall) and [*web tokens create*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::EnterpriseWebTokenCreateCall) -* [signup urls](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::SignupUrl) - * [*create*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/api::SignupUrlCreateCall) +* [enterprises](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::Enterprise) + * [*applications get*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseApplicationGetCall), [*create*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseCreateCall), [*delete*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeleteCall), [*devices delete*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeviceDeleteCall), [*devices get*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeviceGetCall), [*devices issue command*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeviceIssueCommandCall), [*devices list*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeviceListCall), [*devices operations cancel*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeviceOperationCancelCall), [*devices operations 
delete*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeviceOperationDeleteCall), [*devices operations get*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeviceOperationGetCall), [*devices operations list*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDeviceOperationListCall), [*devices patch*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseDevicePatchCall), [*enrollment tokens create*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseEnrollmentTokenCreateCall), [*enrollment tokens delete*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseEnrollmentTokenDeleteCall), [*enrollment tokens get*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseEnrollmentTokenGetCall), [*enrollment tokens list*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseEnrollmentTokenListCall), [*get*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseGetCall), [*list*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseListCall), [*patch*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterprisePatchCall), [*policies delete*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterprisePolicyDeleteCall), [*policies get*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterprisePolicyGetCall), [*policies list*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterprisePolicyListCall), [*policies 
patch*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterprisePolicyPatchCall), [*web apps create*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseWebAppCreateCall), [*web apps delete*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseWebAppDeleteCall), [*web apps get*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseWebAppGetCall), [*web apps list*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseWebAppListCall), [*web apps patch*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseWebAppPatchCall) and [*web tokens create*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::EnterpriseWebTokenCreateCall) +* [signup urls](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::SignupUrl) + * [*create*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/api::SignupUrlCreateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/AndroidManagement)** +* **[Hub](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/AndroidManagement)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::CallBuilder) +* **[Resources](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::Part)** + * **[Parts](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -158,17 +158,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -178,29 +178,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::Delegate) to the -[Method Builder](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::Delegate) to the +[Method Builder](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::RequestValue) and -[decodable](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::RequestValue) and +[decodable](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-androidmanagement1/5.0.2-beta-1+20230119/google_androidmanagement1/client::RequestValue) are moved +* [request values](https://docs.rs/google-androidmanagement1/5.0.2+20230119/google_androidmanagement1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/androidmanagement1/src/api.rs b/gen/androidmanagement1/src/api.rs index 77e7a3f59c..75723f3d8f 100644 --- a/gen/androidmanagement1/src/api.rs +++ b/gen/androidmanagement1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> AndroidManagement { AndroidManagement { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://androidmanagement.googleapis.com/".to_string(), _root_url: "https://androidmanagement.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> AndroidManagement { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/androidmanagement1/src/client.rs b/gen/androidmanagement1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/androidmanagement1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/androidmanagement1/src/lib.rs b/gen/androidmanagement1/src/lib.rs index 63ef24eafd..d09929f24b 100644 --- a/gen/androidmanagement1/src/lib.rs +++ b/gen/androidmanagement1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Android Management* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *androidmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Android Management* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *androidmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Android Management* *v1* API can be found at the //! [official documentation site](https://developers.google.com/android/management). diff --git a/gen/androidpublisher2-cli/Cargo.toml b/gen/androidpublisher2-cli/Cargo.toml index be724531fd..d25925291b 100644 --- a/gen/androidpublisher2-cli/Cargo.toml +++ b/gen/androidpublisher2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-androidpublisher2-cli" -version = "4.0.1+20200331" +version = "5.0.2+20200331" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Publisher (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androidpublisher2-cli" @@ -20,13 +20,13 @@ name = "androidpublisher2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-androidpublisher2] path = "../androidpublisher2" -version = "4.0.1+20200331" +version = "5.0.2+20200331" + diff --git a/gen/androidpublisher2-cli/README.md b/gen/androidpublisher2-cli/README.md index d0e40f4182..471ba1fe4d 100644 --- a/gen/androidpublisher2-cli/README.md +++ 
b/gen/androidpublisher2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Android Publisher* API at revision *20200331*. The CLI is at version *4.0.1*. +This documentation was generated from the *Android Publisher* API at revision *20200331*. The CLI is at version *5.0.2*. ```bash androidpublisher2 [options] diff --git a/gen/androidpublisher2-cli/mkdocs.yml b/gen/androidpublisher2-cli/mkdocs.yml index c54ebec64d..948446a330 100644 --- a/gen/androidpublisher2-cli/mkdocs.yml +++ b/gen/androidpublisher2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Android Publisher v4.0.1+20200331 +site_name: Android Publisher v5.0.2+20200331 site_url: http://byron.github.io/google-apis-rs/google-androidpublisher2-cli site_description: A complete library to interact with Android Publisher (protocol v2) @@ -7,66 +7,71 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/androidpublisher docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['edits_apklistings-delete.md', 'Edits', 'Apklistings Delete'] -- ['edits_apklistings-deleteall.md', 'Edits', 'Apklistings Deleteall'] -- ['edits_apklistings-get.md', 'Edits', 'Apklistings Get'] -- ['edits_apklistings-list.md', 'Edits', 'Apklistings List'] -- ['edits_apklistings-patch.md', 'Edits', 'Apklistings Patch'] -- ['edits_apklistings-update.md', 'Edits', 'Apklistings Update'] -- ['edits_apks-addexternallyhosted.md', 'Edits', 'Apks Addexternallyhosted'] -- ['edits_apks-list.md', 'Edits', 'Apks List'] -- ['edits_apks-upload.md', 'Edits', 'Apks Upload'] -- ['edits_bundles-list.md', 'Edits', 'Bundles List'] -- ['edits_bundles-upload.md', 'Edits', 'Bundles Upload'] -- ['edits_commit.md', 'Edits', 'Commit'] -- ['edits_delete.md', 'Edits', 'Delete'] -- ['edits_deobfuscationfiles-upload.md', 'Edits', 'Deobfuscationfiles Upload'] -- ['edits_details-get.md', 'Edits', 'Details Get'] -- ['edits_details-patch.md', 
'Edits', 'Details Patch'] -- ['edits_details-update.md', 'Edits', 'Details Update'] -- ['edits_expansionfiles-get.md', 'Edits', 'Expansionfiles Get'] -- ['edits_expansionfiles-patch.md', 'Edits', 'Expansionfiles Patch'] -- ['edits_expansionfiles-update.md', 'Edits', 'Expansionfiles Update'] -- ['edits_expansionfiles-upload.md', 'Edits', 'Expansionfiles Upload'] -- ['edits_get.md', 'Edits', 'Get'] -- ['edits_images-delete.md', 'Edits', 'Images Delete'] -- ['edits_images-deleteall.md', 'Edits', 'Images Deleteall'] -- ['edits_images-list.md', 'Edits', 'Images List'] -- ['edits_images-upload.md', 'Edits', 'Images Upload'] -- ['edits_insert.md', 'Edits', 'Insert'] -- ['edits_listings-delete.md', 'Edits', 'Listings Delete'] -- ['edits_listings-deleteall.md', 'Edits', 'Listings Deleteall'] -- ['edits_listings-get.md', 'Edits', 'Listings Get'] -- ['edits_listings-list.md', 'Edits', 'Listings List'] -- ['edits_listings-patch.md', 'Edits', 'Listings Patch'] -- ['edits_listings-update.md', 'Edits', 'Listings Update'] -- ['edits_testers-get.md', 'Edits', 'Testers Get'] -- ['edits_testers-patch.md', 'Edits', 'Testers Patch'] -- ['edits_testers-update.md', 'Edits', 'Testers Update'] -- ['edits_tracks-get.md', 'Edits', 'Tracks Get'] -- ['edits_tracks-list.md', 'Edits', 'Tracks List'] -- ['edits_tracks-patch.md', 'Edits', 'Tracks Patch'] -- ['edits_tracks-update.md', 'Edits', 'Tracks Update'] -- ['edits_validate.md', 'Edits', 'Validate'] -- ['inappproducts_delete.md', 'Inappproducts', 'Delete'] -- ['inappproducts_get.md', 'Inappproducts', 'Get'] -- ['inappproducts_insert.md', 'Inappproducts', 'Insert'] -- ['inappproducts_list.md', 'Inappproducts', 'List'] -- ['inappproducts_patch.md', 'Inappproducts', 'Patch'] -- ['inappproducts_update.md', 'Inappproducts', 'Update'] -- ['orders_refund.md', 'Orders', 'Refund'] -- ['purchases_products-get.md', 'Purchases', 'Products Get'] -- ['purchases_subscriptions-cancel.md', 'Purchases', 'Subscriptions Cancel'] -- 
['purchases_subscriptions-defer.md', 'Purchases', 'Subscriptions Defer'] -- ['purchases_subscriptions-get.md', 'Purchases', 'Subscriptions Get'] -- ['purchases_subscriptions-refund.md', 'Purchases', 'Subscriptions Refund'] -- ['purchases_subscriptions-revoke.md', 'Purchases', 'Subscriptions Revoke'] -- ['purchases_voidedpurchases-list.md', 'Purchases', 'Voidedpurchases List'] -- ['reviews_get.md', 'Reviews', 'Get'] -- ['reviews_list.md', 'Reviews', 'List'] -- ['reviews_reply.md', 'Reviews', 'Reply'] +nav: +- Home: 'index.md' +- 'Edits': + - 'Apklistings Delete': 'edits_apklistings-delete.md' + - 'Apklistings Deleteall': 'edits_apklistings-deleteall.md' + - 'Apklistings Get': 'edits_apklistings-get.md' + - 'Apklistings List': 'edits_apklistings-list.md' + - 'Apklistings Patch': 'edits_apklistings-patch.md' + - 'Apklistings Update': 'edits_apklistings-update.md' + - 'Apks Addexternallyhosted': 'edits_apks-addexternallyhosted.md' + - 'Apks List': 'edits_apks-list.md' + - 'Apks Upload': 'edits_apks-upload.md' + - 'Bundles List': 'edits_bundles-list.md' + - 'Bundles Upload': 'edits_bundles-upload.md' + - 'Commit': 'edits_commit.md' + - 'Delete': 'edits_delete.md' + - 'Deobfuscationfiles Upload': 'edits_deobfuscationfiles-upload.md' + - 'Details Get': 'edits_details-get.md' + - 'Details Patch': 'edits_details-patch.md' + - 'Details Update': 'edits_details-update.md' + - 'Expansionfiles Get': 'edits_expansionfiles-get.md' + - 'Expansionfiles Patch': 'edits_expansionfiles-patch.md' + - 'Expansionfiles Update': 'edits_expansionfiles-update.md' + - 'Expansionfiles Upload': 'edits_expansionfiles-upload.md' + - 'Get': 'edits_get.md' + - 'Images Delete': 'edits_images-delete.md' + - 'Images Deleteall': 'edits_images-deleteall.md' + - 'Images List': 'edits_images-list.md' + - 'Images Upload': 'edits_images-upload.md' + - 'Insert': 'edits_insert.md' + - 'Listings Delete': 'edits_listings-delete.md' + - 'Listings Deleteall': 'edits_listings-deleteall.md' + - 'Listings Get': 
'edits_listings-get.md' + - 'Listings List': 'edits_listings-list.md' + - 'Listings Patch': 'edits_listings-patch.md' + - 'Listings Update': 'edits_listings-update.md' + - 'Testers Get': 'edits_testers-get.md' + - 'Testers Patch': 'edits_testers-patch.md' + - 'Testers Update': 'edits_testers-update.md' + - 'Tracks Get': 'edits_tracks-get.md' + - 'Tracks List': 'edits_tracks-list.md' + - 'Tracks Patch': 'edits_tracks-patch.md' + - 'Tracks Update': 'edits_tracks-update.md' + - 'Validate': 'edits_validate.md' +- 'Inappproducts': + - 'Delete': 'inappproducts_delete.md' + - 'Get': 'inappproducts_get.md' + - 'Insert': 'inappproducts_insert.md' + - 'List': 'inappproducts_list.md' + - 'Patch': 'inappproducts_patch.md' + - 'Update': 'inappproducts_update.md' +- 'Orders': + - 'Refund': 'orders_refund.md' +- 'Purchases': + - 'Products Get': 'purchases_products-get.md' + - 'Subscriptions Cancel': 'purchases_subscriptions-cancel.md' + - 'Subscriptions Defer': 'purchases_subscriptions-defer.md' + - 'Subscriptions Get': 'purchases_subscriptions-get.md' + - 'Subscriptions Refund': 'purchases_subscriptions-refund.md' + - 'Subscriptions Revoke': 'purchases_subscriptions-revoke.md' + - 'Voidedpurchases List': 'purchases_voidedpurchases-list.md' +- 'Reviews': + - 'Get': 'reviews_get.md' + - 'List': 'reviews_list.md' + - 'Reply': 'reviews_reply.md' theme: readthedocs diff --git a/gen/androidpublisher2-cli/src/client.rs b/gen/androidpublisher2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/androidpublisher2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use 
std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/androidpublisher2-cli/src/main.rs b/gen/androidpublisher2-cli/src/main.rs index fa80d607f9..ec8dc5ba91 100644 --- a/gen/androidpublisher2-cli/src/main.rs +++ b/gen/androidpublisher2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_androidpublisher2::{api, Error, oauth2}; +use google_androidpublisher2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -685,7 +684,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ack-bundle-installation-warning" => { - call = call.ack_bundle_installation_warning(arg_from_str(value.unwrap_or("false"), err, "ack-bundle-installation-warning", "boolean")); + call = call.ack_bundle_installation_warning( value.map(|v| arg_from_str(v, err, "ack-bundle-installation-warning", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2808,7 +2807,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "auto-convert-missing-prices" => { - call = call.auto_convert_missing_prices(arg_from_str(value.unwrap_or("false"), err, "auto-convert-missing-prices", "boolean")); + call = call.auto_convert_missing_prices( value.map(|v| arg_from_str(v, err, "auto-convert-missing-prices", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2867,10 +2866,10 @@ where call = call.token(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); 
+ call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2968,7 +2967,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "auto-convert-missing-prices" => { - call = call.auto_convert_missing_prices(arg_from_str(value.unwrap_or("false"), err, "auto-convert-missing-prices", "boolean")); + call = call.auto_convert_missing_prices( value.map(|v| arg_from_str(v, err, "auto-convert-missing-prices", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3066,7 +3065,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "auto-convert-missing-prices" => { - call = call.auto_convert_missing_prices(arg_from_str(value.unwrap_or("false"), err, "auto-convert-missing-prices", "boolean")); + call = call.auto_convert_missing_prices( value.map(|v| arg_from_str(v, err, "auto-convert-missing-prices", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3122,7 +3121,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "revoke" => { - call = call.revoke(arg_from_str(value.unwrap_or("false"), err, "revoke", "boolean")); + call = call.revoke( value.map(|v| arg_from_str(v, err, "revoke", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3495,16 +3494,16 @@ where call = call.token(value.unwrap_or("")); }, "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "int64")).unwrap_or(-0)); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", 
"uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "end-time" => { - call = call.end_time(value.unwrap_or("")); + call = call.end_time( value.map(|v| arg_from_str(v, err, "end-time", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3622,10 +3621,10 @@ where call = call.token(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -6010,7 +6009,7 @@ async fn main() { let mut app = App::new("androidpublisher2") .author("Sebastian Thiel ") - .version("4.0.1+20200331") + .version("5.0.2+20200331") .about("Accesses Android application developers' Google Play accounts.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_androidpublisher2_cli") .arg(Arg::with_name("url") diff --git a/gen/androidpublisher2/Cargo.toml b/gen/androidpublisher2/Cargo.toml index 12b5884f72..4d6e0e657e 100644 --- a/gen/androidpublisher2/Cargo.toml +++ b/gen/androidpublisher2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-androidpublisher2" -version = "5.0.2-beta-1+20200331" +version = "5.0.2+20200331" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Publisher (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androidpublisher2" homepage = "https://developers.google.com/android-publisher" -documentation = 
"https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331" +documentation = "https://docs.rs/google-androidpublisher2/5.0.2+20200331" license = "MIT" keywords = ["androidpublisher", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/androidpublisher2/README.md b/gen/androidpublisher2/README.md index d2cb2d722c..b9a1ae0067 100644 --- a/gen/androidpublisher2/README.md +++ b/gen/androidpublisher2/README.md @@ -5,33 +5,33 @@ DO NOT EDIT ! --> The `google-androidpublisher2` library allows access to all features of the *Google Android Publisher* service. -This documentation was generated from *Android Publisher* crate version *5.0.2-beta-1+20200331*, where *20200331* is the exact revision of the *androidpublisher:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Android Publisher* crate version *5.0.2+20200331*, where *20200331* is the exact revision of the *androidpublisher:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Android Publisher* *v2* API can be found at the [official documentation site](https://developers.google.com/android-publisher). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/AndroidPublisher) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/AndroidPublisher) ... 
* edits - * [*apklistings delete*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApklistingDeleteCall), [*apklistings deleteall*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApklistingDeleteallCall), [*apklistings get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApklistingGetCall), [*apklistings list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApklistingListCall), [*apklistings patch*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApklistingPatchCall), [*apklistings update*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApklistingUpdateCall), [*apks addexternallyhosted*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApkAddexternallyhostedCall), [*apks list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApkListCall), [*apks upload*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApkUploadCall), [*bundles list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditBundleListCall), [*bundles upload*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditBundleUploadCall), [*commit*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditCommitCall), [*delete*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditDeleteCall), [*deobfuscationfiles upload*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditDeobfuscationfileUploadCall), [*details 
get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditDetailGetCall), [*details patch*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditDetailPatchCall), [*details update*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditDetailUpdateCall), [*expansionfiles get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditExpansionfileGetCall), [*expansionfiles patch*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditExpansionfilePatchCall), [*expansionfiles update*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditExpansionfileUpdateCall), [*expansionfiles upload*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditExpansionfileUploadCall), [*get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditGetCall), [*images delete*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditImageDeleteCall), [*images deleteall*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditImageDeleteallCall), [*images list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditImageListCall), [*images upload*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditImageUploadCall), [*insert*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditInsertCall), [*listings delete*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditListingDeleteCall), [*listings 
deleteall*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditListingDeleteallCall), [*listings get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditListingGetCall), [*listings list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditListingListCall), [*listings patch*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditListingPatchCall), [*listings update*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditListingUpdateCall), [*testers get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditTesterGetCall), [*testers patch*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditTesterPatchCall), [*testers update*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditTesterUpdateCall), [*tracks get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditTrackGetCall), [*tracks list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditTrackListCall), [*tracks patch*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditTrackPatchCall), [*tracks update*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditTrackUpdateCall) and [*validate*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditValidateCall) + * [*apklistings delete*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApklistingDeleteCall), [*apklistings deleteall*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApklistingDeleteallCall), 
[*apklistings get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApklistingGetCall), [*apklistings list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApklistingListCall), [*apklistings patch*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApklistingPatchCall), [*apklistings update*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApklistingUpdateCall), [*apks addexternallyhosted*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApkAddexternallyhostedCall), [*apks list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApkListCall), [*apks upload*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApkUploadCall), [*bundles list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditBundleListCall), [*bundles upload*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditBundleUploadCall), [*commit*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditCommitCall), [*delete*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditDeleteCall), [*deobfuscationfiles upload*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditDeobfuscationfileUploadCall), [*details get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditDetailGetCall), [*details patch*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditDetailPatchCall), [*details update*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditDetailUpdateCall), [*expansionfiles 
get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditExpansionfileGetCall), [*expansionfiles patch*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditExpansionfilePatchCall), [*expansionfiles update*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditExpansionfileUpdateCall), [*expansionfiles upload*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditExpansionfileUploadCall), [*get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditGetCall), [*images delete*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditImageDeleteCall), [*images deleteall*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditImageDeleteallCall), [*images list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditImageListCall), [*images upload*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditImageUploadCall), [*insert*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditInsertCall), [*listings delete*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditListingDeleteCall), [*listings deleteall*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditListingDeleteallCall), [*listings get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditListingGetCall), [*listings list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditListingListCall), [*listings patch*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditListingPatchCall), [*listings 
update*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditListingUpdateCall), [*testers get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditTesterGetCall), [*testers patch*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditTesterPatchCall), [*testers update*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditTesterUpdateCall), [*tracks get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditTrackGetCall), [*tracks list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditTrackListCall), [*tracks patch*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditTrackPatchCall), [*tracks update*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditTrackUpdateCall) and [*validate*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditValidateCall) * inappproducts - * [*delete*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::InappproductDeleteCall), [*get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::InappproductGetCall), [*insert*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::InappproductInsertCall), [*list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::InappproductListCall), [*patch*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::InappproductPatchCall) and [*update*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::InappproductUpdateCall) + * 
[*delete*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::InappproductDeleteCall), [*get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::InappproductGetCall), [*insert*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::InappproductInsertCall), [*list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::InappproductListCall), [*patch*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::InappproductPatchCall) and [*update*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::InappproductUpdateCall) * orders - * [*refund*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::OrderRefundCall) + * [*refund*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::OrderRefundCall) * purchases - * [*products get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::PurchaseProductGetCall), [*subscriptions cancel*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::PurchaseSubscriptionCancelCall), [*subscriptions defer*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::PurchaseSubscriptionDeferCall), [*subscriptions get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::PurchaseSubscriptionGetCall), [*subscriptions refund*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::PurchaseSubscriptionRefundCall), [*subscriptions revoke*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::PurchaseSubscriptionRevokeCall) and [*voidedpurchases 
list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::PurchaseVoidedpurchaseListCall) -* [reviews](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::Review) - * [*get*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::ReviewGetCall), [*list*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::ReviewListCall) and [*reply*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::ReviewReplyCall) + * [*products get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::PurchaseProductGetCall), [*subscriptions cancel*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::PurchaseSubscriptionCancelCall), [*subscriptions defer*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::PurchaseSubscriptionDeferCall), [*subscriptions get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::PurchaseSubscriptionGetCall), [*subscriptions refund*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::PurchaseSubscriptionRefundCall), [*subscriptions revoke*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::PurchaseSubscriptionRevokeCall) and [*voidedpurchases list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::PurchaseVoidedpurchaseListCall) +* [reviews](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::Review) + * [*get*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::ReviewGetCall), [*list*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::ReviewListCall) and 
[*reply*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::ReviewReplyCall) Upload supported by ... -* [*apks upload edits*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditApkUploadCall) -* [*bundles upload edits*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditBundleUploadCall) -* [*deobfuscationfiles upload edits*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditDeobfuscationfileUploadCall) -* [*expansionfiles upload edits*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditExpansionfileUploadCall) -* [*images upload edits*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/api::EditImageUploadCall) +* [*apks upload edits*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditApkUploadCall) +* [*bundles upload edits*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditBundleUploadCall) +* [*deobfuscationfiles upload edits*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditDeobfuscationfileUploadCall) +* [*expansionfiles upload edits*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditExpansionfileUploadCall) +* [*images upload edits*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/api::EditImageUploadCall) @@ -39,17 +39,17 @@ Upload supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/AndroidPublisher)** +* **[Hub](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/AndroidPublisher)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::CallBuilder) -* **[Resources](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::CallBuilder) +* **[Resources](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::Part)** + * **[Parts](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -144,17 +144,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -164,29 +164,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::Delegate) to the -[Method Builder](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::Delegate) to the +[Method Builder](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::RequestValue) and -[decodable](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::RequestValue) and +[decodable](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-androidpublisher2/5.0.2-beta-1+20200331/google_androidpublisher2/client::RequestValue) are moved +* [request values](https://docs.rs/google-androidpublisher2/5.0.2+20200331/google_androidpublisher2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/androidpublisher2/src/api.rs b/gen/androidpublisher2/src/api.rs index 2dd85cfdf2..0102f1344d 100644 --- a/gen/androidpublisher2/src/api.rs +++ b/gen/androidpublisher2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> AndroidPublisher { AndroidPublisher { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/androidpublisher/v2/applications/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> AndroidPublisher { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/androidpublisher2/src/client.rs b/gen/androidpublisher2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/androidpublisher2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/androidpublisher2/src/lib.rs b/gen/androidpublisher2/src/lib.rs index 0afbc08e29..d4aabc219b 100644 --- a/gen/androidpublisher2/src/lib.rs +++ b/gen/androidpublisher2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Android Publisher* crate version *5.0.2-beta-1+20200331*, where *20200331* is the exact revision of the *androidpublisher:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Android Publisher* crate version *5.0.2+20200331*, where *20200331* is the exact revision of the *androidpublisher:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Android Publisher* *v2* API can be found at the //! [official documentation site](https://developers.google.com/android-publisher). diff --git a/gen/androidpublisher3-cli/Cargo.toml b/gen/androidpublisher3-cli/Cargo.toml index f9a115193a..e58b4a8d06 100644 --- a/gen/androidpublisher3-cli/Cargo.toml +++ b/gen/androidpublisher3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-androidpublisher3-cli" -version = "4.0.1+20220307" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Publisher (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androidpublisher3-cli" @@ -20,13 +20,13 @@ name = "androidpublisher3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-androidpublisher3] path = "../androidpublisher3" -version = "4.0.1+20220307" +version = "5.0.2+20230124" + diff --git a/gen/androidpublisher3-cli/README.md b/gen/androidpublisher3-cli/README.md index a99200590e..0d528c3f8f 100644 --- a/gen/androidpublisher3-cli/README.md +++ 
b/gen/androidpublisher3-cli/README.md @@ -25,10 +25,14 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Android Publisher* API at revision *20220307*. The CLI is at version *4.0.1*. +This documentation was generated from the *Android Publisher* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash androidpublisher3 [options] + applications + device-tier-configs-create (-r )... [-p ]... [-o ] + device-tier-configs-get [-p ]... [-o ] + device-tier-configs-list [-p ]... [-o ] edits apks-addexternallyhosted (-r )... [-p ]... [-o ] apks-list [-p ]... [-o ] @@ -85,6 +89,23 @@ androidpublisher3 [options] uploadbundle (-u simple -f [-m ]) [-p ]... [-o ] monetization convert-region-prices (-r )... [-p ]... [-o ] + subscriptions-archive (-r )... [-p ]... [-o ] + subscriptions-base-plans-activate (-r )... [-p ]... [-o ] + subscriptions-base-plans-deactivate (-r )... [-p ]... [-o ] + subscriptions-base-plans-delete [-p ]... + subscriptions-base-plans-migrate-prices (-r )... [-p ]... [-o ] + subscriptions-base-plans-offers-activate (-r )... [-p ]... [-o ] + subscriptions-base-plans-offers-create (-r )... [-p ]... [-o ] + subscriptions-base-plans-offers-deactivate (-r )... [-p ]... [-o ] + subscriptions-base-plans-offers-delete [-p ]... + subscriptions-base-plans-offers-get [-p ]... [-o ] + subscriptions-base-plans-offers-list [-p ]... [-o ] + subscriptions-base-plans-offers-patch (-r )... [-p ]... [-o ] + subscriptions-create (-r )... [-p ]... [-o ] + subscriptions-delete [-p ]... + subscriptions-get [-p ]... [-o ] + subscriptions-list [-p ]... [-o ] + subscriptions-patch (-r )... [-p ]... [-o ] orders refund [-p ]... purchases @@ -96,6 +117,7 @@ androidpublisher3 [options] subscriptions-get [-p ]... [-o ] subscriptions-refund [-p ]... subscriptions-revoke [-p ]... + subscriptionsv2-get [-p ]... [-o ] voidedpurchases-list [-p ]... [-o ] reviews get [-p ]... 
[-o ] diff --git a/gen/androidpublisher3-cli/mkdocs.yml b/gen/androidpublisher3-cli/mkdocs.yml index 7c46ae4382..705ff1f53b 100644 --- a/gen/androidpublisher3-cli/mkdocs.yml +++ b/gen/androidpublisher3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Android Publisher v4.0.1+20220307 +site_name: Android Publisher v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-androidpublisher3-cli site_description: A complete library to interact with Android Publisher (protocol v3) @@ -7,79 +7,112 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/androidpublisher docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['edits_apks-addexternallyhosted.md', 'Edits', 'Apks Addexternallyhosted'] -- ['edits_apks-list.md', 'Edits', 'Apks List'] -- ['edits_apks-upload.md', 'Edits', 'Apks Upload'] -- ['edits_bundles-list.md', 'Edits', 'Bundles List'] -- ['edits_bundles-upload.md', 'Edits', 'Bundles Upload'] -- ['edits_commit.md', 'Edits', 'Commit'] -- ['edits_countryavailability-get.md', 'Edits', 'Countryavailability Get'] -- ['edits_delete.md', 'Edits', 'Delete'] -- ['edits_deobfuscationfiles-upload.md', 'Edits', 'Deobfuscationfiles Upload'] -- ['edits_details-get.md', 'Edits', 'Details Get'] -- ['edits_details-patch.md', 'Edits', 'Details Patch'] -- ['edits_details-update.md', 'Edits', 'Details Update'] -- ['edits_expansionfiles-get.md', 'Edits', 'Expansionfiles Get'] -- ['edits_expansionfiles-patch.md', 'Edits', 'Expansionfiles Patch'] -- ['edits_expansionfiles-update.md', 'Edits', 'Expansionfiles Update'] -- ['edits_expansionfiles-upload.md', 'Edits', 'Expansionfiles Upload'] -- ['edits_get.md', 'Edits', 'Get'] -- ['edits_images-delete.md', 'Edits', 'Images Delete'] -- ['edits_images-deleteall.md', 'Edits', 'Images Deleteall'] -- ['edits_images-list.md', 'Edits', 'Images List'] -- ['edits_images-upload.md', 'Edits', 'Images Upload'] -- ['edits_insert.md', 'Edits', 'Insert'] -- ['edits_listings-delete.md', 'Edits', 'Listings Delete'] -- 
['edits_listings-deleteall.md', 'Edits', 'Listings Deleteall'] -- ['edits_listings-get.md', 'Edits', 'Listings Get'] -- ['edits_listings-list.md', 'Edits', 'Listings List'] -- ['edits_listings-patch.md', 'Edits', 'Listings Patch'] -- ['edits_listings-update.md', 'Edits', 'Listings Update'] -- ['edits_testers-get.md', 'Edits', 'Testers Get'] -- ['edits_testers-patch.md', 'Edits', 'Testers Patch'] -- ['edits_testers-update.md', 'Edits', 'Testers Update'] -- ['edits_tracks-get.md', 'Edits', 'Tracks Get'] -- ['edits_tracks-list.md', 'Edits', 'Tracks List'] -- ['edits_tracks-patch.md', 'Edits', 'Tracks Patch'] -- ['edits_tracks-update.md', 'Edits', 'Tracks Update'] -- ['edits_validate.md', 'Edits', 'Validate'] -- ['generatedapks_download.md', 'Generatedapks', 'Download'] -- ['generatedapks_list.md', 'Generatedapks', 'List'] -- ['grants_create.md', 'Grants', 'Create'] -- ['grants_delete.md', 'Grants', 'Delete'] -- ['grants_patch.md', 'Grants', 'Patch'] -- ['inappproducts_delete.md', 'Inappproducts', 'Delete'] -- ['inappproducts_get.md', 'Inappproducts', 'Get'] -- ['inappproducts_insert.md', 'Inappproducts', 'Insert'] -- ['inappproducts_list.md', 'Inappproducts', 'List'] -- ['inappproducts_patch.md', 'Inappproducts', 'Patch'] -- ['inappproducts_update.md', 'Inappproducts', 'Update'] -- ['internalappsharingartifacts_uploadapk.md', 'Internalappsharingartifacts', 'Uploadapk'] -- ['internalappsharingartifacts_uploadbundle.md', 'Internalappsharingartifacts', 'Uploadbundle'] -- ['monetization_convert-region-prices.md', 'Monetization', 'Convert Region Prices'] -- ['orders_refund.md', 'Orders', 'Refund'] -- ['purchases_products-acknowledge.md', 'Purchases', 'Products Acknowledge'] -- ['purchases_products-get.md', 'Purchases', 'Products Get'] -- ['purchases_subscriptions-acknowledge.md', 'Purchases', 'Subscriptions Acknowledge'] -- ['purchases_subscriptions-cancel.md', 'Purchases', 'Subscriptions Cancel'] -- ['purchases_subscriptions-defer.md', 'Purchases', 'Subscriptions Defer'] 
-- ['purchases_subscriptions-get.md', 'Purchases', 'Subscriptions Get'] -- ['purchases_subscriptions-refund.md', 'Purchases', 'Subscriptions Refund'] -- ['purchases_subscriptions-revoke.md', 'Purchases', 'Subscriptions Revoke'] -- ['purchases_voidedpurchases-list.md', 'Purchases', 'Voidedpurchases List'] -- ['reviews_get.md', 'Reviews', 'Get'] -- ['reviews_list.md', 'Reviews', 'List'] -- ['reviews_reply.md', 'Reviews', 'Reply'] -- ['systemapks_variants-create.md', 'Systemapks', 'Variants Create'] -- ['systemapks_variants-download.md', 'Systemapks', 'Variants Download'] -- ['systemapks_variants-get.md', 'Systemapks', 'Variants Get'] -- ['systemapks_variants-list.md', 'Systemapks', 'Variants List'] -- ['users_create.md', 'Users', 'Create'] -- ['users_delete.md', 'Users', 'Delete'] -- ['users_list.md', 'Users', 'List'] -- ['users_patch.md', 'Users', 'Patch'] +nav: +- Home: 'index.md' +- 'Applications': + - 'Device Tier Configs Create': 'applications_device-tier-configs-create.md' + - 'Device Tier Configs Get': 'applications_device-tier-configs-get.md' + - 'Device Tier Configs List': 'applications_device-tier-configs-list.md' +- 'Edits': + - 'Apks Addexternallyhosted': 'edits_apks-addexternallyhosted.md' + - 'Apks List': 'edits_apks-list.md' + - 'Apks Upload': 'edits_apks-upload.md' + - 'Bundles List': 'edits_bundles-list.md' + - 'Bundles Upload': 'edits_bundles-upload.md' + - 'Commit': 'edits_commit.md' + - 'Countryavailability Get': 'edits_countryavailability-get.md' + - 'Delete': 'edits_delete.md' + - 'Deobfuscationfiles Upload': 'edits_deobfuscationfiles-upload.md' + - 'Details Get': 'edits_details-get.md' + - 'Details Patch': 'edits_details-patch.md' + - 'Details Update': 'edits_details-update.md' + - 'Expansionfiles Get': 'edits_expansionfiles-get.md' + - 'Expansionfiles Patch': 'edits_expansionfiles-patch.md' + - 'Expansionfiles Update': 'edits_expansionfiles-update.md' + - 'Expansionfiles Upload': 'edits_expansionfiles-upload.md' + - 'Get': 'edits_get.md' + - 
'Images Delete': 'edits_images-delete.md' + - 'Images Deleteall': 'edits_images-deleteall.md' + - 'Images List': 'edits_images-list.md' + - 'Images Upload': 'edits_images-upload.md' + - 'Insert': 'edits_insert.md' + - 'Listings Delete': 'edits_listings-delete.md' + - 'Listings Deleteall': 'edits_listings-deleteall.md' + - 'Listings Get': 'edits_listings-get.md' + - 'Listings List': 'edits_listings-list.md' + - 'Listings Patch': 'edits_listings-patch.md' + - 'Listings Update': 'edits_listings-update.md' + - 'Testers Get': 'edits_testers-get.md' + - 'Testers Patch': 'edits_testers-patch.md' + - 'Testers Update': 'edits_testers-update.md' + - 'Tracks Get': 'edits_tracks-get.md' + - 'Tracks List': 'edits_tracks-list.md' + - 'Tracks Patch': 'edits_tracks-patch.md' + - 'Tracks Update': 'edits_tracks-update.md' + - 'Validate': 'edits_validate.md' +- 'Generatedapks': + - 'Download': 'generatedapks_download.md' + - 'List': 'generatedapks_list.md' +- 'Grants': + - 'Create': 'grants_create.md' + - 'Delete': 'grants_delete.md' + - 'Patch': 'grants_patch.md' +- 'Inappproducts': + - 'Delete': 'inappproducts_delete.md' + - 'Get': 'inappproducts_get.md' + - 'Insert': 'inappproducts_insert.md' + - 'List': 'inappproducts_list.md' + - 'Patch': 'inappproducts_patch.md' + - 'Update': 'inappproducts_update.md' +- 'Internalappsharingartifacts': + - 'Uploadapk': 'internalappsharingartifacts_uploadapk.md' + - 'Uploadbundle': 'internalappsharingartifacts_uploadbundle.md' +- 'Monetization': + - 'Convert Region Prices': 'monetization_convert-region-prices.md' + - 'Subscriptions Archive': 'monetization_subscriptions-archive.md' + - 'Subscriptions Base Plans Activate': 'monetization_subscriptions-base-plans-activate.md' + - 'Subscriptions Base Plans Deactivate': 'monetization_subscriptions-base-plans-deactivate.md' + - 'Subscriptions Base Plans Delete': 'monetization_subscriptions-base-plans-delete.md' + - 'Subscriptions Base Plans Migrate Prices': 
'monetization_subscriptions-base-plans-migrate-prices.md' + - 'Subscriptions Base Plans Offers Activate': 'monetization_subscriptions-base-plans-offers-activate.md' + - 'Subscriptions Base Plans Offers Create': 'monetization_subscriptions-base-plans-offers-create.md' + - 'Subscriptions Base Plans Offers Deactivate': 'monetization_subscriptions-base-plans-offers-deactivate.md' + - 'Subscriptions Base Plans Offers Delete': 'monetization_subscriptions-base-plans-offers-delete.md' + - 'Subscriptions Base Plans Offers Get': 'monetization_subscriptions-base-plans-offers-get.md' + - 'Subscriptions Base Plans Offers List': 'monetization_subscriptions-base-plans-offers-list.md' + - 'Subscriptions Base Plans Offers Patch': 'monetization_subscriptions-base-plans-offers-patch.md' + - 'Subscriptions Create': 'monetization_subscriptions-create.md' + - 'Subscriptions Delete': 'monetization_subscriptions-delete.md' + - 'Subscriptions Get': 'monetization_subscriptions-get.md' + - 'Subscriptions List': 'monetization_subscriptions-list.md' + - 'Subscriptions Patch': 'monetization_subscriptions-patch.md' +- 'Orders': + - 'Refund': 'orders_refund.md' +- 'Purchases': + - 'Products Acknowledge': 'purchases_products-acknowledge.md' + - 'Products Get': 'purchases_products-get.md' + - 'Subscriptions Acknowledge': 'purchases_subscriptions-acknowledge.md' + - 'Subscriptions Cancel': 'purchases_subscriptions-cancel.md' + - 'Subscriptions Defer': 'purchases_subscriptions-defer.md' + - 'Subscriptions Get': 'purchases_subscriptions-get.md' + - 'Subscriptions Refund': 'purchases_subscriptions-refund.md' + - 'Subscriptions Revoke': 'purchases_subscriptions-revoke.md' + - 'Subscriptionsv2 Get': 'purchases_subscriptionsv2-get.md' + - 'Voidedpurchases List': 'purchases_voidedpurchases-list.md' +- 'Reviews': + - 'Get': 'reviews_get.md' + - 'List': 'reviews_list.md' + - 'Reply': 'reviews_reply.md' +- 'Systemapks': + - 'Variants Create': 'systemapks_variants-create.md' + - 'Variants Download': 
'systemapks_variants-download.md' + - 'Variants Get': 'systemapks_variants-get.md' + - 'Variants List': 'systemapks_variants-list.md' +- 'Users': + - 'Create': 'users_create.md' + - 'Delete': 'users_delete.md' + - 'List': 'users_list.md' + - 'Patch': 'users_patch.md' theme: readthedocs diff --git a/gen/androidpublisher3-cli/src/client.rs b/gen/androidpublisher3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/androidpublisher3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/androidpublisher3-cli/src/main.rs b/gen/androidpublisher3-cli/src/main.rs index eec74b2072..df8631a150 100644 --- a/gen/androidpublisher3-cli/src/main.rs +++ b/gen/androidpublisher3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_androidpublisher3::{api, Error, oauth2}; +use google_androidpublisher3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,206 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _applications_device_tier_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "device-tier-config-id" => Some(("deviceTierConfigId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["device-tier-config-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DeviceTierConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.applications().device_tier_configs_create(request, opt.value_of("package-name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "allow-unknown-devices" => { + call = call.allow_unknown_devices( value.map(|v| arg_from_str(v, err, "allow-unknown-devices", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["allow-unknown-devices"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _applications_device_tier_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.applications().device_tier_configs_get(opt.value_of("package-name").unwrap_or(""), opt.value_of("device-tier-config-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + 
json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _applications_device_tier_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.applications().device_tier_configs_list(opt.value_of("package-name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + 
Ok(()) + } + } + } + } + async fn _edits_apks_addexternallyhosted(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -314,8 +513,11 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "device-tier-config-id" => { + call = call.device_tier_config_id(value.unwrap_or("")); + }, "ack-bundle-installation-warning" => { - call = call.ack_bundle_installation_warning(arg_from_str(value.unwrap_or("false"), err, "ack-bundle-installation-warning", "boolean")); + call = call.ack_bundle_installation_warning( value.map(|v| arg_from_str(v, err, "ack-bundle-installation-warning", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -330,7 +532,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["ack-bundle-installation-warning"].iter().map(|v|*v)); + v.extend(["ack-bundle-installation-warning", "device-tier-config-id"].iter().map(|v|*v)); v } )); } } @@ -374,7 +576,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "changes-not-sent-for-review" => { - call = call.changes_not_sent_for_review(arg_from_str(value.unwrap_or("false"), err, "changes-not-sent-for-review", "boolean")); + call = call.changes_not_sent_for_review( value.map(|v| arg_from_str(v, err, "changes-not-sent-for-review", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2630,7 +2832,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2826,7 +3028,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "auto-convert-missing-prices" => { - 
call = call.auto_convert_missing_prices(arg_from_str(value.unwrap_or("false"), err, "auto-convert-missing-prices", "boolean")); + call = call.auto_convert_missing_prices( value.map(|v| arg_from_str(v, err, "auto-convert-missing-prices", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2885,10 +3087,10 @@ where call = call.token(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2988,7 +3190,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "auto-convert-missing-prices" => { - call = call.auto_convert_missing_prices(arg_from_str(value.unwrap_or("false"), err, "auto-convert-missing-prices", "boolean")); + call = call.auto_convert_missing_prices( value.map(|v| arg_from_str(v, err, "auto-convert-missing-prices", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3088,10 +3290,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "auto-convert-missing-prices" => { - call = call.auto_convert_missing_prices(arg_from_str(value.unwrap_or("false"), err, "auto-convert-missing-prices", "boolean")); + call = call.auto_convert_missing_prices( value.map(|v| arg_from_str(v, err, "auto-convert-missing-prices", "boolean")).unwrap_or(false)); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3337,6 +3539,1260 @@ where } } + async fn 
_monetization_subscriptions_archive(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ArchiveSubscriptionRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_archive(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + 
} + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_activate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ActivateBasePlanRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_base_plans_activate(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + 
json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_deactivate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DeactivateBasePlanRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_base_plans_deactivate(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.monetization().subscriptions_base_plans_delete(opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok(mut response) => { + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_migrate_prices(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "regions-version.version" => Some(("regionsVersion.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["regions-version", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::MigrateBasePlanPricesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_base_plans_migrate_prices(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut 
response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_offers_activate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ActivateSubscriptionOfferRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_base_plans_offers_activate(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or(""), opt.value_of("offer-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_offers_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "base-plan-id" => Some(("basePlanId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "offer-id" => Some(("offerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "other-regions-config.other-regions-new-subscriber-availability" => Some(("otherRegionsConfig.otherRegionsNewSubscriberAvailability", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package-name" => Some(("packageName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "product-id" => Some(("productId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "targeting.acquisition-rule.scope.specific-subscription-in-app" => Some(("targeting.acquisitionRule.scope.specificSubscriptionInApp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "targeting.upgrade-rule.billing-period-duration" => 
Some(("targeting.upgradeRule.billingPeriodDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "targeting.upgrade-rule.once-per-user" => Some(("targeting.upgradeRule.oncePerUser", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "targeting.upgrade-rule.scope.specific-subscription-in-app" => Some(("targeting.upgradeRule.scope.specificSubscriptionInApp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["acquisition-rule", "base-plan-id", "billing-period-duration", "offer-id", "once-per-user", "other-regions-config", "other-regions-new-subscriber-availability", "package-name", "product-id", "scope", "specific-subscription-in-app", "state", "targeting", "upgrade-rule"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SubscriptionOffer = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_base_plans_offers_create(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "regions-version-version" => { + call = call.regions_version_version(value.unwrap_or("")); + }, + "offer-id" => { + call = call.offer_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["offer-id", "regions-version-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_offers_deactivate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DeactivateSubscriptionOfferRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_base_plans_offers_deactivate(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or(""), opt.value_of("offer-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to 
work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_offers_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.monetization().subscriptions_base_plans_offers_delete(opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or(""), opt.value_of("offer-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok(mut response) => { + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_offers_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.monetization().subscriptions_base_plans_offers_get(opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or(""), opt.value_of("offer-id").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_offers_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.monetization().subscriptions_base_plans_offers_list(opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { 
+ call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_base_plans_offers_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } 
+ if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "base-plan-id" => Some(("basePlanId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "offer-id" => Some(("offerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "other-regions-config.other-regions-new-subscriber-availability" => Some(("otherRegionsConfig.otherRegionsNewSubscriberAvailability", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package-name" => Some(("packageName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "product-id" => Some(("productId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "targeting.acquisition-rule.scope.specific-subscription-in-app" => Some(("targeting.acquisitionRule.scope.specificSubscriptionInApp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "targeting.upgrade-rule.billing-period-duration" => Some(("targeting.upgradeRule.billingPeriodDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "targeting.upgrade-rule.once-per-user" => Some(("targeting.upgradeRule.oncePerUser", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "targeting.upgrade-rule.scope.specific-subscription-in-app" => Some(("targeting.upgradeRule.scope.specificSubscriptionInApp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["acquisition-rule", "base-plan-id", "billing-period-duration", "offer-id", "once-per-user", "other-regions-config", "other-regions-new-subscriber-availability", "package-name", "product-id", "scope", 
"specific-subscription-in-app", "state", "targeting", "upgrade-rule"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SubscriptionOffer = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_base_plans_offers_patch(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or(""), opt.value_of("base-plan-id").unwrap_or(""), opt.value_of("offer-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "regions-version-version" => { + call = call.regions_version_version(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["regions-version-version", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "archived" => Some(("archived", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package-name" => Some(("packageName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "product-id" => Some(("productId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tax-and-compliance-settings.eea-withdrawal-right-type" => Some(("taxAndComplianceSettings.eeaWithdrawalRightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["archived", "eea-withdrawal-right-type", "package-name", "product-id", "tax-and-compliance-settings"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Subscription = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_create(request, opt.value_of("package-name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "regions-version-version" => { + call = call.regions_version_version(value.unwrap_or("")); + }, + "product-id" => { + call = call.product_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["product-id", "regions-version-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.monetization().subscriptions_delete(opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() 
+ } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok(mut response) => { + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.monetization().subscriptions_get(opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.monetization().subscriptions_list(opt.value_of("package-name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "show-archived" => { + call = call.show_archived( value.map(|v| arg_from_str(v, err, "show-archived", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-archived"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _monetization_subscriptions_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "archived" => Some(("archived", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package-name" => Some(("packageName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "product-id" => Some(("productId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tax-and-compliance-settings.eea-withdrawal-right-type" => Some(("taxAndComplianceSettings.eeaWithdrawalRightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["archived", "eea-withdrawal-right-type", "package-name", "product-id", "tax-and-compliance-settings"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Subscription = json::value::from_value(object).unwrap(); + let mut call = self.hub.monetization().subscriptions_patch(request, opt.value_of("package-name").unwrap_or(""), opt.value_of("product-id").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "regions-version-version" => { + call = call.regions_version_version(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["regions-version-version", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _orders_refund(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.orders().refund(opt.value_of("package-name").unwrap_or(""), opt.value_of("order-id").unwrap_or("")); @@ -3344,7 +4800,7 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "revoke" => { - call = call.revoke(arg_from_str(value.unwrap_or("false"), err, "revoke", "boolean")); + call = call.revoke( value.map(|v| arg_from_str(v, err, "revoke", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3861,6 +5317,58 @@ where } } + async fn _purchases_subscriptionsv2_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.purchases().subscriptionsv2_get(opt.value_of("package-name").unwrap_or(""), opt.value_of("token").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } 
+ async fn _purchases_voidedpurchases_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.purchases().voidedpurchases_list(opt.value_of("package-name").unwrap_or("")); @@ -3868,22 +5376,22 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "type" => { - call = call.type_(arg_from_str(value.unwrap_or("-0"), err, "type", "integer")); + call = call.type_( value.map(|v| arg_from_str(v, err, "type", "int32")).unwrap_or(-0)); }, "token" => { call = call.token(value.unwrap_or("")); }, "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "int64")).unwrap_or(-0)); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "end-time" => { - call = call.end_time(value.unwrap_or("")); + call = call.end_time( value.map(|v| arg_from_str(v, err, "end-time", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4001,10 +5509,10 @@ where call = call.token(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -4534,7 +6042,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4628,7 +6136,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4682,6 +6190,23 @@ where let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: Option = None; match self.opt.subcommand() { + ("applications", Some(opt)) => { + match opt.subcommand() { + ("device-tier-configs-create", Some(opt)) => { + call_result = self._applications_device_tier_configs_create(opt, dry_run, &mut err).await; + }, + ("device-tier-configs-get", Some(opt)) => { + call_result = self._applications_device_tier_configs_get(opt, dry_run, &mut err).await; + }, + ("device-tier-configs-list", Some(opt)) => { + call_result = self._applications_device_tier_configs_list(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("applications".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("edits", Some(opt)) => { match opt.subcommand() { ("apks-addexternallyhosted", Some(opt)) => { @@ -4874,6 +6399,57 @@ where ("convert-region-prices", Some(opt)) => { call_result = self._monetization_convert_region_prices(opt, dry_run, &mut err).await; }, + ("subscriptions-archive", Some(opt)) => { + call_result = self._monetization_subscriptions_archive(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-activate", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_activate(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-deactivate", 
Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_deactivate(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-delete", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_delete(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-migrate-prices", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_migrate_prices(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-offers-activate", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_offers_activate(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-offers-create", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_offers_create(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-offers-deactivate", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_offers_deactivate(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-offers-delete", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_offers_delete(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-offers-get", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_offers_get(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-offers-list", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_offers_list(opt, dry_run, &mut err).await; + }, + ("subscriptions-base-plans-offers-patch", Some(opt)) => { + call_result = self._monetization_subscriptions_base_plans_offers_patch(opt, dry_run, &mut err).await; + }, + ("subscriptions-create", Some(opt)) => { + call_result = self._monetization_subscriptions_create(opt, dry_run, &mut err).await; + }, + ("subscriptions-delete", Some(opt)) => { + call_result = self._monetization_subscriptions_delete(opt, dry_run, &mut err).await; + }, + ("subscriptions-get", Some(opt)) => { + call_result = 
self._monetization_subscriptions_get(opt, dry_run, &mut err).await; + }, + ("subscriptions-list", Some(opt)) => { + call_result = self._monetization_subscriptions_list(opt, dry_run, &mut err).await; + }, + ("subscriptions-patch", Some(opt)) => { + call_result = self._monetization_subscriptions_patch(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("monetization".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -4917,6 +6493,9 @@ where ("subscriptions-revoke", Some(opt)) => { call_result = self._purchases_subscriptions_revoke(opt, dry_run, &mut err).await; }, + ("subscriptionsv2-get", Some(opt)) => { + call_result = self._purchases_subscriptionsv2_get(opt, dry_run, &mut err).await; + }, ("voidedpurchases-list", Some(opt)) => { call_result = self._purchases_voidedpurchases_list(opt, dry_run, &mut err).await; }, @@ -5057,6 +6636,87 @@ async fn main() { let mut exit_status = 0i32; let upload_value_names = ["mode", "file"]; let arg_data = [ + ("applications", "methods: 'device-tier-configs-create', 'device-tier-configs-get' and 'device-tier-configs-list'", vec![ + ("device-tier-configs-create", + Some(r##"Creates a new device tier config for an app."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/applications_device-tier-configs-create", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Package name of the app."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("device-tier-configs-get", + Some(r##"Returns a particular device tier 
config."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/applications_device-tier-configs-get", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Package name of the app."##), + Some(true), + Some(false)), + + (Some(r##"device-tier-config-id"##), + None, + Some(r##"Required. Id of an existing device tier config."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("device-tier-configs-list", + Some(r##"Returns created device tier configs, ordered by descending creation time."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/applications_device-tier-configs-list", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Package name of the app."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("edits", "methods: 'apks-addexternallyhosted', 'apks-list', 'apks-upload', 'bundles-list', 'bundles-upload', 'commit', 'countryavailability-get', 'delete', 'deobfuscationfiles-upload', 'details-get', 'details-patch', 'details-update', 'expansionfiles-get', 'expansionfiles-patch', 'expansionfiles-update', 'expansionfiles-upload', 'get', 'images-delete', 'images-deleteall', 'images-list', 'images-upload', 'insert', 'listings-delete', 'listings-deleteall', 'listings-get', 'listings-list', 'listings-patch', 'listings-update', 'testers-get', 'testers-patch', 'testers-update', 'tracks-get', 'tracks-list', 'tracks-patch', 'tracks-update' and 
'validate'", vec![ ("apks-addexternallyhosted", Some(r##"Creates a new APK without uploading the APK itself to Google Play, instead hosting the APK at a specified URL. This function is only available to organizations using Managed Play whose application is configured to restrict distribution to the organizations."##), @@ -6035,7 +7695,7 @@ async fn main() { Some(false)), ]), ("testers-get", - Some(r##"Gets testers."##), + Some(r##"Gets testers. Note: Testers resource does not support email lists."##), "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/edits_testers-get", vec![ (Some(r##"package-name"##), @@ -6069,7 +7729,7 @@ async fn main() { Some(false)), ]), ("testers-patch", - Some(r##"Patches testers."##), + Some(r##"Patches testers. Note: Testers resource does not support email lists."##), "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/edits_testers-patch", vec![ (Some(r##"package-name"##), @@ -6109,7 +7769,7 @@ async fn main() { Some(false)), ]), ("testers-update", - Some(r##"Updates testers."##), + Some(r##"Updates testers. Note: Testers resource does not support email lists."##), "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/edits_testers-update", vec![ (Some(r##"package-name"##), @@ -6436,7 +8096,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Resource name for this grant, following the pattern "developers/{developer}/users/{email}/grants/{package_name}"."##), + Some(r##"Required. Resource name for this grant, following the pattern "developers/{developer}/users/{email}/grants/{package_name}". 
If this grant is for a draft app, the app ID will be used in this resource name instead of the package name."##), Some(true), Some(false)), @@ -6690,7 +8350,7 @@ async fn main() { ]), ]), - ("monetization", "methods: 'convert-region-prices'", vec![ + ("monetization", "methods: 'convert-region-prices', 'subscriptions-archive', 'subscriptions-base-plans-activate', 'subscriptions-base-plans-deactivate', 'subscriptions-base-plans-delete', 'subscriptions-base-plans-migrate-prices', 'subscriptions-base-plans-offers-activate', 'subscriptions-base-plans-offers-create', 'subscriptions-base-plans-offers-deactivate', 'subscriptions-base-plans-offers-delete', 'subscriptions-base-plans-offers-get', 'subscriptions-base-plans-offers-list', 'subscriptions-base-plans-offers-patch', 'subscriptions-create', 'subscriptions-delete', 'subscriptions-get', 'subscriptions-list' and 'subscriptions-patch'", vec![ ("convert-region-prices", Some(r##"Calculates the region prices, using today's exchange rate and country-specific pricing patterns, based on the price in the request for a set of regions."##), "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_convert-region-prices", @@ -6713,6 +8373,608 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-archive", + Some(r##"Archives a subscription. Can only be done if at least one base plan was active in the past, and no base plan is available for new or existing subscribers currently. This action is irreversible, and the subscription ID will remain reserved."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-archive", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. 
The parent app (package name) of the app of the subscription to delete."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The unique product ID of the subscription to delete."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-activate", + Some(r##"Activates a base plan. Once activated, base plans will be available to new subscribers."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-activate", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) of the base plan to activate."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The parent subscription (ID) of the base plan to activate."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The unique base plan ID of the base plan to activate."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-deactivate", + Some(r##"Deactivates a base plan. 
Once deactivated, the base plan will become unavailable to new subscribers, but existing subscribers will maintain their subscription"##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-deactivate", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) of the base plan to deactivate."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The parent subscription (ID) of the base plan to deactivate."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The unique base plan ID of the base plan to deactivate."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-delete", + Some(r##"Deletes a base plan. Can only be done for draft base plans. This action is irreversible."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-delete", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) of the base plan to delete."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The parent subscription (ID) of the base plan to delete."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. 
The unique offer ID of the base plan to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + ]), + ("subscriptions-base-plans-migrate-prices", + Some(r##"Migrates subscribers who are receiving an historical subscription price to the currently-offered price for the specified region. Requests will cause price change notifications to be sent to users who are currently receiving an historical price older than the supplied timestamp. Subscribers who do not agree to the new price will have their subscription ended at the next renewal."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-migrate-prices", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. Package name of the parent app. Must be equal to the package_name field on the Subscription resource."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The ID of the subscription to update. Must be equal to the product_id field on the Subscription resource."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The unique base plan ID of the base plan to update prices on."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-offers-activate", + Some(r##"Activates a subscription offer. 
Once activated, subscription offers will be available to new subscribers."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-offers-activate", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) of the offer to activate."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The parent subscription (ID) of the offer to activate."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The parent base plan (ID) of the offer to activate."##), + Some(true), + Some(false)), + + (Some(r##"offer-id"##), + None, + Some(r##"Required. The unique offer ID of the offer to activate."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-offers-create", + Some(r##"Creates a new subscription offer. Only auto-renewing base plans can have subscription offers. The offer state will be DRAFT until it is activated."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-offers-create", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) for which the offer should be created. Must be equal to the package_name field on the Subscription resource."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The parent subscription (ID) for which the offer should be created. 
Must be equal to the product_id field on the SubscriptionOffer resource."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The parent base plan (ID) for which the offer should be created. Must be equal to the base_plan_id field on the SubscriptionOffer resource."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-offers-deactivate", + Some(r##"Deactivates a subscription offer. Once deactivated, existing subscribers will maintain their subscription, but the offer will become unavailable to new subscribers."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-offers-deactivate", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) of the offer to deactivate."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The parent subscription (ID) of the offer to deactivate."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The parent base plan (ID) of the offer to deactivate."##), + Some(true), + Some(false)), + + (Some(r##"offer-id"##), + None, + Some(r##"Required. 
The unique offer ID of the offer to deactivate."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-offers-delete", + Some(r##"Deletes a subscription offer. Can only be done for draft offers. This action is irreversible."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-offers-delete", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) of the offer to delete."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The parent subscription (ID) of the offer to delete."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The parent base plan (ID) of the offer to delete."##), + Some(true), + Some(false)), + + (Some(r##"offer-id"##), + None, + Some(r##"Required. The unique offer ID of the offer to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + ]), + ("subscriptions-base-plans-offers-get", + Some(r##"Reads a single offer"##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-offers-get", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) of the offer to get."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. 
The parent subscription (ID) of the offer to get."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The parent base plan (ID) of the offer to get."##), + Some(true), + Some(false)), + + (Some(r##"offer-id"##), + None, + Some(r##"Required. The unique offer ID of the offer to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-offers-list", + Some(r##"Lists all offers under a given subscription."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-offers-list", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) for which the subscriptions should be read."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The parent subscription (ID) for which the offers should be read."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. The parent base plan (ID) for which the offers should be read. 
May be specified as '-' to read all offers under a subscription."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-base-plans-offers-patch", + Some(r##"Updates an existing subscription offer."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-base-plans-offers-patch", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. Immutable. The package name of the app the parent subscription belongs to."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. Immutable. The ID of the parent subscription this offer belongs to."##), + Some(true), + Some(false)), + + (Some(r##"base-plan-id"##), + None, + Some(r##"Required. Immutable. The ID of the base plan to which this offer is an extension."##), + Some(true), + Some(false)), + + (Some(r##"offer-id"##), + None, + Some(r##"Required. Immutable. Unique ID of this subscription offer. Must be unique within the base plan."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-create", + Some(r##"Creates a new subscription. 
Newly added base plans will remain in draft state until activated."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-create", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) for which the subscription should be created. Must be equal to the package_name field on the Subscription resource."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-delete", + Some(r##"Deletes a subscription. A subscription can only be deleted if it has never had a base plan published."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-delete", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) of the app of the subscription to delete."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The unique product ID of the subscription to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + ]), + ("subscriptions-get", + Some(r##"Reads a single subscription."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-get", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. 
The parent app (package name) of the subscription to get."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Required. The unique product ID of the subscription to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-list", + Some(r##"Lists all subscriptions under a given app."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-list", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Required. The parent app (package name) for which the subscriptions should be read."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("subscriptions-patch", + Some(r##"Updates an existing subscription."##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/monetization_subscriptions-patch", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"Immutable. Package name of the parent app."##), + Some(true), + Some(false)), + + (Some(r##"product-id"##), + None, + Some(r##"Immutable. Unique product ID of the product. Unique within the parent app. Product IDs must be composed of lower-case letters (a-z), numbers (0-9), underscores (_) and dots (.). 
It must start with a lower-case letter or number, and be between 1 and 40 (inclusive) characters in length."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -6746,7 +9008,7 @@ async fn main() { ]), ]), - ("purchases", "methods: 'products-acknowledge', 'products-get', 'subscriptions-acknowledge', 'subscriptions-cancel', 'subscriptions-defer', 'subscriptions-get', 'subscriptions-refund', 'subscriptions-revoke' and 'voidedpurchases-list'", vec![ + ("purchases", "methods: 'products-acknowledge', 'products-get', 'subscriptions-acknowledge', 'subscriptions-cancel', 'subscriptions-defer', 'subscriptions-get', 'subscriptions-refund', 'subscriptions-revoke', 'subscriptionsv2-get' and 'voidedpurchases-list'", vec![ ("products-acknowledge", Some(r##"Acknowledges a purchase of an inapp item."##), "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/purchases_products-acknowledge", @@ -7007,6 +9269,34 @@ async fn main() { Some(false), Some(true)), ]), + ("subscriptionsv2-get", + Some(r##"Get metadata about a subscription"##), + "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/purchases_subscriptionsv2-get", + vec![ + (Some(r##"package-name"##), + None, + Some(r##"The package of the application for which this subscription was purchased (for example, 'com.some.thing')."##), + Some(true), + Some(false)), + + (Some(r##"token"##), + None, + Some(r##"Required. 
The token provided to the user's device when the subscription was purchased."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("voidedpurchases-list", Some(r##"Lists the purchases that were canceled, refunded or charged-back."##), "Details at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli/purchases_voidedpurchases-list", @@ -7352,8 +9642,8 @@ async fn main() { let mut app = App::new("androidpublisher3") .author("Sebastian Thiel ") - .version("4.0.1+20220307") - .about("Lets Android application developers access their Google Play accounts.") + .version("5.0.2+20230124") + .about("Lets Android application developers access their Google Play accounts. At a high level, the expected workflow is to \"insert\" an Edit, make changes as necessary, and then \"commit\" it. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_androidpublisher3_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/androidpublisher3/Cargo.toml b/gen/androidpublisher3/Cargo.toml index 3b27273182..dd9ff28730 100644 --- a/gen/androidpublisher3/Cargo.toml +++ b/gen/androidpublisher3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-androidpublisher3" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Android Publisher (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/androidpublisher3" homepage = "https://developers.google.com/android-publisher" -documentation = "https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-androidpublisher3/5.0.2+20230124" license = "MIT" keywords = ["androidpublisher", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/androidpublisher3/README.md b/gen/androidpublisher3/README.md index 9ab98530d0..56ea7e7198 100644 --- a/gen/androidpublisher3/README.md +++ b/gen/androidpublisher3/README.md @@ -5,54 +5,54 @@ DO NOT EDIT ! --> The `google-androidpublisher3` library allows access to all features of the *Google Android Publisher* service. -This documentation was generated from *Android Publisher* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *androidpublisher:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Android Publisher* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *androidpublisher:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Android Publisher* *v3* API can be found at the [official documentation site](https://developers.google.com/android-publisher). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/AndroidPublisher) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/AndroidPublisher) ... * applications - * [*device tier configs create*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::ApplicationDeviceTierConfigCreateCall), [*device tier configs get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::ApplicationDeviceTierConfigGetCall) and [*device tier configs list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::ApplicationDeviceTierConfigListCall) + * [*device tier configs create*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::ApplicationDeviceTierConfigCreateCall), [*device tier configs get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::ApplicationDeviceTierConfigGetCall) and [*device tier configs list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::ApplicationDeviceTierConfigListCall) * edits - * [*apks addexternallyhosted*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditApkAddexternallyhostedCall), [*apks list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditApkListCall), [*apks upload*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditApkUploadCall), [*bundles 
list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditBundleListCall), [*bundles upload*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditBundleUploadCall), [*commit*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditCommitCall), [*countryavailability get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditCountryavailabilityGetCall), [*delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditDeleteCall), [*deobfuscationfiles upload*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditDeobfuscationfileUploadCall), [*details get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditDetailGetCall), [*details patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditDetailPatchCall), [*details update*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditDetailUpdateCall), [*expansionfiles get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditExpansionfileGetCall), [*expansionfiles patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditExpansionfilePatchCall), [*expansionfiles update*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditExpansionfileUpdateCall), [*expansionfiles upload*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditExpansionfileUploadCall), [*get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditGetCall), [*images 
delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditImageDeleteCall), [*images deleteall*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditImageDeleteallCall), [*images list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditImageListCall), [*images upload*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditImageUploadCall), [*insert*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditInsertCall), [*listings delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditListingDeleteCall), [*listings deleteall*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditListingDeleteallCall), [*listings get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditListingGetCall), [*listings list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditListingListCall), [*listings patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditListingPatchCall), [*listings update*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditListingUpdateCall), [*testers get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditTesterGetCall), [*testers patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditTesterPatchCall), [*testers update*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditTesterUpdateCall), [*tracks get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditTrackGetCall), [*tracks 
list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditTrackListCall), [*tracks patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditTrackPatchCall), [*tracks update*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditTrackUpdateCall) and [*validate*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditValidateCall) + * [*apks addexternallyhosted*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditApkAddexternallyhostedCall), [*apks list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditApkListCall), [*apks upload*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditApkUploadCall), [*bundles list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditBundleListCall), [*bundles upload*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditBundleUploadCall), [*commit*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditCommitCall), [*countryavailability get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditCountryavailabilityGetCall), [*delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditDeleteCall), [*deobfuscationfiles upload*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditDeobfuscationfileUploadCall), [*details get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditDetailGetCall), [*details patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditDetailPatchCall), [*details 
update*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditDetailUpdateCall), [*expansionfiles get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditExpansionfileGetCall), [*expansionfiles patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditExpansionfilePatchCall), [*expansionfiles update*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditExpansionfileUpdateCall), [*expansionfiles upload*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditExpansionfileUploadCall), [*get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditGetCall), [*images delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditImageDeleteCall), [*images deleteall*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditImageDeleteallCall), [*images list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditImageListCall), [*images upload*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditImageUploadCall), [*insert*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditInsertCall), [*listings delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditListingDeleteCall), [*listings deleteall*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditListingDeleteallCall), [*listings get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditListingGetCall), [*listings list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditListingListCall), [*listings 
patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditListingPatchCall), [*listings update*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditListingUpdateCall), [*testers get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditTesterGetCall), [*testers patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditTesterPatchCall), [*testers update*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditTesterUpdateCall), [*tracks get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditTrackGetCall), [*tracks list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditTrackListCall), [*tracks patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditTrackPatchCall), [*tracks update*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditTrackUpdateCall) and [*validate*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditValidateCall) * generatedapks - * [*download*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::GeneratedapkDownloadCall) and [*list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::GeneratedapkListCall) -* [grants](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::Grant) - * [*create*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::GrantCreateCall), [*delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::GrantDeleteCall) and [*patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::GrantPatchCall) + 
* [*download*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::GeneratedapkDownloadCall) and [*list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::GeneratedapkListCall) +* [grants](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::Grant) + * [*create*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::GrantCreateCall), [*delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::GrantDeleteCall) and [*patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::GrantPatchCall) * inappproducts - * [*delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InappproductDeleteCall), [*get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InappproductGetCall), [*insert*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InappproductInsertCall), [*list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InappproductListCall), [*patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InappproductPatchCall) and [*update*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InappproductUpdateCall) + * [*delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InappproductDeleteCall), [*get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InappproductGetCall), [*insert*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InappproductInsertCall), [*list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InappproductListCall), 
[*patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InappproductPatchCall) and [*update*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InappproductUpdateCall) * internalappsharingartifacts - * [*uploadapk*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InternalappsharingartifactUploadapkCall) and [*uploadbundle*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InternalappsharingartifactUploadbundleCall) + * [*uploadapk*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InternalappsharingartifactUploadapkCall) and [*uploadbundle*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InternalappsharingartifactUploadbundleCall) * monetization - * [*convert region prices*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationConvertRegionPriceCall), [*subscriptions archive*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionArchiveCall), [*subscriptions base plans activate*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanActivateCall), [*subscriptions base plans deactivate*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanDeactivateCall), [*subscriptions base plans delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanDeleteCall), [*subscriptions base plans migrate prices*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanMigratePriceCall), [*subscriptions base plans offers 
activate*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferActivateCall), [*subscriptions base plans offers create*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferCreateCall), [*subscriptions base plans offers deactivate*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferDeactivateCall), [*subscriptions base plans offers delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferDeleteCall), [*subscriptions base plans offers get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferGetCall), [*subscriptions base plans offers list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferListCall), [*subscriptions base plans offers patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferPatchCall), [*subscriptions create*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionCreateCall), [*subscriptions delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionDeleteCall), [*subscriptions get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionGetCall), [*subscriptions list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionListCall) and [*subscriptions 
patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::MonetizationSubscriptionPatchCall) + * [*convert region prices*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationConvertRegionPriceCall), [*subscriptions archive*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionArchiveCall), [*subscriptions base plans activate*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanActivateCall), [*subscriptions base plans deactivate*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanDeactivateCall), [*subscriptions base plans delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanDeleteCall), [*subscriptions base plans migrate prices*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanMigratePriceCall), [*subscriptions base plans offers activate*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferActivateCall), [*subscriptions base plans offers create*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferCreateCall), [*subscriptions base plans offers deactivate*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferDeactivateCall), [*subscriptions base plans offers delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferDeleteCall), [*subscriptions base plans offers 
get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferGetCall), [*subscriptions base plans offers list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferListCall), [*subscriptions base plans offers patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionBasePlanOfferPatchCall), [*subscriptions create*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionCreateCall), [*subscriptions delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionDeleteCall), [*subscriptions get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionGetCall), [*subscriptions list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionListCall) and [*subscriptions patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::MonetizationSubscriptionPatchCall) * orders - * [*refund*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::OrderRefundCall) + * [*refund*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::OrderRefundCall) * purchases - * [*products acknowledge*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseProductAcknowledgeCall), [*products get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseProductGetCall), [*subscriptions acknowledge*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseSubscriptionAcknowledgeCall), [*subscriptions 
cancel*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseSubscriptionCancelCall), [*subscriptions defer*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseSubscriptionDeferCall), [*subscriptions get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseSubscriptionGetCall), [*subscriptions refund*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseSubscriptionRefundCall), [*subscriptions revoke*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseSubscriptionRevokeCall), [*subscriptionsv2 get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseSubscriptionsv2GetCall) and [*voidedpurchases list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::PurchaseVoidedpurchaseListCall) -* [reviews](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::Review) - * [*get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::ReviewGetCall), [*list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::ReviewListCall) and [*reply*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::ReviewReplyCall) + * [*products acknowledge*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseProductAcknowledgeCall), [*products get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseProductGetCall), [*subscriptions acknowledge*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseSubscriptionAcknowledgeCall), [*subscriptions 
cancel*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseSubscriptionCancelCall), [*subscriptions defer*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseSubscriptionDeferCall), [*subscriptions get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseSubscriptionGetCall), [*subscriptions refund*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseSubscriptionRefundCall), [*subscriptions revoke*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseSubscriptionRevokeCall), [*subscriptionsv2 get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseSubscriptionsv2GetCall) and [*voidedpurchases list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::PurchaseVoidedpurchaseListCall) +* [reviews](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::Review) + * [*get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::ReviewGetCall), [*list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::ReviewListCall) and [*reply*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::ReviewReplyCall) * systemapks - * [*variants create*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::SystemapkVariantCreateCall), [*variants download*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::SystemapkVariantDownloadCall), [*variants get*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::SystemapkVariantGetCall) and [*variants 
list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::SystemapkVariantListCall) -* [users](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::User) - * [*create*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::UserCreateCall), [*delete*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::UserDeleteCall), [*list*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::UserListCall) and [*patch*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::UserPatchCall) + * [*variants create*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::SystemapkVariantCreateCall), [*variants download*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::SystemapkVariantDownloadCall), [*variants get*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::SystemapkVariantGetCall) and [*variants list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::SystemapkVariantListCall) +* [users](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::User) + * [*create*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::UserCreateCall), [*delete*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::UserDeleteCall), [*list*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::UserListCall) and [*patch*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::UserPatchCall) Upload supported by ... 
-* [*apks upload edits*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditApkUploadCall) -* [*bundles upload edits*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditBundleUploadCall) -* [*deobfuscationfiles upload edits*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditDeobfuscationfileUploadCall) -* [*expansionfiles upload edits*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditExpansionfileUploadCall) -* [*images upload edits*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::EditImageUploadCall) -* [*uploadapk internalappsharingartifacts*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InternalappsharingartifactUploadapkCall) -* [*uploadbundle internalappsharingartifacts*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::InternalappsharingartifactUploadbundleCall) +* [*apks upload edits*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditApkUploadCall) +* [*bundles upload edits*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditBundleUploadCall) +* [*deobfuscationfiles upload edits*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditDeobfuscationfileUploadCall) +* [*expansionfiles upload edits*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditExpansionfileUploadCall) +* [*images upload edits*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::EditImageUploadCall) +* [*uploadapk internalappsharingartifacts*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InternalappsharingartifactUploadapkCall) +* 
[*uploadbundle internalappsharingartifacts*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::InternalappsharingartifactUploadbundleCall) Download supported by ... -* [*download generatedapks*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::GeneratedapkDownloadCall) -* [*variants download systemapks*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/api::SystemapkVariantDownloadCall) +* [*download generatedapks*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::GeneratedapkDownloadCall) +* [*variants download systemapks*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/api::SystemapkVariantDownloadCall) @@ -60,17 +60,17 @@ Download supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/AndroidPublisher)** +* **[Hub](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/AndroidPublisher)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::CallBuilder) -* **[Resources](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::Part)** + * **[Parts](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -168,17 +168,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. 
This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -188,29 +188,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::Delegate) to the -[Method Builder](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::Delegate) to the +[Method Builder](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::RequestValue) and -[decodable](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::RequestValue) and +[decodable](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-androidpublisher3/5.0.2-beta-1+20230124/google_androidpublisher3/client::RequestValue) are moved +* [request values](https://docs.rs/google-androidpublisher3/5.0.2+20230124/google_androidpublisher3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/androidpublisher3/src/api.rs b/gen/androidpublisher3/src/api.rs index 9055448cdc..4cfb95f35f 100644 --- a/gen/androidpublisher3/src/api.rs +++ b/gen/androidpublisher3/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> AndroidPublisher { AndroidPublisher { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://androidpublisher.googleapis.com/".to_string(), _root_url: "https://androidpublisher.googleapis.com/".to_string(), } @@ -171,7 +171,7 @@ impl<'a, S> AndroidPublisher { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/androidpublisher3/src/client.rs b/gen/androidpublisher3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/androidpublisher3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/androidpublisher3/src/lib.rs b/gen/androidpublisher3/src/lib.rs index 5c3ef41503..ab4bdf3c99 100644 --- a/gen/androidpublisher3/src/lib.rs +++ b/gen/androidpublisher3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Android Publisher* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *androidpublisher:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Android Publisher* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *androidpublisher:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Android Publisher* *v3* API can be found at the //! [official documentation site](https://developers.google.com/android-publisher). diff --git a/gen/apigateway1-cli/Cargo.toml b/gen/apigateway1-cli/Cargo.toml index 73e62f4f12..2ea86666cb 100644 --- a/gen/apigateway1-cli/Cargo.toml +++ b/gen/apigateway1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-apigateway1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20221213" authors = ["Sebastian Thiel "] description = "A complete library to interact with apigateway (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/apigateway1-cli" @@ -20,13 +20,13 @@ name = "apigateway1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-apigateway1] path = "../apigateway1" -version = "4.0.1+20220223" +version = "5.0.2+20221213" + diff --git a/gen/apigateway1-cli/README.md b/gen/apigateway1-cli/README.md index 94f0a302ec..51a1daf0db 100644 --- a/gen/apigateway1-cli/README.md +++ b/gen/apigateway1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *apigateway* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *apigateway* API at revision *20221213*. The CLI is at version *5.0.2*. ```bash apigateway1 [options] diff --git a/gen/apigateway1-cli/mkdocs.yml b/gen/apigateway1-cli/mkdocs.yml index bcc6924ac4..66bc445bff 100644 --- a/gen/apigateway1-cli/mkdocs.yml +++ b/gen/apigateway1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: apigateway v4.0.1+20220223 +site_name: apigateway v5.0.2+20221213 site_url: http://byron.github.io/google-apis-rs/google-apigateway1-cli site_description: A complete library to interact with apigateway (protocol v1) @@ -7,38 +7,39 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/apigateway1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-apis-configs-create.md', 'Projects', 'Locations Apis Configs Create'] -- ['projects_locations-apis-configs-delete.md', 'Projects', 'Locations Apis Configs Delete'] -- ['projects_locations-apis-configs-get.md', 'Projects', 'Locations Apis Configs Get'] -- ['projects_locations-apis-configs-get-iam-policy.md', 'Projects', 'Locations Apis Configs Get Iam Policy'] -- ['projects_locations-apis-configs-list.md', 'Projects', 'Locations Apis Configs List'] -- ['projects_locations-apis-configs-patch.md', 'Projects', 'Locations Apis Configs Patch'] -- ['projects_locations-apis-configs-set-iam-policy.md', 'Projects', 'Locations Apis Configs Set Iam Policy'] -- ['projects_locations-apis-configs-test-iam-permissions.md', 'Projects', 'Locations Apis Configs Test Iam Permissions'] -- ['projects_locations-apis-create.md', 'Projects', 'Locations Apis Create'] -- ['projects_locations-apis-delete.md', 'Projects', 'Locations Apis Delete'] -- ['projects_locations-apis-get.md', 'Projects', 'Locations Apis Get'] -- ['projects_locations-apis-get-iam-policy.md', 
'Projects', 'Locations Apis Get Iam Policy'] -- ['projects_locations-apis-list.md', 'Projects', 'Locations Apis List'] -- ['projects_locations-apis-patch.md', 'Projects', 'Locations Apis Patch'] -- ['projects_locations-apis-set-iam-policy.md', 'Projects', 'Locations Apis Set Iam Policy'] -- ['projects_locations-apis-test-iam-permissions.md', 'Projects', 'Locations Apis Test Iam Permissions'] -- ['projects_locations-gateways-create.md', 'Projects', 'Locations Gateways Create'] -- ['projects_locations-gateways-delete.md', 'Projects', 'Locations Gateways Delete'] -- ['projects_locations-gateways-get.md', 'Projects', 'Locations Gateways Get'] -- ['projects_locations-gateways-get-iam-policy.md', 'Projects', 'Locations Gateways Get Iam Policy'] -- ['projects_locations-gateways-list.md', 'Projects', 'Locations Gateways List'] -- ['projects_locations-gateways-patch.md', 'Projects', 'Locations Gateways Patch'] -- ['projects_locations-gateways-set-iam-policy.md', 'Projects', 'Locations Gateways Set Iam Policy'] -- ['projects_locations-gateways-test-iam-permissions.md', 'Projects', 'Locations Gateways Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Apis Configs Create': 'projects_locations-apis-configs-create.md' + - 'Locations Apis Configs Delete': 'projects_locations-apis-configs-delete.md' + - 'Locations Apis Configs Get': 'projects_locations-apis-configs-get.md' + - 'Locations Apis Configs Get Iam Policy': 'projects_locations-apis-configs-get-iam-policy.md' + - 'Locations 
Apis Configs List': 'projects_locations-apis-configs-list.md' + - 'Locations Apis Configs Patch': 'projects_locations-apis-configs-patch.md' + - 'Locations Apis Configs Set Iam Policy': 'projects_locations-apis-configs-set-iam-policy.md' + - 'Locations Apis Configs Test Iam Permissions': 'projects_locations-apis-configs-test-iam-permissions.md' + - 'Locations Apis Create': 'projects_locations-apis-create.md' + - 'Locations Apis Delete': 'projects_locations-apis-delete.md' + - 'Locations Apis Get': 'projects_locations-apis-get.md' + - 'Locations Apis Get Iam Policy': 'projects_locations-apis-get-iam-policy.md' + - 'Locations Apis List': 'projects_locations-apis-list.md' + - 'Locations Apis Patch': 'projects_locations-apis-patch.md' + - 'Locations Apis Set Iam Policy': 'projects_locations-apis-set-iam-policy.md' + - 'Locations Apis Test Iam Permissions': 'projects_locations-apis-test-iam-permissions.md' + - 'Locations Gateways Create': 'projects_locations-gateways-create.md' + - 'Locations Gateways Delete': 'projects_locations-gateways-delete.md' + - 'Locations Gateways Get': 'projects_locations-gateways-get.md' + - 'Locations Gateways Get Iam Policy': 'projects_locations-gateways-get-iam-policy.md' + - 'Locations Gateways List': 'projects_locations-gateways-list.md' + - 'Locations Gateways Patch': 'projects_locations-gateways-patch.md' + - 'Locations Gateways Set Iam Policy': 'projects_locations-gateways-set-iam-policy.md' + - 'Locations Gateways Test Iam Permissions': 'projects_locations-gateways-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git 
a/gen/apigateway1-cli/src/client.rs b/gen/apigateway1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/apigateway1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/apigateway1-cli/src/main.rs b/gen/apigateway1-cli/src/main.rs index 96186f524b..75709fb9e8 100644 --- a/gen/apigateway1-cli/src/main.rs +++ b/gen/apigateway1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_apigateway1::{api, Error, oauth2}; +use google_apigateway1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -262,7 +261,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -321,7 +320,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -423,7 +422,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found 
= false; @@ -850,7 +849,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -909,7 +908,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1010,7 +1009,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1438,7 +1437,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1497,7 +1496,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1599,7 +1598,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); 
match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1882,7 +1881,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2132,7 +2131,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2439,7 +2438,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2511,7 +2510,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2539,7 +2538,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2639,7 +2638,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2711,7 +2710,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2739,7 +2738,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2839,7 +2838,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2911,7 +2910,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2939,7 +2938,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3105,7 +3104,7 @@ async fn main() { let mut app = App::new("apigateway1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20221213") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_apigateway1_cli") .arg(Arg::with_name("url") diff --git a/gen/apigateway1/Cargo.toml b/gen/apigateway1/Cargo.toml index 7b5f1a5e0b..dbdfadbe65 100644 --- a/gen/apigateway1/Cargo.toml +++ b/gen/apigateway1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-apigateway1" -version = "5.0.2-beta-1+20221213" +version = "5.0.2+20221213" authors = ["Sebastian Thiel "] description = "A complete library to interact with apigateway (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/apigateway1" homepage = "https://cloud.google.com/api-gateway/docs" -documentation = "https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213" 
+documentation = "https://docs.rs/google-apigateway1/5.0.2+20221213" license = "MIT" keywords = ["apigateway", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/apigateway1/README.md b/gen/apigateway1/README.md index 3106dc8d63..77568246d2 100644 --- a/gen/apigateway1/README.md +++ b/gen/apigateway1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-apigateway1` library allows access to all features of the *Google apigateway* service. -This documentation was generated from *apigateway* crate version *5.0.2-beta-1+20221213*, where *20221213* is the exact revision of the *apigateway:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *apigateway* crate version *5.0.2+20221213*, where *20221213* is the exact revision of the *apigateway:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *apigateway* *v1* API can be found at the [official documentation site](https://cloud.google.com/api-gateway/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/Apigateway) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/Apigateway) ... 
* projects - * [*locations apis configs create*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiConfigCreateCall), [*locations apis configs delete*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiConfigDeleteCall), [*locations apis configs get*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiConfigGetCall), [*locations apis configs get iam policy*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiConfigGetIamPolicyCall), [*locations apis configs list*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiConfigListCall), [*locations apis configs patch*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiConfigPatchCall), [*locations apis configs set iam policy*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiConfigSetIamPolicyCall), [*locations apis configs test iam permissions*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiConfigTestIamPermissionCall), [*locations apis create*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiCreateCall), [*locations apis delete*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiDeleteCall), [*locations apis get*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiGetCall), [*locations apis get iam policy*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiGetIamPolicyCall), [*locations apis list*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiListCall), [*locations apis 
patch*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiPatchCall), [*locations apis set iam policy*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiSetIamPolicyCall), [*locations apis test iam permissions*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationApiTestIamPermissionCall), [*locations gateways create*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGatewayCreateCall), [*locations gateways delete*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGatewayDeleteCall), [*locations gateways get*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGatewayGetCall), [*locations gateways get iam policy*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGatewayGetIamPolicyCall), [*locations gateways list*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGatewayListCall), [*locations gateways patch*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGatewayPatchCall), [*locations gateways set iam policy*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGatewaySetIamPolicyCall), [*locations gateways test iam permissions*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGatewayTestIamPermissionCall), [*locations get*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationListCall), [*locations operations 
cancel*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/api::ProjectLocationOperationListCall) + * [*locations apis configs create*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiConfigCreateCall), [*locations apis configs delete*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiConfigDeleteCall), [*locations apis configs get*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiConfigGetCall), [*locations apis configs get iam policy*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiConfigGetIamPolicyCall), [*locations apis configs list*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiConfigListCall), [*locations apis configs patch*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiConfigPatchCall), [*locations apis configs set iam policy*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiConfigSetIamPolicyCall), [*locations apis configs test iam permissions*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiConfigTestIamPermissionCall), [*locations apis create*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiCreateCall), [*locations apis 
delete*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiDeleteCall), [*locations apis get*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiGetCall), [*locations apis get iam policy*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiGetIamPolicyCall), [*locations apis list*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiListCall), [*locations apis patch*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiPatchCall), [*locations apis set iam policy*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiSetIamPolicyCall), [*locations apis test iam permissions*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationApiTestIamPermissionCall), [*locations gateways create*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGatewayCreateCall), [*locations gateways delete*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGatewayDeleteCall), [*locations gateways get*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGatewayGetCall), [*locations gateways get iam policy*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGatewayGetIamPolicyCall), [*locations gateways list*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGatewayListCall), [*locations gateways patch*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGatewayPatchCall), [*locations gateways set iam policy*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGatewaySetIamPolicyCall), [*locations gateways test iam 
permissions*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGatewayTestIamPermissionCall), [*locations get*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/Apigateway)** +* **[Hub](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/Apigateway)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::CallBuilder) -* **[Resources](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::CallBuilder) +* **[Resources](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::Part)** + * **[Parts](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. 
This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::Delegate) to the -[Method Builder](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::Delegate) to the +[Method Builder](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::RequestValue) and -[decodable](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::RequestValue) and +[decodable](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-apigateway1/5.0.2-beta-1+20221213/google_apigateway1/client::RequestValue) are moved +* [request values](https://docs.rs/google-apigateway1/5.0.2+20221213/google_apigateway1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/apigateway1/src/api.rs b/gen/apigateway1/src/api.rs index 33fc31e7a7..57f8c52a4a 100644 --- a/gen/apigateway1/src/api.rs +++ b/gen/apigateway1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Apigateway { Apigateway { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://apigateway.googleapis.com/".to_string(), _root_url: "https://apigateway.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Apigateway { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/apigateway1/src/client.rs b/gen/apigateway1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/apigateway1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/apigateway1/src/lib.rs b/gen/apigateway1/src/lib.rs index 180001f826..92bab8214b 100644 --- a/gen/apigateway1/src/lib.rs +++ b/gen/apigateway1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *apigateway* crate version *5.0.2-beta-1+20221213*, where *20221213* is the exact revision of the *apigateway:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *apigateway* crate version *5.0.2+20221213*, where *20221213* is the exact revision of the *apigateway:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *apigateway* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/api-gateway/docs). diff --git a/gen/apigee1-cli/Cargo.toml b/gen/apigee1-cli/Cargo.toml index 531623402d..40f711f73d 100644 --- a/gen/apigee1-cli/Cargo.toml +++ b/gen/apigee1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-apigee1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Apigee (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/apigee1-cli" @@ -20,13 +20,13 @@ name = "apigee1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-apigee1] path = "../apigee1" -version = "4.0.1+20220301" +version = "5.0.2+20230104" + diff --git a/gen/apigee1-cli/README.md b/gen/apigee1-cli/README.md index ec5a9e4a8a..50c696b3b4 100644 --- a/gen/apigee1-cli/README.md +++ b/gen/apigee1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the 
*Apigee* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Apigee* API at revision *20230104*. The CLI is at version *5.0.2*. ```bash apigee1 [options] @@ -59,6 +59,10 @@ apigee1 [options] apis-get [-p ]... [-o ] apis-keyvaluemaps-create (-r )... [-p ]... [-o ] apis-keyvaluemaps-delete [-p ]... [-o ] + apis-keyvaluemaps-entries-create (-r )... [-p ]... [-o ] + apis-keyvaluemaps-entries-delete [-p ]... [-o ] + apis-keyvaluemaps-entries-get [-p ]... [-o ] + apis-keyvaluemaps-entries-list [-p ]... [-o ] apis-list [-p ]... [-o ] apis-patch (-r )... [-p ]... [-o ] apis-revisions-delete [-p ]... [-o ] @@ -125,6 +129,7 @@ apigee1 [options] envgroups-create (-r )... [-p ]... [-o ] envgroups-delete [-p ]... [-o ] envgroups-get [-p ]... [-o ] + envgroups-get-deployed-ingress-config [-p ]... [-o ] envgroups-list [-p ]... [-o ] envgroups-patch (-r )... [-p ]... [-o ] environments-analytics-admin-get-schemav2 [-p ]... [-o ] @@ -157,6 +162,7 @@ apigee1 [options] environments-flowhooks-detach-shared-flow-from-flow-hook [-p ]... [-o ] environments-flowhooks-get [-p ]... [-o ] environments-get [-p ]... [-o ] + environments-get-api-security-runtime-config [-p ]... [-o ] environments-get-debugmask [-p ]... [-o ] environments-get-deployed-config [-p ]... [-o ] environments-get-iam-policy [-p ]... [-o ] @@ -172,10 +178,16 @@ apigee1 [options] environments-keystores-get [-p ]... [-o ] environments-keyvaluemaps-create (-r )... [-p ]... [-o ] environments-keyvaluemaps-delete [-p ]... [-o ] + environments-keyvaluemaps-entries-create (-r )... [-p ]... [-o ] + environments-keyvaluemaps-entries-delete [-p ]... [-o ] + environments-keyvaluemaps-entries-get [-p ]... [-o ] + environments-keyvaluemaps-entries-list [-p ]... [-o ] + environments-modify-environment (-r )... [-p ]... [-o ] environments-optimized-stats-get [-p ]... [-o ] environments-queries-create (-r )... [-p ]... [-o ] environments-queries-get [-p ]... 
[-o ] environments-queries-get-result [-p ]... [-o ] + environments-queries-get-resulturl [-p ]... [-o ] environments-queries-list [-p ]... [-o ] environments-references-create (-r )... [-p ]... [-o ] environments-references-delete [-p ]... [-o ] @@ -187,6 +199,13 @@ apigee1 [options] environments-resourcefiles-list [-p ]... [-o ] environments-resourcefiles-list-environment-resources [-p ]... [-o ] environments-resourcefiles-update (-r )... [-p ]... [-o ] + environments-security-reports-create (-r )... [-p ]... [-o ] + environments-security-reports-get [-p ]... [-o ] + environments-security-reports-get-result [-p ]... [-o ] + environments-security-reports-get-result-view [-p ]... [-o ] + environments-security-reports-list [-p ]... [-o ] + environments-security-stats-query-tabular-stats (-r )... [-p ]... [-o ] + environments-security-stats-query-time-series-stats (-r )... [-p ]... [-o ] environments-set-iam-policy (-r )... [-p ]... [-o ] environments-sharedflows-deployments-list [-p ]... [-o ] environments-sharedflows-revisions-deploy [-p ]... [-o ] @@ -211,6 +230,7 @@ apigee1 [options] environments-update-trace-config (-r )... [-p ]... [-o ] get [-p ]... [-o ] get-deployed-ingress-config [-p ]... [-o ] + get-project-mapping [-p ]... [-o ] get-runtime-config [-p ]... [-o ] get-sync-authorization (-r )... [-p ]... [-o ] host-queries-create (-r )... [-p ]... [-o ] @@ -218,6 +238,11 @@ apigee1 [options] host-queries-get-result [-p ]... [-o ] host-queries-get-result-view [-p ]... [-o ] host-queries-list [-p ]... [-o ] + host-security-reports-create (-r )... [-p ]... [-o ] + host-security-reports-get [-p ]... [-o ] + host-security-reports-get-result [-p ]... [-o ] + host-security-reports-get-result-view [-p ]... [-o ] + host-security-reports-list [-p ]... [-o ] host-stats-get [-p ]... [-o ] instances-attachments-create (-r )... [-p ]... [-o ] instances-attachments-delete [-p ]... [-o ] @@ -238,6 +263,10 @@ apigee1 [options] instances-report-status (-r )... [-p ]... 
[-o ] keyvaluemaps-create (-r )... [-p ]... [-o ] keyvaluemaps-delete [-p ]... [-o ] + keyvaluemaps-entries-create (-r )... [-p ]... [-o ] + keyvaluemaps-entries-delete [-p ]... [-o ] + keyvaluemaps-entries-get [-p ]... [-o ] + keyvaluemaps-entries-list [-p ]... [-o ] list [-p ]... [-o ] operations-get [-p ]... [-o ] operations-list [-p ]... [-o ] @@ -247,6 +276,12 @@ apigee1 [options] reports-get [-p ]... [-o ] reports-list [-p ]... [-o ] reports-update (-r )... [-p ]... [-o ] + security-profiles-environments-compute-environment-scores (-r )... [-p ]... [-o ] + security-profiles-environments-create (-r )... [-p ]... [-o ] + security-profiles-environments-delete [-p ]... [-o ] + security-profiles-get [-p ]... [-o ] + security-profiles-list [-p ]... [-o ] + security-profiles-list-revisions [-p ]... [-o ] set-addons (-r )... [-p ]... [-o ] set-sync-authorization (-r )... [-p ]... [-o ] sharedflows-create (-r )... [-p ]... [-o ] diff --git a/gen/apigee1-cli/mkdocs.yml b/gen/apigee1-cli/mkdocs.yml index 6ebde46213..a6b4b5a5c6 100644 --- a/gen/apigee1-cli/mkdocs.yml +++ b/gen/apigee1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Apigee v4.0.1+20220301 +site_name: Apigee v5.0.2+20230104 site_url: http://byron.github.io/google-apis-rs/google-apigee1-cli site_description: A complete library to interact with Apigee (protocol v1) @@ -7,242 +7,280 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/apigee1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['hybrid_issuers-list.md', 'Hybrid', 'Issuers List'] -- ['organizations_analytics-datastores-create.md', 'Organizations', 'Analytics Datastores Create'] -- ['organizations_analytics-datastores-delete.md', 'Organizations', 'Analytics Datastores Delete'] -- ['organizations_analytics-datastores-get.md', 'Organizations', 'Analytics Datastores Get'] -- ['organizations_analytics-datastores-list.md', 'Organizations', 'Analytics Datastores List'] -- ['organizations_analytics-datastores-test.md', 
'Organizations', 'Analytics Datastores Test'] -- ['organizations_analytics-datastores-update.md', 'Organizations', 'Analytics Datastores Update'] -- ['organizations_apiproducts-attributes.md', 'Organizations', 'Apiproducts Attributes'] -- ['organizations_apiproducts-attributes-delete.md', 'Organizations', 'Apiproducts Attributes Delete'] -- ['organizations_apiproducts-attributes-get.md', 'Organizations', 'Apiproducts Attributes Get'] -- ['organizations_apiproducts-attributes-list.md', 'Organizations', 'Apiproducts Attributes List'] -- ['organizations_apiproducts-attributes-update-api-product-attribute.md', 'Organizations', 'Apiproducts Attributes Update Api Product Attribute'] -- ['organizations_apiproducts-create.md', 'Organizations', 'Apiproducts Create'] -- ['organizations_apiproducts-delete.md', 'Organizations', 'Apiproducts Delete'] -- ['organizations_apiproducts-get.md', 'Organizations', 'Apiproducts Get'] -- ['organizations_apiproducts-list.md', 'Organizations', 'Apiproducts List'] -- ['organizations_apiproducts-rateplans-create.md', 'Organizations', 'Apiproducts Rateplans Create'] -- ['organizations_apiproducts-rateplans-delete.md', 'Organizations', 'Apiproducts Rateplans Delete'] -- ['organizations_apiproducts-rateplans-get.md', 'Organizations', 'Apiproducts Rateplans Get'] -- ['organizations_apiproducts-rateplans-list.md', 'Organizations', 'Apiproducts Rateplans List'] -- ['organizations_apiproducts-rateplans-update.md', 'Organizations', 'Apiproducts Rateplans Update'] -- ['organizations_apiproducts-update.md', 'Organizations', 'Apiproducts Update'] -- ['organizations_apis-create.md', 'Organizations', 'Apis Create'] -- ['organizations_apis-delete.md', 'Organizations', 'Apis Delete'] -- ['organizations_apis-deployments-list.md', 'Organizations', 'Apis Deployments List'] -- ['organizations_apis-get.md', 'Organizations', 'Apis Get'] -- ['organizations_apis-keyvaluemaps-create.md', 'Organizations', 'Apis Keyvaluemaps Create'] -- 
['organizations_apis-keyvaluemaps-delete.md', 'Organizations', 'Apis Keyvaluemaps Delete'] -- ['organizations_apis-list.md', 'Organizations', 'Apis List'] -- ['organizations_apis-patch.md', 'Organizations', 'Apis Patch'] -- ['organizations_apis-revisions-delete.md', 'Organizations', 'Apis Revisions Delete'] -- ['organizations_apis-revisions-deployments-list.md', 'Organizations', 'Apis Revisions Deployments List'] -- ['organizations_apis-revisions-get.md', 'Organizations', 'Apis Revisions Get'] -- ['organizations_apis-revisions-update-api-proxy-revision.md', 'Organizations', 'Apis Revisions Update Api Proxy Revision'] -- ['organizations_apps-get.md', 'Organizations', 'Apps Get'] -- ['organizations_apps-list.md', 'Organizations', 'Apps List'] -- ['organizations_create.md', 'Organizations', 'Create'] -- ['organizations_datacollectors-create.md', 'Organizations', 'Datacollectors Create'] -- ['organizations_datacollectors-delete.md', 'Organizations', 'Datacollectors Delete'] -- ['organizations_datacollectors-get.md', 'Organizations', 'Datacollectors Get'] -- ['organizations_datacollectors-list.md', 'Organizations', 'Datacollectors List'] -- ['organizations_datacollectors-patch.md', 'Organizations', 'Datacollectors Patch'] -- ['organizations_delete.md', 'Organizations', 'Delete'] -- ['organizations_deployments-list.md', 'Organizations', 'Deployments List'] -- ['organizations_developers-apps-attributes.md', 'Organizations', 'Developers Apps Attributes'] -- ['organizations_developers-apps-attributes-delete.md', 'Organizations', 'Developers Apps Attributes Delete'] -- ['organizations_developers-apps-attributes-get.md', 'Organizations', 'Developers Apps Attributes Get'] -- ['organizations_developers-apps-attributes-list.md', 'Organizations', 'Developers Apps Attributes List'] -- ['organizations_developers-apps-attributes-update-developer-app-attribute.md', 'Organizations', 'Developers Apps Attributes Update Developer App Attribute'] -- 
['organizations_developers-apps-create.md', 'Organizations', 'Developers Apps Create'] -- ['organizations_developers-apps-delete.md', 'Organizations', 'Developers Apps Delete'] -- ['organizations_developers-apps-generate-key-pair-or-update-developer-app-status.md', 'Organizations', 'Developers Apps Generate Key Pair Or Update Developer App Status'] -- ['organizations_developers-apps-get.md', 'Organizations', 'Developers Apps Get'] -- ['organizations_developers-apps-keys-apiproducts-delete.md', 'Organizations', 'Developers Apps Keys Apiproducts Delete'] -- ['organizations_developers-apps-keys-apiproducts-update-developer-app-key-api-product.md', 'Organizations', 'Developers Apps Keys Apiproducts Update Developer App Key Api Product'] -- ['organizations_developers-apps-keys-create.md', 'Organizations', 'Developers Apps Keys Create'] -- ['organizations_developers-apps-keys-create-create.md', 'Organizations', 'Developers Apps Keys Create Create'] -- ['organizations_developers-apps-keys-delete.md', 'Organizations', 'Developers Apps Keys Delete'] -- ['organizations_developers-apps-keys-get.md', 'Organizations', 'Developers Apps Keys Get'] -- ['organizations_developers-apps-keys-replace-developer-app-key.md', 'Organizations', 'Developers Apps Keys Replace Developer App Key'] -- ['organizations_developers-apps-keys-update-developer-app-key.md', 'Organizations', 'Developers Apps Keys Update Developer App Key'] -- ['organizations_developers-apps-list.md', 'Organizations', 'Developers Apps List'] -- ['organizations_developers-apps-update.md', 'Organizations', 'Developers Apps Update'] -- ['organizations_developers-attributes.md', 'Organizations', 'Developers Attributes'] -- ['organizations_developers-attributes-delete.md', 'Organizations', 'Developers Attributes Delete'] -- ['organizations_developers-attributes-get.md', 'Organizations', 'Developers Attributes Get'] -- ['organizations_developers-attributes-list.md', 'Organizations', 'Developers Attributes List'] -- 
['organizations_developers-attributes-update-developer-attribute.md', 'Organizations', 'Developers Attributes Update Developer Attribute'] -- ['organizations_developers-balance-adjust.md', 'Organizations', 'Developers Balance Adjust'] -- ['organizations_developers-balance-credit.md', 'Organizations', 'Developers Balance Credit'] -- ['organizations_developers-create.md', 'Organizations', 'Developers Create'] -- ['organizations_developers-delete.md', 'Organizations', 'Developers Delete'] -- ['organizations_developers-get.md', 'Organizations', 'Developers Get'] -- ['organizations_developers-get-balance.md', 'Organizations', 'Developers Get Balance'] -- ['organizations_developers-get-monetization-config.md', 'Organizations', 'Developers Get Monetization Config'] -- ['organizations_developers-list.md', 'Organizations', 'Developers List'] -- ['organizations_developers-set-developer-status.md', 'Organizations', 'Developers Set Developer Status'] -- ['organizations_developers-subscriptions-create.md', 'Organizations', 'Developers Subscriptions Create'] -- ['organizations_developers-subscriptions-expire.md', 'Organizations', 'Developers Subscriptions Expire'] -- ['organizations_developers-subscriptions-get.md', 'Organizations', 'Developers Subscriptions Get'] -- ['organizations_developers-subscriptions-list.md', 'Organizations', 'Developers Subscriptions List'] -- ['organizations_developers-update.md', 'Organizations', 'Developers Update'] -- ['organizations_developers-update-monetization-config.md', 'Organizations', 'Developers Update Monetization Config'] -- ['organizations_endpoint-attachments-create.md', 'Organizations', 'Endpoint Attachments Create'] -- ['organizations_endpoint-attachments-delete.md', 'Organizations', 'Endpoint Attachments Delete'] -- ['organizations_endpoint-attachments-get.md', 'Organizations', 'Endpoint Attachments Get'] -- ['organizations_endpoint-attachments-list.md', 'Organizations', 'Endpoint Attachments List'] -- 
['organizations_envgroups-attachments-create.md', 'Organizations', 'Envgroups Attachments Create'] -- ['organizations_envgroups-attachments-delete.md', 'Organizations', 'Envgroups Attachments Delete'] -- ['organizations_envgroups-attachments-get.md', 'Organizations', 'Envgroups Attachments Get'] -- ['organizations_envgroups-attachments-list.md', 'Organizations', 'Envgroups Attachments List'] -- ['organizations_envgroups-create.md', 'Organizations', 'Envgroups Create'] -- ['organizations_envgroups-delete.md', 'Organizations', 'Envgroups Delete'] -- ['organizations_envgroups-get.md', 'Organizations', 'Envgroups Get'] -- ['organizations_envgroups-list.md', 'Organizations', 'Envgroups List'] -- ['organizations_envgroups-patch.md', 'Organizations', 'Envgroups Patch'] -- ['organizations_environments-analytics-admin-get-schemav2.md', 'Organizations', 'Environments Analytics Admin Get Schemav2'] -- ['organizations_environments-analytics-exports-create.md', 'Organizations', 'Environments Analytics Exports Create'] -- ['organizations_environments-analytics-exports-get.md', 'Organizations', 'Environments Analytics Exports Get'] -- ['organizations_environments-analytics-exports-list.md', 'Organizations', 'Environments Analytics Exports List'] -- ['organizations_environments-apis-deployments-list.md', 'Organizations', 'Environments Apis Deployments List'] -- ['organizations_environments-apis-revisions-debugsessions-create.md', 'Organizations', 'Environments Apis Revisions Debugsessions Create'] -- ['organizations_environments-apis-revisions-debugsessions-data-get.md', 'Organizations', 'Environments Apis Revisions Debugsessions Data Get'] -- ['organizations_environments-apis-revisions-debugsessions-delete-data.md', 'Organizations', 'Environments Apis Revisions Debugsessions Delete Data'] -- ['organizations_environments-apis-revisions-debugsessions-get.md', 'Organizations', 'Environments Apis Revisions Debugsessions Get'] -- 
['organizations_environments-apis-revisions-debugsessions-list.md', 'Organizations', 'Environments Apis Revisions Debugsessions List'] -- ['organizations_environments-apis-revisions-deploy.md', 'Organizations', 'Environments Apis Revisions Deploy'] -- ['organizations_environments-apis-revisions-deployments-generate-deploy-change-report.md', 'Organizations', 'Environments Apis Revisions Deployments Generate Deploy Change Report'] -- ['organizations_environments-apis-revisions-deployments-generate-undeploy-change-report.md', 'Organizations', 'Environments Apis Revisions Deployments Generate Undeploy Change Report'] -- ['organizations_environments-apis-revisions-get-deployments.md', 'Organizations', 'Environments Apis Revisions Get Deployments'] -- ['organizations_environments-apis-revisions-undeploy.md', 'Organizations', 'Environments Apis Revisions Undeploy'] -- ['organizations_environments-archive-deployments-create.md', 'Organizations', 'Environments Archive Deployments Create'] -- ['organizations_environments-archive-deployments-delete.md', 'Organizations', 'Environments Archive Deployments Delete'] -- ['organizations_environments-archive-deployments-generate-download-url.md', 'Organizations', 'Environments Archive Deployments Generate Download Url'] -- ['organizations_environments-archive-deployments-generate-upload-url.md', 'Organizations', 'Environments Archive Deployments Generate Upload Url'] -- ['organizations_environments-archive-deployments-get.md', 'Organizations', 'Environments Archive Deployments Get'] -- ['organizations_environments-archive-deployments-list.md', 'Organizations', 'Environments Archive Deployments List'] -- ['organizations_environments-archive-deployments-patch.md', 'Organizations', 'Environments Archive Deployments Patch'] -- ['organizations_environments-caches-delete.md', 'Organizations', 'Environments Caches Delete'] -- ['organizations_environments-create.md', 'Organizations', 'Environments Create'] -- 
['organizations_environments-delete.md', 'Organizations', 'Environments Delete'] -- ['organizations_environments-deployments-list.md', 'Organizations', 'Environments Deployments List'] -- ['organizations_environments-flowhooks-attach-shared-flow-to-flow-hook.md', 'Organizations', 'Environments Flowhooks Attach Shared Flow To Flow Hook'] -- ['organizations_environments-flowhooks-detach-shared-flow-from-flow-hook.md', 'Organizations', 'Environments Flowhooks Detach Shared Flow From Flow Hook'] -- ['organizations_environments-flowhooks-get.md', 'Organizations', 'Environments Flowhooks Get'] -- ['organizations_environments-get.md', 'Organizations', 'Environments Get'] -- ['organizations_environments-get-debugmask.md', 'Organizations', 'Environments Get Debugmask'] -- ['organizations_environments-get-deployed-config.md', 'Organizations', 'Environments Get Deployed Config'] -- ['organizations_environments-get-iam-policy.md', 'Organizations', 'Environments Get Iam Policy'] -- ['organizations_environments-get-trace-config.md', 'Organizations', 'Environments Get Trace Config'] -- ['organizations_environments-keystores-aliases-create.md', 'Organizations', 'Environments Keystores Aliases Create'] -- ['organizations_environments-keystores-aliases-csr.md', 'Organizations', 'Environments Keystores Aliases Csr'] -- ['organizations_environments-keystores-aliases-delete.md', 'Organizations', 'Environments Keystores Aliases Delete'] -- ['organizations_environments-keystores-aliases-get.md', 'Organizations', 'Environments Keystores Aliases Get'] -- ['organizations_environments-keystores-aliases-get-certificate.md', 'Organizations', 'Environments Keystores Aliases Get Certificate'] -- ['organizations_environments-keystores-aliases-update.md', 'Organizations', 'Environments Keystores Aliases Update'] -- ['organizations_environments-keystores-create.md', 'Organizations', 'Environments Keystores Create'] -- ['organizations_environments-keystores-delete.md', 'Organizations', 'Environments 
Keystores Delete'] -- ['organizations_environments-keystores-get.md', 'Organizations', 'Environments Keystores Get'] -- ['organizations_environments-keyvaluemaps-create.md', 'Organizations', 'Environments Keyvaluemaps Create'] -- ['organizations_environments-keyvaluemaps-delete.md', 'Organizations', 'Environments Keyvaluemaps Delete'] -- ['organizations_environments-optimized-stats-get.md', 'Organizations', 'Environments Optimized Stats Get'] -- ['organizations_environments-queries-create.md', 'Organizations', 'Environments Queries Create'] -- ['organizations_environments-queries-get.md', 'Organizations', 'Environments Queries Get'] -- ['organizations_environments-queries-get-result.md', 'Organizations', 'Environments Queries Get Result'] -- ['organizations_environments-queries-list.md', 'Organizations', 'Environments Queries List'] -- ['organizations_environments-references-create.md', 'Organizations', 'Environments References Create'] -- ['organizations_environments-references-delete.md', 'Organizations', 'Environments References Delete'] -- ['organizations_environments-references-get.md', 'Organizations', 'Environments References Get'] -- ['organizations_environments-references-update.md', 'Organizations', 'Environments References Update'] -- ['organizations_environments-resourcefiles-create.md', 'Organizations', 'Environments Resourcefiles Create'] -- ['organizations_environments-resourcefiles-delete.md', 'Organizations', 'Environments Resourcefiles Delete'] -- ['organizations_environments-resourcefiles-get.md', 'Organizations', 'Environments Resourcefiles Get'] -- ['organizations_environments-resourcefiles-list.md', 'Organizations', 'Environments Resourcefiles List'] -- ['organizations_environments-resourcefiles-list-environment-resources.md', 'Organizations', 'Environments Resourcefiles List Environment Resources'] -- ['organizations_environments-resourcefiles-update.md', 'Organizations', 'Environments Resourcefiles Update'] -- 
['organizations_environments-set-iam-policy.md', 'Organizations', 'Environments Set Iam Policy'] -- ['organizations_environments-sharedflows-deployments-list.md', 'Organizations', 'Environments Sharedflows Deployments List'] -- ['organizations_environments-sharedflows-revisions-deploy.md', 'Organizations', 'Environments Sharedflows Revisions Deploy'] -- ['organizations_environments-sharedflows-revisions-get-deployments.md', 'Organizations', 'Environments Sharedflows Revisions Get Deployments'] -- ['organizations_environments-sharedflows-revisions-undeploy.md', 'Organizations', 'Environments Sharedflows Revisions Undeploy'] -- ['organizations_environments-stats-get.md', 'Organizations', 'Environments Stats Get'] -- ['organizations_environments-subscribe.md', 'Organizations', 'Environments Subscribe'] -- ['organizations_environments-targetservers-create.md', 'Organizations', 'Environments Targetservers Create'] -- ['organizations_environments-targetservers-delete.md', 'Organizations', 'Environments Targetservers Delete'] -- ['organizations_environments-targetservers-get.md', 'Organizations', 'Environments Targetservers Get'] -- ['organizations_environments-targetservers-update.md', 'Organizations', 'Environments Targetservers Update'] -- ['organizations_environments-test-iam-permissions.md', 'Organizations', 'Environments Test Iam Permissions'] -- ['organizations_environments-trace-config-overrides-create.md', 'Organizations', 'Environments Trace Config Overrides Create'] -- ['organizations_environments-trace-config-overrides-delete.md', 'Organizations', 'Environments Trace Config Overrides Delete'] -- ['organizations_environments-trace-config-overrides-get.md', 'Organizations', 'Environments Trace Config Overrides Get'] -- ['organizations_environments-trace-config-overrides-list.md', 'Organizations', 'Environments Trace Config Overrides List'] -- ['organizations_environments-trace-config-overrides-patch.md', 'Organizations', 'Environments Trace Config Overrides 
Patch'] -- ['organizations_environments-unsubscribe.md', 'Organizations', 'Environments Unsubscribe'] -- ['organizations_environments-update.md', 'Organizations', 'Environments Update'] -- ['organizations_environments-update-debugmask.md', 'Organizations', 'Environments Update Debugmask'] -- ['organizations_environments-update-environment.md', 'Organizations', 'Environments Update Environment'] -- ['organizations_environments-update-trace-config.md', 'Organizations', 'Environments Update Trace Config'] -- ['organizations_get.md', 'Organizations', 'Get'] -- ['organizations_get-deployed-ingress-config.md', 'Organizations', 'Get Deployed Ingress Config'] -- ['organizations_get-runtime-config.md', 'Organizations', 'Get Runtime Config'] -- ['organizations_get-sync-authorization.md', 'Organizations', 'Get Sync Authorization'] -- ['organizations_host-queries-create.md', 'Organizations', 'Host Queries Create'] -- ['organizations_host-queries-get.md', 'Organizations', 'Host Queries Get'] -- ['organizations_host-queries-get-result.md', 'Organizations', 'Host Queries Get Result'] -- ['organizations_host-queries-get-result-view.md', 'Organizations', 'Host Queries Get Result View'] -- ['organizations_host-queries-list.md', 'Organizations', 'Host Queries List'] -- ['organizations_host-stats-get.md', 'Organizations', 'Host Stats Get'] -- ['organizations_instances-attachments-create.md', 'Organizations', 'Instances Attachments Create'] -- ['organizations_instances-attachments-delete.md', 'Organizations', 'Instances Attachments Delete'] -- ['organizations_instances-attachments-get.md', 'Organizations', 'Instances Attachments Get'] -- ['organizations_instances-attachments-list.md', 'Organizations', 'Instances Attachments List'] -- ['organizations_instances-canaryevaluations-create.md', 'Organizations', 'Instances Canaryevaluations Create'] -- ['organizations_instances-canaryevaluations-get.md', 'Organizations', 'Instances Canaryevaluations Get'] -- 
['organizations_instances-create.md', 'Organizations', 'Instances Create'] -- ['organizations_instances-delete.md', 'Organizations', 'Instances Delete'] -- ['organizations_instances-get.md', 'Organizations', 'Instances Get'] -- ['organizations_instances-list.md', 'Organizations', 'Instances List'] -- ['organizations_instances-nat-addresses-activate.md', 'Organizations', 'Instances Nat Addresses Activate'] -- ['organizations_instances-nat-addresses-create.md', 'Organizations', 'Instances Nat Addresses Create'] -- ['organizations_instances-nat-addresses-delete.md', 'Organizations', 'Instances Nat Addresses Delete'] -- ['organizations_instances-nat-addresses-get.md', 'Organizations', 'Instances Nat Addresses Get'] -- ['organizations_instances-nat-addresses-list.md', 'Organizations', 'Instances Nat Addresses List'] -- ['organizations_instances-patch.md', 'Organizations', 'Instances Patch'] -- ['organizations_instances-report-status.md', 'Organizations', 'Instances Report Status'] -- ['organizations_keyvaluemaps-create.md', 'Organizations', 'Keyvaluemaps Create'] -- ['organizations_keyvaluemaps-delete.md', 'Organizations', 'Keyvaluemaps Delete'] -- ['organizations_list.md', 'Organizations', 'List'] -- ['organizations_operations-get.md', 'Organizations', 'Operations Get'] -- ['organizations_operations-list.md', 'Organizations', 'Operations List'] -- ['organizations_optimized-host-stats-get.md', 'Organizations', 'Optimized Host Stats Get'] -- ['organizations_reports-create.md', 'Organizations', 'Reports Create'] -- ['organizations_reports-delete.md', 'Organizations', 'Reports Delete'] -- ['organizations_reports-get.md', 'Organizations', 'Reports Get'] -- ['organizations_reports-list.md', 'Organizations', 'Reports List'] -- ['organizations_reports-update.md', 'Organizations', 'Reports Update'] -- ['organizations_set-addons.md', 'Organizations', 'Set Addons'] -- ['organizations_set-sync-authorization.md', 'Organizations', 'Set Sync Authorization'] -- 
['organizations_sharedflows-create.md', 'Organizations', 'Sharedflows Create'] -- ['organizations_sharedflows-delete.md', 'Organizations', 'Sharedflows Delete'] -- ['organizations_sharedflows-deployments-list.md', 'Organizations', 'Sharedflows Deployments List'] -- ['organizations_sharedflows-get.md', 'Organizations', 'Sharedflows Get'] -- ['organizations_sharedflows-list.md', 'Organizations', 'Sharedflows List'] -- ['organizations_sharedflows-revisions-delete.md', 'Organizations', 'Sharedflows Revisions Delete'] -- ['organizations_sharedflows-revisions-deployments-list.md', 'Organizations', 'Sharedflows Revisions Deployments List'] -- ['organizations_sharedflows-revisions-get.md', 'Organizations', 'Sharedflows Revisions Get'] -- ['organizations_sharedflows-revisions-update-shared-flow-revision.md', 'Organizations', 'Sharedflows Revisions Update Shared Flow Revision'] -- ['organizations_sites-apicategories-create.md', 'Organizations', 'Sites Apicategories Create'] -- ['organizations_sites-apicategories-delete.md', 'Organizations', 'Sites Apicategories Delete'] -- ['organizations_sites-apicategories-get.md', 'Organizations', 'Sites Apicategories Get'] -- ['organizations_sites-apicategories-list.md', 'Organizations', 'Sites Apicategories List'] -- ['organizations_sites-apicategories-patch.md', 'Organizations', 'Sites Apicategories Patch'] -- ['organizations_update.md', 'Organizations', 'Update'] -- ['projects_provision-organization.md', 'Projects', 'Provision Organization'] +nav: +- Home: 'index.md' +- 'Hybrid': + - 'Issuers List': 'hybrid_issuers-list.md' +- 'Organizations': + - 'Analytics Datastores Create': 'organizations_analytics-datastores-create.md' + - 'Analytics Datastores Delete': 'organizations_analytics-datastores-delete.md' + - 'Analytics Datastores Get': 'organizations_analytics-datastores-get.md' + - 'Analytics Datastores List': 'organizations_analytics-datastores-list.md' + - 'Analytics Datastores Test': 'organizations_analytics-datastores-test.md' + 
- 'Analytics Datastores Update': 'organizations_analytics-datastores-update.md' + - 'Apiproducts Attributes': 'organizations_apiproducts-attributes.md' + - 'Apiproducts Attributes Delete': 'organizations_apiproducts-attributes-delete.md' + - 'Apiproducts Attributes Get': 'organizations_apiproducts-attributes-get.md' + - 'Apiproducts Attributes List': 'organizations_apiproducts-attributes-list.md' + - 'Apiproducts Attributes Update Api Product Attribute': 'organizations_apiproducts-attributes-update-api-product-attribute.md' + - 'Apiproducts Create': 'organizations_apiproducts-create.md' + - 'Apiproducts Delete': 'organizations_apiproducts-delete.md' + - 'Apiproducts Get': 'organizations_apiproducts-get.md' + - 'Apiproducts List': 'organizations_apiproducts-list.md' + - 'Apiproducts Rateplans Create': 'organizations_apiproducts-rateplans-create.md' + - 'Apiproducts Rateplans Delete': 'organizations_apiproducts-rateplans-delete.md' + - 'Apiproducts Rateplans Get': 'organizations_apiproducts-rateplans-get.md' + - 'Apiproducts Rateplans List': 'organizations_apiproducts-rateplans-list.md' + - 'Apiproducts Rateplans Update': 'organizations_apiproducts-rateplans-update.md' + - 'Apiproducts Update': 'organizations_apiproducts-update.md' + - 'Apis Create': 'organizations_apis-create.md' + - 'Apis Delete': 'organizations_apis-delete.md' + - 'Apis Deployments List': 'organizations_apis-deployments-list.md' + - 'Apis Get': 'organizations_apis-get.md' + - 'Apis Keyvaluemaps Create': 'organizations_apis-keyvaluemaps-create.md' + - 'Apis Keyvaluemaps Delete': 'organizations_apis-keyvaluemaps-delete.md' + - 'Apis Keyvaluemaps Entries Create': 'organizations_apis-keyvaluemaps-entries-create.md' + - 'Apis Keyvaluemaps Entries Delete': 'organizations_apis-keyvaluemaps-entries-delete.md' + - 'Apis Keyvaluemaps Entries Get': 'organizations_apis-keyvaluemaps-entries-get.md' + - 'Apis Keyvaluemaps Entries List': 'organizations_apis-keyvaluemaps-entries-list.md' + - 'Apis List': 
'organizations_apis-list.md' + - 'Apis Patch': 'organizations_apis-patch.md' + - 'Apis Revisions Delete': 'organizations_apis-revisions-delete.md' + - 'Apis Revisions Deployments List': 'organizations_apis-revisions-deployments-list.md' + - 'Apis Revisions Get': 'organizations_apis-revisions-get.md' + - 'Apis Revisions Update Api Proxy Revision': 'organizations_apis-revisions-update-api-proxy-revision.md' + - 'Apps Get': 'organizations_apps-get.md' + - 'Apps List': 'organizations_apps-list.md' + - 'Create': 'organizations_create.md' + - 'Datacollectors Create': 'organizations_datacollectors-create.md' + - 'Datacollectors Delete': 'organizations_datacollectors-delete.md' + - 'Datacollectors Get': 'organizations_datacollectors-get.md' + - 'Datacollectors List': 'organizations_datacollectors-list.md' + - 'Datacollectors Patch': 'organizations_datacollectors-patch.md' + - 'Delete': 'organizations_delete.md' + - 'Deployments List': 'organizations_deployments-list.md' + - 'Developers Apps Attributes': 'organizations_developers-apps-attributes.md' + - 'Developers Apps Attributes Delete': 'organizations_developers-apps-attributes-delete.md' + - 'Developers Apps Attributes Get': 'organizations_developers-apps-attributes-get.md' + - 'Developers Apps Attributes List': 'organizations_developers-apps-attributes-list.md' + - 'Developers Apps Attributes Update Developer App Attribute': 'organizations_developers-apps-attributes-update-developer-app-attribute.md' + - 'Developers Apps Create': 'organizations_developers-apps-create.md' + - 'Developers Apps Delete': 'organizations_developers-apps-delete.md' + - 'Developers Apps Generate Key Pair Or Update Developer App Status': 'organizations_developers-apps-generate-key-pair-or-update-developer-app-status.md' + - 'Developers Apps Get': 'organizations_developers-apps-get.md' + - 'Developers Apps Keys Apiproducts Delete': 'organizations_developers-apps-keys-apiproducts-delete.md' + - 'Developers Apps Keys Apiproducts Update Developer 
App Key Api Product': 'organizations_developers-apps-keys-apiproducts-update-developer-app-key-api-product.md' + - 'Developers Apps Keys Create': 'organizations_developers-apps-keys-create.md' + - 'Developers Apps Keys Create Create': 'organizations_developers-apps-keys-create-create.md' + - 'Developers Apps Keys Delete': 'organizations_developers-apps-keys-delete.md' + - 'Developers Apps Keys Get': 'organizations_developers-apps-keys-get.md' + - 'Developers Apps Keys Replace Developer App Key': 'organizations_developers-apps-keys-replace-developer-app-key.md' + - 'Developers Apps Keys Update Developer App Key': 'organizations_developers-apps-keys-update-developer-app-key.md' + - 'Developers Apps List': 'organizations_developers-apps-list.md' + - 'Developers Apps Update': 'organizations_developers-apps-update.md' + - 'Developers Attributes': 'organizations_developers-attributes.md' + - 'Developers Attributes Delete': 'organizations_developers-attributes-delete.md' + - 'Developers Attributes Get': 'organizations_developers-attributes-get.md' + - 'Developers Attributes List': 'organizations_developers-attributes-list.md' + - 'Developers Attributes Update Developer Attribute': 'organizations_developers-attributes-update-developer-attribute.md' + - 'Developers Balance Adjust': 'organizations_developers-balance-adjust.md' + - 'Developers Balance Credit': 'organizations_developers-balance-credit.md' + - 'Developers Create': 'organizations_developers-create.md' + - 'Developers Delete': 'organizations_developers-delete.md' + - 'Developers Get': 'organizations_developers-get.md' + - 'Developers Get Balance': 'organizations_developers-get-balance.md' + - 'Developers Get Monetization Config': 'organizations_developers-get-monetization-config.md' + - 'Developers List': 'organizations_developers-list.md' + - 'Developers Set Developer Status': 'organizations_developers-set-developer-status.md' + - 'Developers Subscriptions Create': 
'organizations_developers-subscriptions-create.md' + - 'Developers Subscriptions Expire': 'organizations_developers-subscriptions-expire.md' + - 'Developers Subscriptions Get': 'organizations_developers-subscriptions-get.md' + - 'Developers Subscriptions List': 'organizations_developers-subscriptions-list.md' + - 'Developers Update': 'organizations_developers-update.md' + - 'Developers Update Monetization Config': 'organizations_developers-update-monetization-config.md' + - 'Endpoint Attachments Create': 'organizations_endpoint-attachments-create.md' + - 'Endpoint Attachments Delete': 'organizations_endpoint-attachments-delete.md' + - 'Endpoint Attachments Get': 'organizations_endpoint-attachments-get.md' + - 'Endpoint Attachments List': 'organizations_endpoint-attachments-list.md' + - 'Envgroups Attachments Create': 'organizations_envgroups-attachments-create.md' + - 'Envgroups Attachments Delete': 'organizations_envgroups-attachments-delete.md' + - 'Envgroups Attachments Get': 'organizations_envgroups-attachments-get.md' + - 'Envgroups Attachments List': 'organizations_envgroups-attachments-list.md' + - 'Envgroups Create': 'organizations_envgroups-create.md' + - 'Envgroups Delete': 'organizations_envgroups-delete.md' + - 'Envgroups Get': 'organizations_envgroups-get.md' + - 'Envgroups Get Deployed Ingress Config': 'organizations_envgroups-get-deployed-ingress-config.md' + - 'Envgroups List': 'organizations_envgroups-list.md' + - 'Envgroups Patch': 'organizations_envgroups-patch.md' + - 'Environments Analytics Admin Get Schemav2': 'organizations_environments-analytics-admin-get-schemav2.md' + - 'Environments Analytics Exports Create': 'organizations_environments-analytics-exports-create.md' + - 'Environments Analytics Exports Get': 'organizations_environments-analytics-exports-get.md' + - 'Environments Analytics Exports List': 'organizations_environments-analytics-exports-list.md' + - 'Environments Apis Deployments List': 
'organizations_environments-apis-deployments-list.md' + - 'Environments Apis Revisions Debugsessions Create': 'organizations_environments-apis-revisions-debugsessions-create.md' + - 'Environments Apis Revisions Debugsessions Data Get': 'organizations_environments-apis-revisions-debugsessions-data-get.md' + - 'Environments Apis Revisions Debugsessions Delete Data': 'organizations_environments-apis-revisions-debugsessions-delete-data.md' + - 'Environments Apis Revisions Debugsessions Get': 'organizations_environments-apis-revisions-debugsessions-get.md' + - 'Environments Apis Revisions Debugsessions List': 'organizations_environments-apis-revisions-debugsessions-list.md' + - 'Environments Apis Revisions Deploy': 'organizations_environments-apis-revisions-deploy.md' + - 'Environments Apis Revisions Deployments Generate Deploy Change Report': 'organizations_environments-apis-revisions-deployments-generate-deploy-change-report.md' + - 'Environments Apis Revisions Deployments Generate Undeploy Change Report': 'organizations_environments-apis-revisions-deployments-generate-undeploy-change-report.md' + - 'Environments Apis Revisions Get Deployments': 'organizations_environments-apis-revisions-get-deployments.md' + - 'Environments Apis Revisions Undeploy': 'organizations_environments-apis-revisions-undeploy.md' + - 'Environments Archive Deployments Create': 'organizations_environments-archive-deployments-create.md' + - 'Environments Archive Deployments Delete': 'organizations_environments-archive-deployments-delete.md' + - 'Environments Archive Deployments Generate Download Url': 'organizations_environments-archive-deployments-generate-download-url.md' + - 'Environments Archive Deployments Generate Upload Url': 'organizations_environments-archive-deployments-generate-upload-url.md' + - 'Environments Archive Deployments Get': 'organizations_environments-archive-deployments-get.md' + - 'Environments Archive Deployments List': 
'organizations_environments-archive-deployments-list.md' + - 'Environments Archive Deployments Patch': 'organizations_environments-archive-deployments-patch.md' + - 'Environments Caches Delete': 'organizations_environments-caches-delete.md' + - 'Environments Create': 'organizations_environments-create.md' + - 'Environments Delete': 'organizations_environments-delete.md' + - 'Environments Deployments List': 'organizations_environments-deployments-list.md' + - 'Environments Flowhooks Attach Shared Flow To Flow Hook': 'organizations_environments-flowhooks-attach-shared-flow-to-flow-hook.md' + - 'Environments Flowhooks Detach Shared Flow From Flow Hook': 'organizations_environments-flowhooks-detach-shared-flow-from-flow-hook.md' + - 'Environments Flowhooks Get': 'organizations_environments-flowhooks-get.md' + - 'Environments Get': 'organizations_environments-get.md' + - 'Environments Get Api Security Runtime Config': 'organizations_environments-get-api-security-runtime-config.md' + - 'Environments Get Debugmask': 'organizations_environments-get-debugmask.md' + - 'Environments Get Deployed Config': 'organizations_environments-get-deployed-config.md' + - 'Environments Get Iam Policy': 'organizations_environments-get-iam-policy.md' + - 'Environments Get Trace Config': 'organizations_environments-get-trace-config.md' + - 'Environments Keystores Aliases Create': 'organizations_environments-keystores-aliases-create.md' + - 'Environments Keystores Aliases Csr': 'organizations_environments-keystores-aliases-csr.md' + - 'Environments Keystores Aliases Delete': 'organizations_environments-keystores-aliases-delete.md' + - 'Environments Keystores Aliases Get': 'organizations_environments-keystores-aliases-get.md' + - 'Environments Keystores Aliases Get Certificate': 'organizations_environments-keystores-aliases-get-certificate.md' + - 'Environments Keystores Aliases Update': 'organizations_environments-keystores-aliases-update.md' + - 'Environments Keystores Create': 
'organizations_environments-keystores-create.md' + - 'Environments Keystores Delete': 'organizations_environments-keystores-delete.md' + - 'Environments Keystores Get': 'organizations_environments-keystores-get.md' + - 'Environments Keyvaluemaps Create': 'organizations_environments-keyvaluemaps-create.md' + - 'Environments Keyvaluemaps Delete': 'organizations_environments-keyvaluemaps-delete.md' + - 'Environments Keyvaluemaps Entries Create': 'organizations_environments-keyvaluemaps-entries-create.md' + - 'Environments Keyvaluemaps Entries Delete': 'organizations_environments-keyvaluemaps-entries-delete.md' + - 'Environments Keyvaluemaps Entries Get': 'organizations_environments-keyvaluemaps-entries-get.md' + - 'Environments Keyvaluemaps Entries List': 'organizations_environments-keyvaluemaps-entries-list.md' + - 'Environments Modify Environment': 'organizations_environments-modify-environment.md' + - 'Environments Optimized Stats Get': 'organizations_environments-optimized-stats-get.md' + - 'Environments Queries Create': 'organizations_environments-queries-create.md' + - 'Environments Queries Get': 'organizations_environments-queries-get.md' + - 'Environments Queries Get Result': 'organizations_environments-queries-get-result.md' + - 'Environments Queries Get Resulturl': 'organizations_environments-queries-get-resulturl.md' + - 'Environments Queries List': 'organizations_environments-queries-list.md' + - 'Environments References Create': 'organizations_environments-references-create.md' + - 'Environments References Delete': 'organizations_environments-references-delete.md' + - 'Environments References Get': 'organizations_environments-references-get.md' + - 'Environments References Update': 'organizations_environments-references-update.md' + - 'Environments Resourcefiles Create': 'organizations_environments-resourcefiles-create.md' + - 'Environments Resourcefiles Delete': 'organizations_environments-resourcefiles-delete.md' + - 'Environments Resourcefiles Get': 
'organizations_environments-resourcefiles-get.md' + - 'Environments Resourcefiles List': 'organizations_environments-resourcefiles-list.md' + - 'Environments Resourcefiles List Environment Resources': 'organizations_environments-resourcefiles-list-environment-resources.md' + - 'Environments Resourcefiles Update': 'organizations_environments-resourcefiles-update.md' + - 'Environments Security Reports Create': 'organizations_environments-security-reports-create.md' + - 'Environments Security Reports Get': 'organizations_environments-security-reports-get.md' + - 'Environments Security Reports Get Result': 'organizations_environments-security-reports-get-result.md' + - 'Environments Security Reports Get Result View': 'organizations_environments-security-reports-get-result-view.md' + - 'Environments Security Reports List': 'organizations_environments-security-reports-list.md' + - 'Environments Security Stats Query Tabular Stats': 'organizations_environments-security-stats-query-tabular-stats.md' + - 'Environments Security Stats Query Time Series Stats': 'organizations_environments-security-stats-query-time-series-stats.md' + - 'Environments Set Iam Policy': 'organizations_environments-set-iam-policy.md' + - 'Environments Sharedflows Deployments List': 'organizations_environments-sharedflows-deployments-list.md' + - 'Environments Sharedflows Revisions Deploy': 'organizations_environments-sharedflows-revisions-deploy.md' + - 'Environments Sharedflows Revisions Get Deployments': 'organizations_environments-sharedflows-revisions-get-deployments.md' + - 'Environments Sharedflows Revisions Undeploy': 'organizations_environments-sharedflows-revisions-undeploy.md' + - 'Environments Stats Get': 'organizations_environments-stats-get.md' + - 'Environments Subscribe': 'organizations_environments-subscribe.md' + - 'Environments Targetservers Create': 'organizations_environments-targetservers-create.md' + - 'Environments Targetservers Delete': 
'organizations_environments-targetservers-delete.md' + - 'Environments Targetservers Get': 'organizations_environments-targetservers-get.md' + - 'Environments Targetservers Update': 'organizations_environments-targetservers-update.md' + - 'Environments Test Iam Permissions': 'organizations_environments-test-iam-permissions.md' + - 'Environments Trace Config Overrides Create': 'organizations_environments-trace-config-overrides-create.md' + - 'Environments Trace Config Overrides Delete': 'organizations_environments-trace-config-overrides-delete.md' + - 'Environments Trace Config Overrides Get': 'organizations_environments-trace-config-overrides-get.md' + - 'Environments Trace Config Overrides List': 'organizations_environments-trace-config-overrides-list.md' + - 'Environments Trace Config Overrides Patch': 'organizations_environments-trace-config-overrides-patch.md' + - 'Environments Unsubscribe': 'organizations_environments-unsubscribe.md' + - 'Environments Update': 'organizations_environments-update.md' + - 'Environments Update Debugmask': 'organizations_environments-update-debugmask.md' + - 'Environments Update Environment': 'organizations_environments-update-environment.md' + - 'Environments Update Trace Config': 'organizations_environments-update-trace-config.md' + - 'Get': 'organizations_get.md' + - 'Get Deployed Ingress Config': 'organizations_get-deployed-ingress-config.md' + - 'Get Project Mapping': 'organizations_get-project-mapping.md' + - 'Get Runtime Config': 'organizations_get-runtime-config.md' + - 'Get Sync Authorization': 'organizations_get-sync-authorization.md' + - 'Host Queries Create': 'organizations_host-queries-create.md' + - 'Host Queries Get': 'organizations_host-queries-get.md' + - 'Host Queries Get Result': 'organizations_host-queries-get-result.md' + - 'Host Queries Get Result View': 'organizations_host-queries-get-result-view.md' + - 'Host Queries List': 'organizations_host-queries-list.md' + - 'Host Security Reports Create': 
'organizations_host-security-reports-create.md' + - 'Host Security Reports Get': 'organizations_host-security-reports-get.md' + - 'Host Security Reports Get Result': 'organizations_host-security-reports-get-result.md' + - 'Host Security Reports Get Result View': 'organizations_host-security-reports-get-result-view.md' + - 'Host Security Reports List': 'organizations_host-security-reports-list.md' + - 'Host Stats Get': 'organizations_host-stats-get.md' + - 'Instances Attachments Create': 'organizations_instances-attachments-create.md' + - 'Instances Attachments Delete': 'organizations_instances-attachments-delete.md' + - 'Instances Attachments Get': 'organizations_instances-attachments-get.md' + - 'Instances Attachments List': 'organizations_instances-attachments-list.md' + - 'Instances Canaryevaluations Create': 'organizations_instances-canaryevaluations-create.md' + - 'Instances Canaryevaluations Get': 'organizations_instances-canaryevaluations-get.md' + - 'Instances Create': 'organizations_instances-create.md' + - 'Instances Delete': 'organizations_instances-delete.md' + - 'Instances Get': 'organizations_instances-get.md' + - 'Instances List': 'organizations_instances-list.md' + - 'Instances Nat Addresses Activate': 'organizations_instances-nat-addresses-activate.md' + - 'Instances Nat Addresses Create': 'organizations_instances-nat-addresses-create.md' + - 'Instances Nat Addresses Delete': 'organizations_instances-nat-addresses-delete.md' + - 'Instances Nat Addresses Get': 'organizations_instances-nat-addresses-get.md' + - 'Instances Nat Addresses List': 'organizations_instances-nat-addresses-list.md' + - 'Instances Patch': 'organizations_instances-patch.md' + - 'Instances Report Status': 'organizations_instances-report-status.md' + - 'Keyvaluemaps Create': 'organizations_keyvaluemaps-create.md' + - 'Keyvaluemaps Delete': 'organizations_keyvaluemaps-delete.md' + - 'Keyvaluemaps Entries Create': 'organizations_keyvaluemaps-entries-create.md' + - 'Keyvaluemaps 
Entries Delete': 'organizations_keyvaluemaps-entries-delete.md' + - 'Keyvaluemaps Entries Get': 'organizations_keyvaluemaps-entries-get.md' + - 'Keyvaluemaps Entries List': 'organizations_keyvaluemaps-entries-list.md' + - 'List': 'organizations_list.md' + - 'Operations Get': 'organizations_operations-get.md' + - 'Operations List': 'organizations_operations-list.md' + - 'Optimized Host Stats Get': 'organizations_optimized-host-stats-get.md' + - 'Reports Create': 'organizations_reports-create.md' + - 'Reports Delete': 'organizations_reports-delete.md' + - 'Reports Get': 'organizations_reports-get.md' + - 'Reports List': 'organizations_reports-list.md' + - 'Reports Update': 'organizations_reports-update.md' + - 'Security Profiles Environments Compute Environment Scores': 'organizations_security-profiles-environments-compute-environment-scores.md' + - 'Security Profiles Environments Create': 'organizations_security-profiles-environments-create.md' + - 'Security Profiles Environments Delete': 'organizations_security-profiles-environments-delete.md' + - 'Security Profiles Get': 'organizations_security-profiles-get.md' + - 'Security Profiles List': 'organizations_security-profiles-list.md' + - 'Security Profiles List Revisions': 'organizations_security-profiles-list-revisions.md' + - 'Set Addons': 'organizations_set-addons.md' + - 'Set Sync Authorization': 'organizations_set-sync-authorization.md' + - 'Sharedflows Create': 'organizations_sharedflows-create.md' + - 'Sharedflows Delete': 'organizations_sharedflows-delete.md' + - 'Sharedflows Deployments List': 'organizations_sharedflows-deployments-list.md' + - 'Sharedflows Get': 'organizations_sharedflows-get.md' + - 'Sharedflows List': 'organizations_sharedflows-list.md' + - 'Sharedflows Revisions Delete': 'organizations_sharedflows-revisions-delete.md' + - 'Sharedflows Revisions Deployments List': 'organizations_sharedflows-revisions-deployments-list.md' + - 'Sharedflows Revisions Get': 
'organizations_sharedflows-revisions-get.md' + - 'Sharedflows Revisions Update Shared Flow Revision': 'organizations_sharedflows-revisions-update-shared-flow-revision.md' + - 'Sites Apicategories Create': 'organizations_sites-apicategories-create.md' + - 'Sites Apicategories Delete': 'organizations_sites-apicategories-delete.md' + - 'Sites Apicategories Get': 'organizations_sites-apicategories-get.md' + - 'Sites Apicategories List': 'organizations_sites-apicategories-list.md' + - 'Sites Apicategories Patch': 'organizations_sites-apicategories-patch.md' + - 'Update': 'organizations_update.md' +- 'Projects': + - 'Provision Organization': 'projects_provision-organization.md' theme: readthedocs diff --git a/gen/apigee1-cli/src/client.rs b/gen/apigee1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/apigee1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/apigee1-cli/src/main.rs b/gen/apigee1-cli/src/main.rs index 7a73d09dee..31cd21712c 100644 --- a/gen/apigee1-cli/src/main.rs +++ b/gen/apigee1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_apigee1::{api, Error, oauth2}; +use google_apigee1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -909,11 +908,12 @@ where "operation-group.operation-config-type" => Some(("operationGroup.operationConfigType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proxies" => Some(("proxies", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "quota" => Some(("quota", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "quota-counter-scope" => Some(("quotaCounterScope", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "quota-interval" => Some(("quotaInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "quota-time-unit" => Some(("quotaTimeUnit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "scopes" => Some(("scopes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-resources", "approval-type", "created-at", "description", "display-name", "environments", "graphql-operation-group", "last-modified-at", "name", "operation-config-type", "operation-group", "proxies", "quota", "quota-interval", 
"quota-time-unit", "scopes"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-resources", "approval-type", "created-at", "description", "display-name", "environments", "graphql-operation-group", "last-modified-at", "name", "operation-config-type", "operation-group", "proxies", "quota", "quota-counter-scope", "quota-interval", "quota-time-unit", "scopes"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1087,10 +1087,10 @@ where call = call.start_key(value.unwrap_or("")); }, "expand" => { - call = call.expand(arg_from_str(value.unwrap_or("false"), err, "expand", "boolean")); + call = call.expand( value.map(|v| arg_from_str(v, err, "expand", "boolean")).unwrap_or(false)); }, "count" => { - call = call.count(value.unwrap_or("")); + call = call.count( value.map(|v| arg_from_str(v, err, "count", "int64")).unwrap_or(-0)); }, "attributevalue" => { call = call.attributevalue(value.unwrap_or("")); @@ -1370,10 +1370,10 @@ where call = call.order_by(value.unwrap_or("")); }, "expand" => { - call = call.expand(arg_from_str(value.unwrap_or("false"), err, "expand", "boolean")); + call = call.expand( value.map(|v| arg_from_str(v, err, "expand", "boolean")).unwrap_or(false)); }, "count" => { - call = call.count(arg_from_str(value.unwrap_or("-0"), err, "count", "integer")); + call = call.count( value.map(|v| arg_from_str(v, err, "count", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1562,11 +1562,12 @@ where "operation-group.operation-config-type" => Some(("operationGroup.operationConfigType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proxies" => Some(("proxies", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "quota" => Some(("quota", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "quota-counter-scope" => Some(("quotaCounterScope", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod 
})), "quota-interval" => Some(("quotaInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "quota-time-unit" => Some(("quotaTimeUnit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "scopes" => Some(("scopes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-resources", "approval-type", "created-at", "description", "display-name", "environments", "graphql-operation-group", "last-modified-at", "name", "operation-config-type", "operation-group", "proxies", "quota", "quota-interval", "quota-time-unit", "scopes"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-resources", "approval-type", "created-at", "description", "display-name", "environments", "graphql-operation-group", "last-modified-at", "name", "operation-config-type", "operation-group", "proxies", "quota", "quota-counter-scope", "quota-interval", "quota-time-unit", "scopes"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1667,7 +1668,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate" => { - call = call.validate(arg_from_str(value.unwrap_or("false"), err, "validate", "boolean")); + call = call.validate( value.map(|v| arg_from_str(v, err, "validate", "boolean")).unwrap_or(false)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -2016,6 +2017,255 @@ where } } + async fn _organizations_apis_keyvaluemaps_entries_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = 
field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "value" => Some(("value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["name", "value"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1KeyValueEntry = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().apis_keyvaluemaps_entries_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_apis_keyvaluemaps_entries_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().apis_keyvaluemaps_entries_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut 
response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_apis_keyvaluemaps_entries_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().apis_keyvaluemaps_entries_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_apis_keyvaluemaps_entries_list(&self, opt: 
&ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().apis_keyvaluemaps_entries_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_apis_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut 
call = self.hub.organizations().apis_list(opt.value_of("parent").unwrap_or("")); @@ -2023,10 +2273,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "include-revisions" => { - call = call.include_revisions(arg_from_str(value.unwrap_or("false"), err, "include-revisions", "boolean")); + call = call.include_revisions( value.map(|v| arg_from_str(v, err, "include-revisions", "boolean")).unwrap_or(false)); }, "include-meta-data" => { - call = call.include_meta_data(arg_from_str(value.unwrap_or("false"), err, "include-meta-data", "boolean")); + call = call.include_meta_data( value.map(|v| arg_from_str(v, err, "include-meta-data", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2123,7 +2373,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2373,7 +2623,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate" => { - call = call.validate(arg_from_str(value.unwrap_or("false"), err, "validate", "boolean")); + call = call.validate( value.map(|v| arg_from_str(v, err, "validate", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2487,19 +2737,19 @@ where call = call.start_key(value.unwrap_or("")); }, "rows" => { - call = call.rows(value.unwrap_or("")); + call = call.rows( value.map(|v| arg_from_str(v, err, "rows", "int64")).unwrap_or(-0)); }, "key-status" => { call = call.key_status(value.unwrap_or("")); }, "include-cred" => { - call = call.include_cred(arg_from_str(value.unwrap_or("false"), err, "include-cred", "boolean")); + call = call.include_cred( value.map(|v| arg_from_str(v, err, "include-cred", "boolean")).unwrap_or(false)); }, "ids" => { call = call.ids(value.unwrap_or("")); }, "expand" => { - call = 
call.expand(arg_from_str(value.unwrap_or("false"), err, "expand", "boolean")); + call = call.expand( value.map(|v| arg_from_str(v, err, "expand", "boolean")).unwrap_or(false)); }, "apptype" => { call = call.apptype(value.unwrap_or("")); @@ -2578,11 +2828,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "addons-config.advanced-api-ops-config.enabled" => Some(("addonsConfig.advancedApiOpsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "addons-config.api-security-config.enabled" => Some(("addonsConfig.apiSecurityConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "addons-config.api-security-config.expires-at" => Some(("addonsConfig.apiSecurityConfig.expiresAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "addons-config.connectors-platform-config.enabled" => Some(("addonsConfig.connectorsPlatformConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.connectors-platform-config.expires-at" => Some(("addonsConfig.connectorsPlatformConfig.expiresAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "addons-config.integration-config.enabled" => Some(("addonsConfig.integrationConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.monetization-config.enabled" => Some(("addonsConfig.monetizationConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "analytics-region" => Some(("analyticsRegion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "apigee-project-id" => Some(("apigeeProjectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "attributes" => Some(("attributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "authorized-network" => Some(("authorizedNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "billing-type" => Some(("billingType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2603,7 +2856,7 @@ where "subscription-type" => Some(("subscriptionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-api-ops-config", "analytics-region", "attributes", "authorized-network", "billing-type", "ca-certificate", "connectors-platform-config", "created-at", "customer-name", "description", "display-name", "enabled", "environments", "expires-at", "integration-config", "last-modified-at", "monetization-config", "name", "portal-disabled", "project-id", "runtime-database-encryption-key-name", "runtime-type", "state", "subscription-type", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-api-ops-config", "analytics-region", "api-security-config", "apigee-project-id", "attributes", "authorized-network", "billing-type", "ca-certificate", "connectors-platform-config", "created-at", "customer-name", "description", "display-name", "enabled", "environments", "expires-at", "integration-config", "last-modified-at", "monetization-config", "name", "portal-disabled", "project-id", "runtime-database-encryption-key-name", "runtime-type", "state", "subscription-type", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2874,7 +3127,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2967,7 +3220,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); 
match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3022,6 +3275,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "retention" => { + call = call.retention(value.unwrap_or("")); + }, _ => { let mut found = false; for param in &self.gp { @@ -3035,6 +3291,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); + v.extend(["retention"].iter().map(|v|*v)); v } )); } } @@ -3075,7 +3332,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "shared-flows" => { - call = call.shared_flows(arg_from_str(value.unwrap_or("false"), err, "shared-flows", "boolean")); + call = call.shared_flows( value.map(|v| arg_from_str(v, err, "shared-flows", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4345,13 +4602,13 @@ where call = call.start_key(value.unwrap_or("")); }, "shallow-expand" => { - call = call.shallow_expand(arg_from_str(value.unwrap_or("false"), err, "shallow-expand", "boolean")); + call = call.shallow_expand( value.map(|v| arg_from_str(v, err, "shallow-expand", "boolean")).unwrap_or(false)); }, "expand" => { - call = call.expand(arg_from_str(value.unwrap_or("false"), err, "expand", "boolean")); + call = call.expand( value.map(|v| arg_from_str(v, err, "expand", "boolean")).unwrap_or(false)); }, "count" => { - call = call.count(value.unwrap_or("")); + call = call.count( value.map(|v| arg_from_str(v, err, "count", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5315,16 +5572,16 @@ where call = call.start_key(value.unwrap_or("")); }, "include-company" => { - call = call.include_company(arg_from_str(value.unwrap_or("false"), err, 
"include-company", "boolean")); + call = call.include_company( value.map(|v| arg_from_str(v, err, "include-company", "boolean")).unwrap_or(false)); }, "ids" => { call = call.ids(value.unwrap_or("")); }, "expand" => { - call = call.expand(arg_from_str(value.unwrap_or("false"), err, "expand", "boolean")); + call = call.expand( value.map(|v| arg_from_str(v, err, "expand", "boolean")).unwrap_or(false)); }, "count" => { - call = call.count(value.unwrap_or("")); + call = call.count( value.map(|v| arg_from_str(v, err, "count", "int64")).unwrap_or(-0)); }, "app" => { call = call.app(value.unwrap_or("")); @@ -5668,7 +5925,7 @@ where call = call.start_key(value.unwrap_or("")); }, "count" => { - call = call.count(arg_from_str(value.unwrap_or("-0"), err, "count", "integer")); + call = call.count( value.map(|v| arg_from_str(v, err, "count", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5922,12 +6179,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "connection-state" => Some(("connectionState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-attachment" => Some(("serviceAttachment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["host", "location", "name", "service-attachment"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["connection-state", "host", "location", "name", "service-attachment", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6105,7 +6364,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6179,9 +6438,10 @@ where match &temp_cursor.to_string()[..] 
{ "created-at" => Some(("createdAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "environment" => Some(("environment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "environment-group-id" => Some(("environmentGroupId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["created-at", "environment", "name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["created-at", "environment", "environment-group-id", "name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6355,7 +6615,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6601,6 +6861,62 @@ where } } + async fn _organizations_envgroups_get_deployed_ingress_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().envgroups_get_deployed_ingress_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "view" => { + call = call.view(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + 
v.extend(["view"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_envgroups_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().envgroups_list(opt.value_of("parent").unwrap_or("")); @@ -6611,7 +6927,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6704,7 +7020,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6763,7 +7079,7 @@ where call = call.type_(value.unwrap_or("")); }, "disable-cache" => { - call = call.disable_cache(arg_from_str(value.unwrap_or("false"), err, "disable-cache", "boolean")); + call = 
call.disable_cache( value.map(|v| arg_from_str(v, err, "disable-cache", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7105,7 +7421,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "timeout" => { - call = call.timeout(value.unwrap_or("")); + call = call.timeout( value.map(|v| arg_from_str(v, err, "timeout", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7320,7 +7636,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7379,10 +7695,10 @@ where call = call.service_account(value.unwrap_or("")); }, "sequenced-rollout" => { - call = call.sequenced_rollout(arg_from_str(value.unwrap_or("false"), err, "sequenced-rollout", "boolean")); + call = call.sequenced_rollout( value.map(|v| arg_from_str(v, err, "sequenced-rollout", "boolean")).unwrap_or(false)); }, "override" => { - call = call.override_(arg_from_str(value.unwrap_or("false"), err, "override", "boolean")); + call = call.override_( value.map(|v| arg_from_str(v, err, "override", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7438,7 +7754,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "override" => { - call = call.override_(arg_from_str(value.unwrap_or("false"), err, "override", "boolean")); + call = call.override_( value.map(|v| arg_from_str(v, err, "override", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7598,7 +7914,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "sequenced-rollout" => { - call = call.sequenced_rollout(arg_from_str(value.unwrap_or("false"), err, "sequenced-rollout", "boolean")); + call = call.sequenced_rollout( value.map(|v| arg_from_str(v, err, "sequenced-rollout", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -8019,7 +8335,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8116,7 +8432,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8245,11 +8561,15 @@ where "deployment-type" => Some(("deploymentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "forward-proxy-uri" => Some(("forwardProxyUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "last-modified-at" => Some(("lastModifiedAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-config.current-aggregate-node-count" => Some(("nodeConfig.currentAggregateNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.max-node-count" => Some(("nodeConfig.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.min-node-count" => Some(("nodeConfig.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, 
&vec!["api-proxy-type", "created-at", "deployment-type", "description", "display-name", "last-modified-at", "name", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-proxy-type", "created-at", "current-aggregate-node-count", "deployment-type", "description", "display-name", "forward-proxy-uri", "last-modified-at", "max-node-count", "min-node-count", "name", "node-config", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -8372,7 +8692,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "shared-flows" => { - call = call.shared_flows(arg_from_str(value.unwrap_or("false"), err, "shared-flows", "boolean")); + call = call.shared_flows( value.map(|v| arg_from_str(v, err, "shared-flows", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -8665,6 +8985,58 @@ where } } + async fn _organizations_environments_get_api_security_runtime_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_get_api_security_runtime_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); 
+ } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_environments_get_debugmask(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().environments_get_debugmask(opt.value_of("name").unwrap_or("")); @@ -8776,7 +9148,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8918,10 +9290,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ignore-newline-validation" => { - call = call.ignore_newline_validation(arg_from_str(value.unwrap_or("false"), err, "ignore-newline-validation", "boolean")); + call = call.ignore_newline_validation( value.map(|v| arg_from_str(v, err, "ignore-newline-validation", "boolean")).unwrap_or(false)); }, "ignore-expiry-validation" => { - call = call.ignore_expiry_validation(arg_from_str(value.unwrap_or("false"), err, "ignore-expiry-validation", "boolean")); + call = call.ignore_expiry_validation( value.map(|v| arg_from_str(v, err, "ignore-expiry-validation", 
"boolean")).unwrap_or(false)); }, "format" => { call = call.format(value.unwrap_or("")); @@ -9228,10 +9600,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ignore-newline-validation" => { - call = call.ignore_newline_validation(arg_from_str(value.unwrap_or("false"), err, "ignore-newline-validation", "boolean")); + call = call.ignore_newline_validation( value.map(|v| arg_from_str(v, err, "ignore-newline-validation", "boolean")).unwrap_or(false)); }, "ignore-expiry-validation" => { - call = call.ignore_expiry_validation(arg_from_str(value.unwrap_or("false"), err, "ignore-expiry-validation", "boolean")); + call = call.ignore_expiry_validation( value.map(|v| arg_from_str(v, err, "ignore-expiry-validation", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -9612,6 +9984,355 @@ where } } + async fn _organizations_environments_keyvaluemaps_entries_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "value" => Some(("value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["name", "value"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1KeyValueEntry = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().environments_keyvaluemaps_entries_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let 
mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_keyvaluemaps_entries_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_keyvaluemaps_entries_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_keyvaluemaps_entries_get(&self, opt: 
&ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_keyvaluemaps_entries_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_keyvaluemaps_entries_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_keyvaluemaps_entries_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = 
parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_modify_environment(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, 
false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "api-proxy-type" => Some(("apiProxyType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "created-at" => Some(("createdAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "deployment-type" => Some(("deploymentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "forward-proxy-uri" => Some(("forwardProxyUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "last-modified-at" => Some(("lastModifiedAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-config.current-aggregate-node-count" => Some(("nodeConfig.currentAggregateNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.max-node-count" => Some(("nodeConfig.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.min-node-count" => Some(("nodeConfig.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-proxy-type", "created-at", "current-aggregate-node-count", "deployment-type", "description", "display-name", "forward-proxy-uri", 
"last-modified-at", "max-node-count", "min-node-count", "name", "node-config", "state"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1Environment = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().environments_modify_environment(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + 
let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_environments_optimized_stats_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().environments_optimized_stats_get(opt.value_of("name").unwrap_or("")); @@ -9622,7 +10343,7 @@ where call = call.tzo(value.unwrap_or("")); }, "ts-ascending" => { - call = call.ts_ascending(arg_from_str(value.unwrap_or("false"), err, "ts-ascending", "boolean")); + call = call.ts_ascending( value.map(|v| arg_from_str(v, err, "ts-ascending", "boolean")).unwrap_or(false)); }, "topk" => { call = call.topk(value.unwrap_or("")); @@ -9640,13 +10361,13 @@ where call = call.sort(value.unwrap_or("")); }, "sonar" => { - call = call.sonar(arg_from_str(value.unwrap_or("false"), err, "sonar", "boolean")); + call = call.sonar( value.map(|v| arg_from_str(v, err, "sonar", "boolean")).unwrap_or(false)); }, "select" => { call = call.select(value.unwrap_or("")); }, "realtime" => { - call = call.realtime(arg_from_str(value.unwrap_or("false"), err, "realtime", "boolean")); + call = call.realtime( value.map(|v| arg_from_str(v, err, "realtime", "boolean")).unwrap_or(false)); }, "offset" => { call = call.offset(value.unwrap_or("")); @@ -9907,6 +10628,58 @@ where } } + async fn _organizations_environments_queries_get_resulturl(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_queries_get_resulturl(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + 
found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_environments_queries_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().environments_queries_list(opt.value_of("parent").unwrap_or("")); @@ -10649,6 +11422,511 @@ where } } + async fn _organizations_environments_security_reports_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = 
temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "csv-delimiter" => Some(("csvDelimiter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dimensions" => Some(("dimensions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "envgroup-hostname" => Some(("envgroupHostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "group-by-time-unit" => Some(("groupByTimeUnit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "limit" => Some(("limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "mime-type" => Some(("mimeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "report-definition-id" => Some(("reportDefinitionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["csv-delimiter", "dimensions", "display-name", "envgroup-hostname", "filter", "group-by-time-unit", "limit", "mime-type", "report-definition-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1SecurityReportQuery = json::value::from_value(object).unwrap(); + let mut call = 
self.hub.organizations().environments_security_reports_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_security_reports_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_security_reports_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if 
key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_security_reports_get_result(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_security_reports_get_result(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } 
+ } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_security_reports_get_result_view(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_security_reports_get_result_view(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_security_reports_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().environments_security_reports_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "to" => { + call = call.to(value.unwrap_or("")); + }, + "submitted-by" => { + call = call.submitted_by(value.unwrap_or("")); + }, + "status" => { + call = call.status(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "from" => { + call = call.from(value.unwrap_or("")); + }, + "dataset" => { + call = call.dataset(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["dataset", "from", "page-size", "page-token", "status", "submitted-by", 
"to"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_security_stats_query_tabular_stats(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "dimensions" => Some(("dimensions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.end-time" => Some(("timeRange.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.start-time" => Some(("timeRange.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["dimensions", "end-time", "filter", "page-size", "page-token", "start-time", "time-range"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1QueryTabularStatsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().environments_security_stats_query_tabular_stats(request, opt.value_of("orgenv").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + 
} else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_environments_security_stats_query_time_series_stats(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "dimensions" => Some(("dimensions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.end-time" => Some(("timeRange.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.start-time" => Some(("timeRange.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "timestamp-order" => Some(("timestampOrder", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "window-size" => Some(("windowSize", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["dimensions", "end-time", "filter", "page-size", "page-token", "start-time", "time-range", "timestamp-order", "window-size"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1QueryTimeSeriesStatsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().environments_security_stats_query_time_series_stats(request, opt.value_of("orgenv").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_environments_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -10798,7 +12076,7 @@ where call = call.service_account(value.unwrap_or("")); }, "override" => { - call = call.override_(arg_from_str(value.unwrap_or("false"), err, "override", "boolean")); + call = call.override_( value.map(|v| arg_from_str(v, err, "override", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -10961,7 +12239,7 @@ where call = call.tzo(value.unwrap_or("")); }, "ts-ascending" => { - call = call.ts_ascending(arg_from_str(value.unwrap_or("false"), err, "ts-ascending", "boolean")); + call = call.ts_ascending( value.map(|v| arg_from_str(v, err, "ts-ascending", "boolean")).unwrap_or(false)); }, "topk" => { call = call.topk(value.unwrap_or("")); @@ -10979,13 +12257,13 @@ where call = call.sort(value.unwrap_or("")); }, "sonar" => { - call = 
call.sonar(arg_from_str(value.unwrap_or("false"), err, "sonar", "boolean")); + call = call.sonar( value.map(|v| arg_from_str(v, err, "sonar", "boolean")).unwrap_or(false)); }, "select" => { call = call.select(value.unwrap_or("")); }, "realtime" => { - call = call.realtime(arg_from_str(value.unwrap_or("false"), err, "realtime", "boolean")); + call = call.realtime( value.map(|v| arg_from_str(v, err, "realtime", "boolean")).unwrap_or(false)); }, "offset" => { call = call.offset(value.unwrap_or("")); @@ -11696,7 +12974,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11788,7 +13066,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -11950,11 +13228,15 @@ where "deployment-type" => Some(("deploymentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "forward-proxy-uri" => Some(("forwardProxyUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "last-modified-at" => Some(("lastModifiedAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-config.current-aggregate-node-count" => Some(("nodeConfig.currentAggregateNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + 
"node-config.max-node-count" => Some(("nodeConfig.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.min-node-count" => Some(("nodeConfig.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-proxy-type", "created-at", "deployment-type", "description", "display-name", "last-modified-at", "name", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-proxy-type", "created-at", "current-aggregate-node-count", "deployment-type", "description", "display-name", "forward-proxy-uri", "last-modified-at", "max-node-count", "min-node-count", "name", "node-config", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -12062,10 +13344,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "replace-repeated-fields" => { - call = call.replace_repeated_fields(arg_from_str(value.unwrap_or("false"), err, "replace-repeated-fields", "boolean")); + call = call.replace_repeated_fields( value.map(|v| arg_from_str(v, err, "replace-repeated-fields", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -12142,11 +13424,15 @@ where "deployment-type" => Some(("deploymentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "forward-proxy-uri" => Some(("forwardProxyUri", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "last-modified-at" => Some(("lastModifiedAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-config.current-aggregate-node-count" => Some(("nodeConfig.currentAggregateNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.max-node-count" => Some(("nodeConfig.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.min-node-count" => Some(("nodeConfig.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-proxy-type", "created-at", "deployment-type", "description", "display-name", "last-modified-at", "name", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-proxy-type", "created-at", "current-aggregate-node-count", "deployment-type", "description", "display-name", "forward-proxy-uri", "last-modified-at", "max-node-count", "min-node-count", "name", "node-config", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -12249,7 +13535,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -12406,6 +13692,58 @@ where } } + async fn _organizations_get_project_mapping(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().get_project_mapping(opt.value_of("name").unwrap_or("")); + for parg 
in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_get_runtime_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().get_runtime_config(opt.value_of("name").unwrap_or("")); @@ -12865,6 +14203,332 @@ where } } + async fn _organizations_host_security_reports_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "csv-delimiter" => Some(("csvDelimiter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dimensions" => Some(("dimensions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "envgroup-hostname" => Some(("envgroupHostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "group-by-time-unit" => Some(("groupByTimeUnit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "limit" => Some(("limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "mime-type" => Some(("mimeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "report-definition-id" => Some(("reportDefinitionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["csv-delimiter", "dimensions", "display-name", "envgroup-hostname", "filter", "group-by-time-unit", "limit", "mime-type", "report-definition-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + 
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1SecurityReportQuery = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().host_security_reports_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_host_security_reports_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().host_security_reports_get(opt.value_of("name").unwrap_or("")); 
+ for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_host_security_reports_get_result(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().host_security_reports_get_result(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", 
key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_host_security_reports_get_result_view(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().host_security_reports_get_result_view(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() 
== 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_host_security_reports_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().host_security_reports_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "to" => { + call = call.to(value.unwrap_or("")); + }, + "submitted-by" => { + call = call.submitted_by(value.unwrap_or("")); + }, + "status" => { + call = call.status(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "from" => { + call = call.from(value.unwrap_or("")); + }, + "envgroup-hostname" => { + call = call.envgroup_hostname(value.unwrap_or("")); + }, + "dataset" => { + call = call.dataset(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if 
!found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["dataset", "envgroup-hostname", "from", "page-size", "page-token", "status", "submitted-by", "to"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_host_stats_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().host_stats_get(opt.value_of("name").unwrap_or("")); @@ -12875,7 +14539,7 @@ where call = call.tzo(value.unwrap_or("")); }, "ts-ascending" => { - call = call.ts_ascending(arg_from_str(value.unwrap_or("false"), err, "ts-ascending", "boolean")); + call = call.ts_ascending( value.map(|v| arg_from_str(v, err, "ts-ascending", "boolean")).unwrap_or(false)); }, "topk" => { call = call.topk(value.unwrap_or("")); @@ -12896,7 +14560,7 @@ where call = call.select(value.unwrap_or("")); }, "realtime" => { - call = call.realtime(arg_from_str(value.unwrap_or("false"), err, "realtime", "boolean")); + call = call.realtime( value.map(|v| arg_from_str(v, err, "realtime", 
"boolean")).unwrap_or(false)); }, "offset" => { call = call.offset(value.unwrap_or("")); @@ -13161,7 +14825,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13380,6 +15044,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "consumer-accept-list" => Some(("consumerAcceptList", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "created-at" => Some(("createdAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-encryption-key-name" => Some(("diskEncryptionKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -13392,9 +15057,10 @@ where "peering-cidr-range" => Some(("peeringCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "port" => Some(("port", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "runtime-version" => Some(("runtimeVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-attachment" => Some(("serviceAttachment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["created-at", "description", "disk-encryption-key-name", "display-name", "host", "ip-range", "last-modified-at", "location", "name", "peering-cidr-range", "port", "runtime-version", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["consumer-accept-list", "created-at", "description", "disk-encryption-key-name", "display-name", "host", "ip-range", 
"last-modified-at", "location", "name", "peering-cidr-range", "port", "runtime-version", "service-attachment", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -13568,7 +15234,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13902,7 +15568,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13974,6 +15640,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "consumer-accept-list" => Some(("consumerAcceptList", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "created-at" => Some(("createdAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-encryption-key-name" => Some(("diskEncryptionKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -13986,9 +15653,10 @@ where "peering-cidr-range" => Some(("peeringCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "port" => Some(("port", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "runtime-version" => Some(("runtimeVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-attachment" => Some(("serviceAttachment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["created-at", "description", "disk-encryption-key-name", "display-name", "host", "ip-range", "last-modified-at", "location", "name", "peering-cidr-range", "port", "runtime-version", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["consumer-accept-list", "created-at", "description", "disk-encryption-key-name", "display-name", "host", "ip-range", "last-modified-at", "location", "name", "peering-cidr-range", "port", "runtime-version", "service-attachment", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -14003,7 +15671,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -14276,6 +15944,255 @@ where } } + async fn _organizations_keyvaluemaps_entries_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "value" => Some(("value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["name", "value"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1KeyValueEntry = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().keyvaluemaps_entries_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == 
*param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_keyvaluemaps_entries_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().keyvaluemaps_entries_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_keyvaluemaps_entries_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().keyvaluemaps_entries_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_keyvaluemaps_entries_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().keyvaluemaps_entries_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + 
match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().list(opt.value_of("parent").unwrap_or("")); @@ -14390,7 +16307,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -14452,7 +16369,7 @@ where call = call.tzo(value.unwrap_or("")); }, "ts-ascending" => { - call = call.ts_ascending(arg_from_str(value.unwrap_or("false"), err, "ts-ascending", "boolean")); + call = call.ts_ascending( value.map(|v| arg_from_str(v, err, "ts-ascending", "boolean")).unwrap_or(false)); }, "topk" => { call = call.topk(value.unwrap_or("")); @@ -14473,7 +16390,7 @@ where call = call.select(value.unwrap_or("")); }, "realtime" => { - call = call.realtime(arg_from_str(value.unwrap_or("false"), err, "realtime", "boolean")); + call = call.realtime( value.map(|v| arg_from_str(v, err, "realtime", "boolean")).unwrap_or(false)); }, "offset" => { call = call.offset(value.unwrap_or("")); @@ -14752,7 +16669,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "expand" => { - call = call.expand(arg_from_str(value.unwrap_or("false"), err, "expand", "boolean")); + call = call.expand( value.map(|v| arg_from_str(v, err, "expand", "boolean")).unwrap_or(false)); }, _ => { let mut found = 
false; @@ -14905,6 +16822,403 @@ where } } + async fn _organizations_security_profiles_environments_compute_environment_scores(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.end-time" => Some(("timeRange.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.start-time" => Some(("timeRange.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["end-time", "page-size", "page-token", "start-time", "time-range"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1ComputeEnvironmentScoresRequest = json::value::from_value(object).unwrap(); + let mut call = 
self.hub.organizations().security_profiles_environments_compute_environment_scores(request, opt.value_of("profile-environment").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_security_profiles_environments_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, 
false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "attach-time" => Some(("attachTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "security-profile-revision-id" => Some(("securityProfileRevisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["attach-time", "name", "security-profile-revision-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudApigeeV1SecurityProfileEnvironmentAssociation = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().security_profiles_environments_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_security_profiles_environments_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().security_profiles_environments_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => 
return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_security_profiles_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().security_profiles_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde 
to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_security_profiles_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().security_profiles_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, 
&value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_security_profiles_list_revisions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().security_profiles_list_revisions(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + 
async fn _organizations_set_addons(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -14929,12 +17243,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "addons-config.advanced-api-ops-config.enabled" => Some(("addonsConfig.advancedApiOpsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "addons-config.api-security-config.enabled" => Some(("addonsConfig.apiSecurityConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "addons-config.api-security-config.expires-at" => Some(("addonsConfig.apiSecurityConfig.expiresAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "addons-config.connectors-platform-config.enabled" => Some(("addonsConfig.connectorsPlatformConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.connectors-platform-config.expires-at" => Some(("addonsConfig.connectorsPlatformConfig.expiresAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "addons-config.integration-config.enabled" => Some(("addonsConfig.integrationConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.monetization-config.enabled" => Some(("addonsConfig.monetizationConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-api-ops-config", "connectors-platform-config", "enabled", "expires-at", "integration-config", "monetization-config"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-api-ops-config", "api-security-config", "connectors-platform-config", "enabled", "expires-at", "integration-config", "monetization-config"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| 
v.to_string())))); None } @@ -15336,10 +17652,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "include-revisions" => { - call = call.include_revisions(arg_from_str(value.unwrap_or("false"), err, "include-revisions", "boolean")); + call = call.include_revisions( value.map(|v| arg_from_str(v, err, "include-revisions", "boolean")).unwrap_or(false)); }, "include-meta-data" => { - call = call.include_meta_data(arg_from_str(value.unwrap_or("false"), err, "include-meta-data", "boolean")); + call = call.include_meta_data( value.map(|v| arg_from_str(v, err, "include-meta-data", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15589,7 +17905,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate" => { - call = call.validate(arg_from_str(value.unwrap_or("false"), err, "validate", "boolean")); + call = call.validate( value.map(|v| arg_from_str(v, err, "validate", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15994,11 +18310,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "addons-config.advanced-api-ops-config.enabled" => Some(("addonsConfig.advancedApiOpsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "addons-config.api-security-config.enabled" => Some(("addonsConfig.apiSecurityConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "addons-config.api-security-config.expires-at" => Some(("addonsConfig.apiSecurityConfig.expiresAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "addons-config.connectors-platform-config.enabled" => Some(("addonsConfig.connectorsPlatformConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.connectors-platform-config.expires-at" => Some(("addonsConfig.connectorsPlatformConfig.expiresAt", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "addons-config.integration-config.enabled" => Some(("addonsConfig.integrationConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.monetization-config.enabled" => Some(("addonsConfig.monetizationConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "analytics-region" => Some(("analyticsRegion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "apigee-project-id" => Some(("apigeeProjectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "attributes" => Some(("attributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "authorized-network" => Some(("authorizedNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "billing-type" => Some(("billingType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -16019,7 +18338,7 @@ where "subscription-type" => Some(("subscriptionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-api-ops-config", "analytics-region", "attributes", "authorized-network", "billing-type", "ca-certificate", "connectors-platform-config", "created-at", "customer-name", "description", "display-name", "enabled", "environments", "expires-at", "integration-config", "last-modified-at", "monetization-config", "name", "portal-disabled", "project-id", "runtime-database-encryption-key-name", "runtime-type", "state", "subscription-type", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-api-ops-config", "analytics-region", "api-security-config", "apigee-project-id", "attributes", "authorized-network", "billing-type", "ca-certificate", "connectors-platform-config", "created-at", "customer-name", "description", "display-name", "enabled", "environments", "expires-at", "integration-config", "last-modified-at", "monetization-config", "name", "portal-disabled", "project-id", "runtime-database-encryption-key-name", "runtime-type", "state", "subscription-type", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -16265,6 +18584,18 @@ where ("apis-keyvaluemaps-delete", Some(opt)) => { call_result = self._organizations_apis_keyvaluemaps_delete(opt, dry_run, &mut err).await; }, + ("apis-keyvaluemaps-entries-create", Some(opt)) => { + call_result = self._organizations_apis_keyvaluemaps_entries_create(opt, dry_run, &mut err).await; + }, + ("apis-keyvaluemaps-entries-delete", Some(opt)) => { + call_result = self._organizations_apis_keyvaluemaps_entries_delete(opt, dry_run, &mut err).await; + }, + ("apis-keyvaluemaps-entries-get", Some(opt)) => { + call_result = self._organizations_apis_keyvaluemaps_entries_get(opt, dry_run, &mut err).await; + }, + ("apis-keyvaluemaps-entries-list", Some(opt)) => { + call_result = 
self._organizations_apis_keyvaluemaps_entries_list(opt, dry_run, &mut err).await; + }, ("apis-list", Some(opt)) => { call_result = self._organizations_apis_list(opt, dry_run, &mut err).await; }, @@ -16463,6 +18794,9 @@ where ("envgroups-get", Some(opt)) => { call_result = self._organizations_envgroups_get(opt, dry_run, &mut err).await; }, + ("envgroups-get-deployed-ingress-config", Some(opt)) => { + call_result = self._organizations_envgroups_get_deployed_ingress_config(opt, dry_run, &mut err).await; + }, ("envgroups-list", Some(opt)) => { call_result = self._organizations_envgroups_list(opt, dry_run, &mut err).await; }, @@ -16559,6 +18893,9 @@ where ("environments-get", Some(opt)) => { call_result = self._organizations_environments_get(opt, dry_run, &mut err).await; }, + ("environments-get-api-security-runtime-config", Some(opt)) => { + call_result = self._organizations_environments_get_api_security_runtime_config(opt, dry_run, &mut err).await; + }, ("environments-get-debugmask", Some(opt)) => { call_result = self._organizations_environments_get_debugmask(opt, dry_run, &mut err).await; }, @@ -16604,6 +18941,21 @@ where ("environments-keyvaluemaps-delete", Some(opt)) => { call_result = self._organizations_environments_keyvaluemaps_delete(opt, dry_run, &mut err).await; }, + ("environments-keyvaluemaps-entries-create", Some(opt)) => { + call_result = self._organizations_environments_keyvaluemaps_entries_create(opt, dry_run, &mut err).await; + }, + ("environments-keyvaluemaps-entries-delete", Some(opt)) => { + call_result = self._organizations_environments_keyvaluemaps_entries_delete(opt, dry_run, &mut err).await; + }, + ("environments-keyvaluemaps-entries-get", Some(opt)) => { + call_result = self._organizations_environments_keyvaluemaps_entries_get(opt, dry_run, &mut err).await; + }, + ("environments-keyvaluemaps-entries-list", Some(opt)) => { + call_result = self._organizations_environments_keyvaluemaps_entries_list(opt, dry_run, &mut err).await; + }, + 
("environments-modify-environment", Some(opt)) => { + call_result = self._organizations_environments_modify_environment(opt, dry_run, &mut err).await; + }, ("environments-optimized-stats-get", Some(opt)) => { call_result = self._organizations_environments_optimized_stats_get(opt, dry_run, &mut err).await; }, @@ -16616,6 +18968,9 @@ where ("environments-queries-get-result", Some(opt)) => { call_result = self._organizations_environments_queries_get_result(opt, dry_run, &mut err).await; }, + ("environments-queries-get-resulturl", Some(opt)) => { + call_result = self._organizations_environments_queries_get_resulturl(opt, dry_run, &mut err).await; + }, ("environments-queries-list", Some(opt)) => { call_result = self._organizations_environments_queries_list(opt, dry_run, &mut err).await; }, @@ -16649,6 +19004,27 @@ where ("environments-resourcefiles-update", Some(opt)) => { call_result = self._organizations_environments_resourcefiles_update(opt, dry_run, &mut err).await; }, + ("environments-security-reports-create", Some(opt)) => { + call_result = self._organizations_environments_security_reports_create(opt, dry_run, &mut err).await; + }, + ("environments-security-reports-get", Some(opt)) => { + call_result = self._organizations_environments_security_reports_get(opt, dry_run, &mut err).await; + }, + ("environments-security-reports-get-result", Some(opt)) => { + call_result = self._organizations_environments_security_reports_get_result(opt, dry_run, &mut err).await; + }, + ("environments-security-reports-get-result-view", Some(opt)) => { + call_result = self._organizations_environments_security_reports_get_result_view(opt, dry_run, &mut err).await; + }, + ("environments-security-reports-list", Some(opt)) => { + call_result = self._organizations_environments_security_reports_list(opt, dry_run, &mut err).await; + }, + ("environments-security-stats-query-tabular-stats", Some(opt)) => { + call_result = self._organizations_environments_security_stats_query_tabular_stats(opt, 
dry_run, &mut err).await; + }, + ("environments-security-stats-query-time-series-stats", Some(opt)) => { + call_result = self._organizations_environments_security_stats_query_time_series_stats(opt, dry_run, &mut err).await; + }, ("environments-set-iam-policy", Some(opt)) => { call_result = self._organizations_environments_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -16721,6 +19097,9 @@ where ("get-deployed-ingress-config", Some(opt)) => { call_result = self._organizations_get_deployed_ingress_config(opt, dry_run, &mut err).await; }, + ("get-project-mapping", Some(opt)) => { + call_result = self._organizations_get_project_mapping(opt, dry_run, &mut err).await; + }, ("get-runtime-config", Some(opt)) => { call_result = self._organizations_get_runtime_config(opt, dry_run, &mut err).await; }, @@ -16742,6 +19121,21 @@ where ("host-queries-list", Some(opt)) => { call_result = self._organizations_host_queries_list(opt, dry_run, &mut err).await; }, + ("host-security-reports-create", Some(opt)) => { + call_result = self._organizations_host_security_reports_create(opt, dry_run, &mut err).await; + }, + ("host-security-reports-get", Some(opt)) => { + call_result = self._organizations_host_security_reports_get(opt, dry_run, &mut err).await; + }, + ("host-security-reports-get-result", Some(opt)) => { + call_result = self._organizations_host_security_reports_get_result(opt, dry_run, &mut err).await; + }, + ("host-security-reports-get-result-view", Some(opt)) => { + call_result = self._organizations_host_security_reports_get_result_view(opt, dry_run, &mut err).await; + }, + ("host-security-reports-list", Some(opt)) => { + call_result = self._organizations_host_security_reports_list(opt, dry_run, &mut err).await; + }, ("host-stats-get", Some(opt)) => { call_result = self._organizations_host_stats_get(opt, dry_run, &mut err).await; }, @@ -16802,6 +19196,18 @@ where ("keyvaluemaps-delete", Some(opt)) => { call_result = self._organizations_keyvaluemaps_delete(opt, dry_run, &mut 
err).await; }, + ("keyvaluemaps-entries-create", Some(opt)) => { + call_result = self._organizations_keyvaluemaps_entries_create(opt, dry_run, &mut err).await; + }, + ("keyvaluemaps-entries-delete", Some(opt)) => { + call_result = self._organizations_keyvaluemaps_entries_delete(opt, dry_run, &mut err).await; + }, + ("keyvaluemaps-entries-get", Some(opt)) => { + call_result = self._organizations_keyvaluemaps_entries_get(opt, dry_run, &mut err).await; + }, + ("keyvaluemaps-entries-list", Some(opt)) => { + call_result = self._organizations_keyvaluemaps_entries_list(opt, dry_run, &mut err).await; + }, ("list", Some(opt)) => { call_result = self._organizations_list(opt, dry_run, &mut err).await; }, @@ -16829,6 +19235,24 @@ where ("reports-update", Some(opt)) => { call_result = self._organizations_reports_update(opt, dry_run, &mut err).await; }, + ("security-profiles-environments-compute-environment-scores", Some(opt)) => { + call_result = self._organizations_security_profiles_environments_compute_environment_scores(opt, dry_run, &mut err).await; + }, + ("security-profiles-environments-create", Some(opt)) => { + call_result = self._organizations_security_profiles_environments_create(opt, dry_run, &mut err).await; + }, + ("security-profiles-environments-delete", Some(opt)) => { + call_result = self._organizations_security_profiles_environments_delete(opt, dry_run, &mut err).await; + }, + ("security-profiles-get", Some(opt)) => { + call_result = self._organizations_security_profiles_get(opt, dry_run, &mut err).await; + }, + ("security-profiles-list", Some(opt)) => { + call_result = self._organizations_security_profiles_list(opt, dry_run, &mut err).await; + }, + ("security-profiles-list-revisions", Some(opt)) => { + call_result = self._organizations_security_profiles_list_revisions(opt, dry_run, &mut err).await; + }, ("set-addons", Some(opt)) => { call_result = self._organizations_set_addons(opt, dry_run, &mut err).await; }, @@ -16995,7 +19419,7 @@ async fn main() { ]), ]), 
- ("organizations", "methods: 'analytics-datastores-create', 'analytics-datastores-delete', 'analytics-datastores-get', 'analytics-datastores-list', 'analytics-datastores-test', 'analytics-datastores-update', 'apiproducts-attributes', 'apiproducts-attributes-delete', 'apiproducts-attributes-get', 'apiproducts-attributes-list', 'apiproducts-attributes-update-api-product-attribute', 'apiproducts-create', 'apiproducts-delete', 'apiproducts-get', 'apiproducts-list', 'apiproducts-rateplans-create', 'apiproducts-rateplans-delete', 'apiproducts-rateplans-get', 'apiproducts-rateplans-list', 'apiproducts-rateplans-update', 'apiproducts-update', 'apis-create', 'apis-delete', 'apis-deployments-list', 'apis-get', 'apis-keyvaluemaps-create', 'apis-keyvaluemaps-delete', 'apis-list', 'apis-patch', 'apis-revisions-delete', 'apis-revisions-deployments-list', 'apis-revisions-get', 'apis-revisions-update-api-proxy-revision', 'apps-get', 'apps-list', 'create', 'datacollectors-create', 'datacollectors-delete', 'datacollectors-get', 'datacollectors-list', 'datacollectors-patch', 'delete', 'deployments-list', 'developers-apps-attributes', 'developers-apps-attributes-delete', 'developers-apps-attributes-get', 'developers-apps-attributes-list', 'developers-apps-attributes-update-developer-app-attribute', 'developers-apps-create', 'developers-apps-delete', 'developers-apps-generate-key-pair-or-update-developer-app-status', 'developers-apps-get', 'developers-apps-keys-apiproducts-delete', 'developers-apps-keys-apiproducts-update-developer-app-key-api-product', 'developers-apps-keys-create', 'developers-apps-keys-create-create', 'developers-apps-keys-delete', 'developers-apps-keys-get', 'developers-apps-keys-replace-developer-app-key', 'developers-apps-keys-update-developer-app-key', 'developers-apps-list', 'developers-apps-update', 'developers-attributes', 'developers-attributes-delete', 'developers-attributes-get', 'developers-attributes-list', 
'developers-attributes-update-developer-attribute', 'developers-balance-adjust', 'developers-balance-credit', 'developers-create', 'developers-delete', 'developers-get', 'developers-get-balance', 'developers-get-monetization-config', 'developers-list', 'developers-set-developer-status', 'developers-subscriptions-create', 'developers-subscriptions-expire', 'developers-subscriptions-get', 'developers-subscriptions-list', 'developers-update', 'developers-update-monetization-config', 'endpoint-attachments-create', 'endpoint-attachments-delete', 'endpoint-attachments-get', 'endpoint-attachments-list', 'envgroups-attachments-create', 'envgroups-attachments-delete', 'envgroups-attachments-get', 'envgroups-attachments-list', 'envgroups-create', 'envgroups-delete', 'envgroups-get', 'envgroups-list', 'envgroups-patch', 'environments-analytics-admin-get-schemav2', 'environments-analytics-exports-create', 'environments-analytics-exports-get', 'environments-analytics-exports-list', 'environments-apis-deployments-list', 'environments-apis-revisions-debugsessions-create', 'environments-apis-revisions-debugsessions-data-get', 'environments-apis-revisions-debugsessions-delete-data', 'environments-apis-revisions-debugsessions-get', 'environments-apis-revisions-debugsessions-list', 'environments-apis-revisions-deploy', 'environments-apis-revisions-deployments-generate-deploy-change-report', 'environments-apis-revisions-deployments-generate-undeploy-change-report', 'environments-apis-revisions-get-deployments', 'environments-apis-revisions-undeploy', 'environments-archive-deployments-create', 'environments-archive-deployments-delete', 'environments-archive-deployments-generate-download-url', 'environments-archive-deployments-generate-upload-url', 'environments-archive-deployments-get', 'environments-archive-deployments-list', 'environments-archive-deployments-patch', 'environments-caches-delete', 'environments-create', 'environments-delete', 'environments-deployments-list', 
'environments-flowhooks-attach-shared-flow-to-flow-hook', 'environments-flowhooks-detach-shared-flow-from-flow-hook', 'environments-flowhooks-get', 'environments-get', 'environments-get-debugmask', 'environments-get-deployed-config', 'environments-get-iam-policy', 'environments-get-trace-config', 'environments-keystores-aliases-create', 'environments-keystores-aliases-csr', 'environments-keystores-aliases-delete', 'environments-keystores-aliases-get', 'environments-keystores-aliases-get-certificate', 'environments-keystores-aliases-update', 'environments-keystores-create', 'environments-keystores-delete', 'environments-keystores-get', 'environments-keyvaluemaps-create', 'environments-keyvaluemaps-delete', 'environments-optimized-stats-get', 'environments-queries-create', 'environments-queries-get', 'environments-queries-get-result', 'environments-queries-list', 'environments-references-create', 'environments-references-delete', 'environments-references-get', 'environments-references-update', 'environments-resourcefiles-create', 'environments-resourcefiles-delete', 'environments-resourcefiles-get', 'environments-resourcefiles-list', 'environments-resourcefiles-list-environment-resources', 'environments-resourcefiles-update', 'environments-set-iam-policy', 'environments-sharedflows-deployments-list', 'environments-sharedflows-revisions-deploy', 'environments-sharedflows-revisions-get-deployments', 'environments-sharedflows-revisions-undeploy', 'environments-stats-get', 'environments-subscribe', 'environments-targetservers-create', 'environments-targetservers-delete', 'environments-targetservers-get', 'environments-targetservers-update', 'environments-test-iam-permissions', 'environments-trace-config-overrides-create', 'environments-trace-config-overrides-delete', 'environments-trace-config-overrides-get', 'environments-trace-config-overrides-list', 'environments-trace-config-overrides-patch', 'environments-unsubscribe', 'environments-update', 
'environments-update-debugmask', 'environments-update-environment', 'environments-update-trace-config', 'get', 'get-deployed-ingress-config', 'get-runtime-config', 'get-sync-authorization', 'host-queries-create', 'host-queries-get', 'host-queries-get-result', 'host-queries-get-result-view', 'host-queries-list', 'host-stats-get', 'instances-attachments-create', 'instances-attachments-delete', 'instances-attachments-get', 'instances-attachments-list', 'instances-canaryevaluations-create', 'instances-canaryevaluations-get', 'instances-create', 'instances-delete', 'instances-get', 'instances-list', 'instances-nat-addresses-activate', 'instances-nat-addresses-create', 'instances-nat-addresses-delete', 'instances-nat-addresses-get', 'instances-nat-addresses-list', 'instances-patch', 'instances-report-status', 'keyvaluemaps-create', 'keyvaluemaps-delete', 'list', 'operations-get', 'operations-list', 'optimized-host-stats-get', 'reports-create', 'reports-delete', 'reports-get', 'reports-list', 'reports-update', 'set-addons', 'set-sync-authorization', 'sharedflows-create', 'sharedflows-delete', 'sharedflows-deployments-list', 'sharedflows-get', 'sharedflows-list', 'sharedflows-revisions-delete', 'sharedflows-revisions-deployments-list', 'sharedflows-revisions-get', 'sharedflows-revisions-update-shared-flow-revision', 'sites-apicategories-create', 'sites-apicategories-delete', 'sites-apicategories-get', 'sites-apicategories-list', 'sites-apicategories-patch' and 'update'", vec![ + ("organizations", "methods: 'analytics-datastores-create', 'analytics-datastores-delete', 'analytics-datastores-get', 'analytics-datastores-list', 'analytics-datastores-test', 'analytics-datastores-update', 'apiproducts-attributes', 'apiproducts-attributes-delete', 'apiproducts-attributes-get', 'apiproducts-attributes-list', 'apiproducts-attributes-update-api-product-attribute', 'apiproducts-create', 'apiproducts-delete', 'apiproducts-get', 'apiproducts-list', 'apiproducts-rateplans-create', 
'apiproducts-rateplans-delete', 'apiproducts-rateplans-get', 'apiproducts-rateplans-list', 'apiproducts-rateplans-update', 'apiproducts-update', 'apis-create', 'apis-delete', 'apis-deployments-list', 'apis-get', 'apis-keyvaluemaps-create', 'apis-keyvaluemaps-delete', 'apis-keyvaluemaps-entries-create', 'apis-keyvaluemaps-entries-delete', 'apis-keyvaluemaps-entries-get', 'apis-keyvaluemaps-entries-list', 'apis-list', 'apis-patch', 'apis-revisions-delete', 'apis-revisions-deployments-list', 'apis-revisions-get', 'apis-revisions-update-api-proxy-revision', 'apps-get', 'apps-list', 'create', 'datacollectors-create', 'datacollectors-delete', 'datacollectors-get', 'datacollectors-list', 'datacollectors-patch', 'delete', 'deployments-list', 'developers-apps-attributes', 'developers-apps-attributes-delete', 'developers-apps-attributes-get', 'developers-apps-attributes-list', 'developers-apps-attributes-update-developer-app-attribute', 'developers-apps-create', 'developers-apps-delete', 'developers-apps-generate-key-pair-or-update-developer-app-status', 'developers-apps-get', 'developers-apps-keys-apiproducts-delete', 'developers-apps-keys-apiproducts-update-developer-app-key-api-product', 'developers-apps-keys-create', 'developers-apps-keys-create-create', 'developers-apps-keys-delete', 'developers-apps-keys-get', 'developers-apps-keys-replace-developer-app-key', 'developers-apps-keys-update-developer-app-key', 'developers-apps-list', 'developers-apps-update', 'developers-attributes', 'developers-attributes-delete', 'developers-attributes-get', 'developers-attributes-list', 'developers-attributes-update-developer-attribute', 'developers-balance-adjust', 'developers-balance-credit', 'developers-create', 'developers-delete', 'developers-get', 'developers-get-balance', 'developers-get-monetization-config', 'developers-list', 'developers-set-developer-status', 'developers-subscriptions-create', 'developers-subscriptions-expire', 'developers-subscriptions-get', 
'developers-subscriptions-list', 'developers-update', 'developers-update-monetization-config', 'endpoint-attachments-create', 'endpoint-attachments-delete', 'endpoint-attachments-get', 'endpoint-attachments-list', 'envgroups-attachments-create', 'envgroups-attachments-delete', 'envgroups-attachments-get', 'envgroups-attachments-list', 'envgroups-create', 'envgroups-delete', 'envgroups-get', 'envgroups-get-deployed-ingress-config', 'envgroups-list', 'envgroups-patch', 'environments-analytics-admin-get-schemav2', 'environments-analytics-exports-create', 'environments-analytics-exports-get', 'environments-analytics-exports-list', 'environments-apis-deployments-list', 'environments-apis-revisions-debugsessions-create', 'environments-apis-revisions-debugsessions-data-get', 'environments-apis-revisions-debugsessions-delete-data', 'environments-apis-revisions-debugsessions-get', 'environments-apis-revisions-debugsessions-list', 'environments-apis-revisions-deploy', 'environments-apis-revisions-deployments-generate-deploy-change-report', 'environments-apis-revisions-deployments-generate-undeploy-change-report', 'environments-apis-revisions-get-deployments', 'environments-apis-revisions-undeploy', 'environments-archive-deployments-create', 'environments-archive-deployments-delete', 'environments-archive-deployments-generate-download-url', 'environments-archive-deployments-generate-upload-url', 'environments-archive-deployments-get', 'environments-archive-deployments-list', 'environments-archive-deployments-patch', 'environments-caches-delete', 'environments-create', 'environments-delete', 'environments-deployments-list', 'environments-flowhooks-attach-shared-flow-to-flow-hook', 'environments-flowhooks-detach-shared-flow-from-flow-hook', 'environments-flowhooks-get', 'environments-get', 'environments-get-api-security-runtime-config', 'environments-get-debugmask', 'environments-get-deployed-config', 'environments-get-iam-policy', 'environments-get-trace-config', 
'environments-keystores-aliases-create', 'environments-keystores-aliases-csr', 'environments-keystores-aliases-delete', 'environments-keystores-aliases-get', 'environments-keystores-aliases-get-certificate', 'environments-keystores-aliases-update', 'environments-keystores-create', 'environments-keystores-delete', 'environments-keystores-get', 'environments-keyvaluemaps-create', 'environments-keyvaluemaps-delete', 'environments-keyvaluemaps-entries-create', 'environments-keyvaluemaps-entries-delete', 'environments-keyvaluemaps-entries-get', 'environments-keyvaluemaps-entries-list', 'environments-modify-environment', 'environments-optimized-stats-get', 'environments-queries-create', 'environments-queries-get', 'environments-queries-get-result', 'environments-queries-get-resulturl', 'environments-queries-list', 'environments-references-create', 'environments-references-delete', 'environments-references-get', 'environments-references-update', 'environments-resourcefiles-create', 'environments-resourcefiles-delete', 'environments-resourcefiles-get', 'environments-resourcefiles-list', 'environments-resourcefiles-list-environment-resources', 'environments-resourcefiles-update', 'environments-security-reports-create', 'environments-security-reports-get', 'environments-security-reports-get-result', 'environments-security-reports-get-result-view', 'environments-security-reports-list', 'environments-security-stats-query-tabular-stats', 'environments-security-stats-query-time-series-stats', 'environments-set-iam-policy', 'environments-sharedflows-deployments-list', 'environments-sharedflows-revisions-deploy', 'environments-sharedflows-revisions-get-deployments', 'environments-sharedflows-revisions-undeploy', 'environments-stats-get', 'environments-subscribe', 'environments-targetservers-create', 'environments-targetservers-delete', 'environments-targetservers-get', 'environments-targetservers-update', 'environments-test-iam-permissions', 
'environments-trace-config-overrides-create', 'environments-trace-config-overrides-delete', 'environments-trace-config-overrides-get', 'environments-trace-config-overrides-list', 'environments-trace-config-overrides-patch', 'environments-unsubscribe', 'environments-update', 'environments-update-debugmask', 'environments-update-environment', 'environments-update-trace-config', 'get', 'get-deployed-ingress-config', 'get-project-mapping', 'get-runtime-config', 'get-sync-authorization', 'host-queries-create', 'host-queries-get', 'host-queries-get-result', 'host-queries-get-result-view', 'host-queries-list', 'host-security-reports-create', 'host-security-reports-get', 'host-security-reports-get-result', 'host-security-reports-get-result-view', 'host-security-reports-list', 'host-stats-get', 'instances-attachments-create', 'instances-attachments-delete', 'instances-attachments-get', 'instances-attachments-list', 'instances-canaryevaluations-create', 'instances-canaryevaluations-get', 'instances-create', 'instances-delete', 'instances-get', 'instances-list', 'instances-nat-addresses-activate', 'instances-nat-addresses-create', 'instances-nat-addresses-delete', 'instances-nat-addresses-get', 'instances-nat-addresses-list', 'instances-patch', 'instances-report-status', 'keyvaluemaps-create', 'keyvaluemaps-delete', 'keyvaluemaps-entries-create', 'keyvaluemaps-entries-delete', 'keyvaluemaps-entries-get', 'keyvaluemaps-entries-list', 'list', 'operations-get', 'operations-list', 'optimized-host-stats-get', 'reports-create', 'reports-delete', 'reports-get', 'reports-list', 'reports-update', 'security-profiles-environments-compute-environment-scores', 'security-profiles-environments-create', 'security-profiles-environments-delete', 'security-profiles-get', 'security-profiles-list', 'security-profiles-list-revisions', 'set-addons', 'set-sync-authorization', 'sharedflows-create', 'sharedflows-delete', 'sharedflows-deployments-list', 'sharedflows-get', 'sharedflows-list', 
'sharedflows-revisions-delete', 'sharedflows-revisions-deployments-list', 'sharedflows-revisions-get', 'sharedflows-revisions-update-shared-flow-revision', 'sites-apicategories-create', 'sites-apicategories-delete', 'sites-apicategories-get', 'sites-apicategories-list', 'sites-apicategories-patch' and 'update'", vec![ ("analytics-datastores-create", Some(r##"Create a Datastore for an org"##), "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_analytics-datastores-create", @@ -17650,6 +20074,100 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apis-keyvaluemaps-entries-create", + Some(r##"Creates key value entries in a key value map scoped to an organization, environment, or API proxy. **Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_apis-keyvaluemaps-entries-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to create the key value map entry. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}`. 
* `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apis-keyvaluemaps-entries-delete", + Some(r##"Deletes a key value entry from a key value map scoped to an organization, environment, or API proxy. **Notes:** * After you delete the key value entry, the policy consuming the entry will continue to function with its cached values for a few minutes. This is expected behavior. * Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_apis-keyvaluemaps-entries-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to delete the key value map entry. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}/entries/{entry}`. 
* `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}/entries/{entry}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}/entries/{entry}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apis-keyvaluemaps-entries-get", + Some(r##"Get the key value entry value for a key value map scoped to an organization, environment, or API proxy. **Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_apis-keyvaluemaps-entries-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to fetch the key value map entry/value. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}/entries/{entry}`. * `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}/entries/{entry}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}/entries/{entry}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apis-keyvaluemaps-entries-list", + Some(r##"Lists key value entries for key values maps scoped to an organization, environment, or API proxy. 
**Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_apis-keyvaluemaps-entries-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to list key value maps. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}`. * `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -17989,7 +20507,7 @@ async fn main() { Some(false)), ]), ("delete", - Some(r##"Delete an Apigee organization. Only supported for SubscriptionType TRIAL."##), + Some(r##"Delete an Apigee organization. For organizations with BillingType EVALUATION, an immediate deletion is performed. For paid organizations, a soft-deletion is performed. The organization can be restored within the soft-deletion period which can be controlled using the retention field in the request."##), "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_delete", vec![ (Some(r##"name"##), @@ -18205,7 +20723,7 @@ async fn main() { Some(false)), ]), ("developers-apps-generate-key-pair-or-update-developer-app-status", - Some(r##"Manages access to a developer app by enabling you to: * Approve or revoke a developer app * Generate a new consumer key and secret for a developer app To approve or revoke a developer app, set the `action` query parameter to `approved` or `revoked`, respectively, and the `Content-Type` header to `application/octet-stream`. 
If a developer app is revoked, none of its API keys are valid for API calls even though the keys are still `approved`. If successful, the API call returns the following HTTP status code: `204 No Content` To generate a new consumer key and secret for a developer app, pass the new key/secret details. Rather than replace an existing key, this API generates a new key. In this case, multiple key pairs may be associated with a single developer app. Each key pair has an independent status (`approved` or `revoked`) and expiration time. Any approved, non-expired key can be used in an API call. For example, if you're using API key rotation, you can generate new keys with expiration times that overlap keys that are going to expire. You might also generate a new consumer key/secret if the security of the original key/secret is compromised. The `keyExpiresIn` property defines the expiration time for the API key in milliseconds. If you don't set this property or set it to `-1`, the API key never expires. **Notes**: * When generating a new key/secret, this API replaces the existing attributes, notes, and callback URLs with those specified in the request. Include or exclude any existing information that you want to retain or delete, respectively. * To migrate existing consumer keys and secrets to hybrid from another system, see the CreateDeveloperAppKey API."##), + Some(r##"Manages access to a developer app by enabling you to: * Approve or revoke a developer app * Generate a new consumer key and secret for a developer app To approve or revoke a developer app, set the `action` query parameter to `approve` or `revoke`, respectively, and the `Content-Type` header to `application/octet-stream`. If a developer app is revoked, none of its API keys are valid for API calls even though the keys are still approved. 
If successful, the API call returns the following HTTP status code: `204 No Content` To generate a new consumer key and secret for a developer app, pass the new key/secret details. Rather than replace an existing key, this API generates a new key. In this case, multiple key pairs may be associated with a single developer app. Each key pair has an independent status (`approve` or `revoke`) and expiration time. Any approved, non-expired key can be used in an API call. For example, if you're using API key rotation, you can generate new keys with expiration times that overlap keys that are going to expire. You might also generate a new consumer key/secret if the security of the original key/secret is compromised. The `keyExpiresIn` property defines the expiration time for the API key in milliseconds. If you don't set this property or set it to `-1`, the API key never expires. **Notes**: * When generating a new key/secret, this API replaces the existing attributes, notes, and callback URLs with those specified in the request. Include or exclude any existing information that you want to retain or delete, respectively. * To migrate existing consumer keys and secrets to hybrid from another system, see the CreateDeveloperAppKey API."##), "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_developers-apps-generate-key-pair-or-update-developer-app-status", vec![ (Some(r##"name"##), @@ -19252,6 +21770,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("envgroups-get-deployed-ingress-config", + Some(r##"Gets the deployed ingress configuration for an environment group."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_envgroups-get-deployed-ingress-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the deployed configuration for the environment group in the following format: 'organizations/{org}/envgroups/{envgroup}/deployedIngressConfig'."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -19879,7 +22419,7 @@ async fn main() { Some(false)), ]), ("environments-delete", - Some(r##"Deletes an environment from an organization."##), + Some(r##"Deletes an environment from an organization. **Warning: You must delete all key value maps and key value entries before you delete an environment.** Otherwise, if you re-create the environment the key value map entry operations will encounter encryption/decryption discrepancies."##), "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-delete", vec![ (Some(r##"name"##), @@ -20010,6 +22550,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-get-api-security-runtime-config", + Some(r##"Gets the API Security runtime configuration for an environment. This named ApiSecurityRuntimeConfig to prevent conflicts with ApiSecurityConfig from addon config."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-get-api-security-runtime-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the environment API Security Runtime configuration resource. 
Use the following structure in your request: `organizations/{org}/environments/{env}/apiSecurityRuntimeConfig`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -20066,7 +22628,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -20364,6 +22926,128 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-keyvaluemaps-entries-create", + Some(r##"Creates key value entries in a key value map scoped to an organization, environment, or API proxy. **Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-keyvaluemaps-entries-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to create the key value map entry. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}`. 
* `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-keyvaluemaps-entries-delete", + Some(r##"Deletes a key value entry from a key value map scoped to an organization, environment, or API proxy. **Notes:** * After you delete the key value entry, the policy consuming the entry will continue to function with its cached values for a few minutes. This is expected behavior. * Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-keyvaluemaps-entries-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to delete the key value map entry. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}/entries/{entry}`. 
* `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}/entries/{entry}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}/entries/{entry}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-keyvaluemaps-entries-get", + Some(r##"Get the key value entry value for a key value map scoped to an organization, environment, or API proxy. **Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-keyvaluemaps-entries-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to fetch the key value map entry/value. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}/entries/{entry}`. * `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}/entries/{entry}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}/entries/{entry}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-keyvaluemaps-entries-list", + Some(r##"Lists key value entries for key values maps scoped to an organization, environment, or API proxy. 
**Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-keyvaluemaps-entries-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to list key value maps. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}`. * `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-modify-environment", + Some(r##"Updates properties for an Apigee environment with patch semantics using a field mask. **Note:** Not supported for Apigee hybrid."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-modify-environment", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the environment. 
Use the following structure in your request: `organizations/{org}/environments/{environment}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -20458,6 +23142,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-queries-get-resulturl", + Some(r##"After the query is completed, use this API to retrieve the results. If the request succeeds, and there is a non-zero result set, the result is sent to the client as a list of urls to JSON files."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-queries-get-resulturl", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the asynchronous query result to get. Must be of the form `organizations/{org}/environments/{env}/queries/{queryId}/resulturl`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -20766,6 +23472,178 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-security-reports-create", + Some(r##"Submit a report request to be processed in the background. 
If the submission succeeds, the API returns a 200 status and an ID that refer to the report request. In addition to the HTTP status 200, the `state` of "enqueued" means that the request succeeded."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-security-reports-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource name. Must be of the form `organizations/{org}/environments/{env}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-security-reports-get", + Some(r##"Get security report status If the query is still in progress, the `state` is set to "running" After the query has completed successfully, `state` is set to "completed""##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-security-reports-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the security report to get. Must be of the form `organizations/{org}/environments/{env}/securityReports/{reportId}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-security-reports-get-result", + Some(r##"After the query is completed, use this API to retrieve the results as file. 
If the request succeeds, and there is a non-zero result set, the result is downloaded to the client as a zipped JSON file. The name of the downloaded file will be: OfflineQueryResult-.zip Example: `OfflineQueryResult-9cfc0d85-0f30-46d6-ae6f-318d0cb961bd.zip`"##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-security-reports-get-result", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the security report result to get. Must be of the form `organizations/{org}/environments/{env}/securityReports/{reportId}/result`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-security-reports-get-result-view", + Some(r##"After the query is completed, use this API to view the query result when result size is small."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-security-reports-get-result-view", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the security report result view to get. 
Must be of the form `organizations/{org}/environments/{env}/securityReports/{reportId}/resultView`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-security-reports-list", + Some(r##"Return a list of Security Reports"##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-security-reports-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource name. Must be of the form `organizations/{org}/environments/{env}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-security-stats-query-tabular-stats", + Some(r##"Retrieve security statistics as tabular rows."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-security-stats-query-tabular-stats", + vec![ + (Some(r##"orgenv"##), + None, + Some(r##"Required. 
Should be of the form organizations//environments/."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("environments-security-stats-query-time-series-stats", + Some(r##"Retrieve security statistics as a collection of time series."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_environments-security-stats-query-time-series-stats", + vec![ + (Some(r##"orgenv"##), + None, + Some(r##"Required. Should be of the form organizations//environments/."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -20778,7 +23656,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -21038,7 +23916,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -21360,6 +24238,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-project-mapping", + Some(r##"Gets the project ID and region for an Apigee organization."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_get-project-mapping", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Apigee organization name in the following format: `organizations/{org}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -21526,6 +24426,122 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("host-security-reports-create", + Some(r##"Submit a query at host level to be processed in the background. If the submission of the query succeeds, the API returns a 201 status and an ID that refer to the query. In addition to the HTTP status 201, the `state` of "enqueued" means that the request succeeded."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_host-security-reports-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource name. 
Must be of the form `organizations/{org}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("host-security-reports-get", + Some(r##"Get status of a query submitted at host level. If the query is still in progress, the `state` is set to "running" After the query has completed successfully, `state` is set to "completed""##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_host-security-reports-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the security report to get. Must be of the form `organizations/{org}/securityReports/{reportId}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("host-security-reports-get-result", + Some(r##"After the query is completed, use this API to retrieve the results. If the request succeeds, and there is a non-zero result set, the result is downloaded to the client as a zipped JSON file. The name of the downloaded file will be: OfflineQueryResult-.zip Example: `OfflineQueryResult-9cfc0d85-0f30-46d6-ae6f-318d0cb961bd.zip`"##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_host-security-reports-get-result", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the security report result to get. 
Must be of the form `organizations/{org}/securityReports/{reportId}/result`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("host-security-reports-get-result-view", + Some(r##"After the query is completed, use this API to view the query result when result size is small."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_host-security-reports-get-result-view", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the security report result view to get. Must be of the form `organizations/{org}/securityReports/{reportId}/resultView`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("host-security-reports-list", + Some(r##"Return a list of Security Reports at host level."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_host-security-reports-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource name. 
Must be of the form `organizations/{org}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -22014,6 +25030,100 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("keyvaluemaps-entries-create", + Some(r##"Creates key value entries in a key value map scoped to an organization, environment, or API proxy. **Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_keyvaluemaps-entries-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to create the key value map entry. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}`. * `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("keyvaluemaps-entries-delete", + Some(r##"Deletes a key value entry from a key value map scoped to an organization, environment, or API proxy. 
**Notes:** * After you delete the key value entry, the policy consuming the entry will continue to function with its cached values for a few minutes. This is expected behavior. * Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_keyvaluemaps-entries-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to delete the key value map entry. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}/entries/{entry}`. * `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}/entries/{entry}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}/entries/{entry}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("keyvaluemaps-entries-get", + Some(r##"Get the key value entry value for a key value map scoped to an organization, environment, or API proxy. **Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_keyvaluemaps-entries-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to fetch the key value map entry/value. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}/entries/{entry}`. 
* `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}/entries/{entry}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}/entries/{entry}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("keyvaluemaps-entries-list", + Some(r##"Lists key value entries for key values maps scoped to an organization, environment, or API proxy. **Note**: Supported for Apigee hybrid 1.8.x and higher."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_keyvaluemaps-entries-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Scope as indicated by the URI in which to list key value maps. Use **one** of the following structures in your request: * `organizations/{organization}/apis/{api}/keyvaluemaps/{keyvaluemap}`. * `organizations/{organization}/environments/{environment}/keyvaluemaps/{keyvaluemap}` * `organizations/{organization}/keyvaluemaps/{keyvaluemap}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -22021,7 +25131,7 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"Lists the Apigee organizations and associated GCP projects that you have permission to access. See [Understanding organizations](https://cloud.google.com/apigee/docs/api-platform/fundamentals/organization-structure)."##), + Some(r##"Lists the Apigee organizations and associated Google Cloud projects that you have permission to access. 
See [Understanding organizations](https://cloud.google.com/apigee/docs/api-platform/fundamentals/organization-structure)."##), "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_list", vec![ (Some(r##"parent"##), @@ -22224,6 +25334,150 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("security-profiles-environments-compute-environment-scores", + Some(r##"ComputeEnvironmentScores calculates scores for requested time range for the specified security profile and environment."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_security-profiles-environments-compute-environment-scores", + vec![ + (Some(r##"profile-environment"##), + None, + Some(r##"Required. Name of organization and environment and profile id for which score needs to be computed. Format: organizations/{org}/securityProfiles/{profile}/environments/{env}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("security-profiles-environments-create", + Some(r##"CreateSecurityProfileEnvironmentAssociation creates profile environment association i.e. attaches environment to security profile."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_security-profiles-environments-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Name of organization and security profile ID. 
Format: organizations/{org}/securityProfiles/{profile}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("security-profiles-environments-delete", + Some(r##"DeleteSecurityProfileEnvironmentAssociation removes profile environment association i.e. detaches environment from security profile."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_security-profiles-environments-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the environment attachment to delete. Format: organizations/{org}/securityProfiles/{profile}/environments/{env}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("security-profiles-get", + Some(r##"GetSecurityProfile gets the specified security profile. Returns NOT_FOUND if security profile is not present for the specified organization."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_security-profiles-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Security profile in the following format: `organizations/{org}/securityProfiles/{profile}'. Profile may optionally contain revision ID. If revision ID is not provided, the response will contain latest revision by default. 
Example: organizations/testOrg/securityProfiles/testProfile@5"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("security-profiles-list", + Some(r##"ListSecurityProfiles lists all the security profiles associated with the org including attached and unattached profiles."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_security-profiles-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. For a specific organization, list of all the security profiles. Format: `organizations/{org}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("security-profiles-list-revisions", + Some(r##"ListSecurityProfileRevisions lists all the revisions of the security profile."##), + "Details at http://byron.github.io/google-apis-rs/google_apigee1_cli/organizations_security-profiles-list-revisions", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. For a specific profile, list all the revisions. 
Format: `organizations/{org}/securityProfiles/{profile}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -22683,7 +25937,7 @@ async fn main() { let mut app = App::new("apigee1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230104") .about("Use the Apigee API to programmatically develop and manage APIs with a set of RESTful operations. Develop and secure API proxies, deploy and undeploy API proxy revisions, monitor APIs, configure environments, manage users, and more. Note: This product is available as a free trial for a time period of 60 days.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_apigee1_cli") .arg(Arg::with_name("url") diff --git a/gen/apigee1/Cargo.toml b/gen/apigee1/Cargo.toml index 8bf8d0cd2f..1718cbe39e 100644 --- a/gen/apigee1/Cargo.toml +++ b/gen/apigee1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-apigee1" -version = "5.0.2-beta-1+20230104" +version = "5.0.2+20230104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Apigee (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/apigee1" homepage = "https://cloud.google.com/apigee-api-management/" -documentation = "https://docs.rs/google-apigee1/5.0.2-beta-1+20230104" +documentation = "https://docs.rs/google-apigee1/5.0.2+20230104" license = "MIT" keywords = ["apigee", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/apigee1/README.md b/gen/apigee1/README.md index f7a13602d2..b1e92a2a77 100644 --- a/gen/apigee1/README.md +++ b/gen/apigee1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-apigee1` library allows access to all features of the *Google Apigee* service. 
-This documentation was generated from *Apigee* crate version *5.0.2-beta-1+20230104*, where *20230104* is the exact revision of the *apigee:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Apigee* crate version *5.0.2+20230104*, where *20230104* is the exact revision of the *apigee:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Apigee* *v1* API can be found at the [official documentation site](https://cloud.google.com/apigee-api-management/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/Apigee) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/Apigee) ... * hybrid - * [*issuers list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::HybridIssuerListCall) + * [*issuers list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::HybridIssuerListCall) * organizations - * [*analytics datastores create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationAnalyticDatastoreCreateCall), [*analytics datastores delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationAnalyticDatastoreDeleteCall), [*analytics datastores get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationAnalyticDatastoreGetCall), [*analytics datastores list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationAnalyticDatastoreListCall), [*analytics datastores test*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationAnalyticDatastoreTestCall), [*analytics datastores 
update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationAnalyticDatastoreUpdateCall), [*apiproducts attributes*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductAttributeCall), [*apiproducts attributes delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductAttributeDeleteCall), [*apiproducts attributes get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductAttributeGetCall), [*apiproducts attributes list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductAttributeListCall), [*apiproducts attributes update api product attribute*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductAttributeUpdateApiProductAttributeCall), [*apiproducts create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductCreateCall), [*apiproducts delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductDeleteCall), [*apiproducts get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductGetCall), [*apiproducts list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductListCall), [*apiproducts rateplans create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductRateplanCreateCall), [*apiproducts rateplans delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductRateplanDeleteCall), [*apiproducts rateplans get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductRateplanGetCall), [*apiproducts rateplans list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductRateplanListCall), 
[*apiproducts rateplans update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductRateplanUpdateCall), [*apiproducts update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiproductUpdateCall), [*apis create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiCreateCall), [*apis delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiDeleteCall), [*apis deployments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiDeploymentListCall), [*apis get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiGetCall), [*apis keyvaluemaps create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiKeyvaluemapCreateCall), [*apis keyvaluemaps delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiKeyvaluemapDeleteCall), [*apis keyvaluemaps entries create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiKeyvaluemapEntryCreateCall), [*apis keyvaluemaps entries delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiKeyvaluemapEntryDeleteCall), [*apis keyvaluemaps entries get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiKeyvaluemapEntryGetCall), [*apis keyvaluemaps entries list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiKeyvaluemapEntryListCall), [*apis list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiListCall), [*apis patch*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiPatchCall), [*apis revisions 
delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiRevisionDeleteCall), [*apis revisions deployments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiRevisionDeploymentListCall), [*apis revisions get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiRevisionGetCall), [*apis revisions update api proxy revision*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationApiRevisionUpdateApiProxyRevisionCall), [*apps get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationAppGetCall), [*apps list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationAppListCall), [*create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationCreateCall), [*datacollectors create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDatacollectorCreateCall), [*datacollectors delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDatacollectorDeleteCall), [*datacollectors get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDatacollectorGetCall), [*datacollectors list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDatacollectorListCall), [*datacollectors patch*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDatacollectorPatchCall), [*delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeleteCall), [*deployments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeploymentListCall), [*developers apps attributes*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeCall), [*developers apps attributes 
delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeDeleteCall), [*developers apps attributes get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeGetCall), [*developers apps attributes list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeListCall), [*developers apps attributes update developer app attribute*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeUpdateDeveloperAppAttributeCall), [*developers apps create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppCreateCall), [*developers apps delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppDeleteCall), [*developers apps generate key pair or update developer app status*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppGenerateKeyPairOrUpdateDeveloperAppStatuCall), [*developers apps get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppGetCall), [*developers apps keys apiproducts delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppKeyApiproductDeleteCall), [*developers apps keys apiproducts update developer app key api product*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppKeyApiproductUpdateDeveloperAppKeyApiProductCall), [*developers apps keys create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppKeyCreateCall), [*developers apps keys create create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppKeyCreateCreateCall), [*developers apps keys 
delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppKeyDeleteCall), [*developers apps keys get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppKeyGetCall), [*developers apps keys replace developer app key*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppKeyReplaceDeveloperAppKeyCall), [*developers apps keys update developer app key*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppKeyUpdateDeveloperAppKeyCall), [*developers apps list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppListCall), [*developers apps update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAppUpdateCall), [*developers attributes*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAttributeCall), [*developers attributes delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAttributeDeleteCall), [*developers attributes get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAttributeGetCall), [*developers attributes list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAttributeListCall), [*developers attributes update developer attribute*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperAttributeUpdateDeveloperAttributeCall), [*developers balance adjust*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperBalanceAdjustCall), [*developers balance credit*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperBalanceCreditCall), [*developers 
create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperCreateCall), [*developers delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperDeleteCall), [*developers get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperGetCall), [*developers get balance*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperGetBalanceCall), [*developers get monetization config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperGetMonetizationConfigCall), [*developers list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperListCall), [*developers set developer status*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperSetDeveloperStatuCall), [*developers subscriptions create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperSubscriptionCreateCall), [*developers subscriptions expire*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperSubscriptionExpireCall), [*developers subscriptions get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperSubscriptionGetCall), [*developers subscriptions list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperSubscriptionListCall), [*developers update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperUpdateCall), [*developers update monetization config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationDeveloperUpdateMonetizationConfigCall), [*endpoint attachments create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEndpointAttachmentCreateCall), 
[*endpoint attachments delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEndpointAttachmentDeleteCall), [*endpoint attachments get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEndpointAttachmentGetCall), [*endpoint attachments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEndpointAttachmentListCall), [*envgroups attachments create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupAttachmentCreateCall), [*envgroups attachments delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupAttachmentDeleteCall), [*envgroups attachments get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupAttachmentGetCall), [*envgroups attachments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupAttachmentListCall), [*envgroups create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupCreateCall), [*envgroups delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupDeleteCall), [*envgroups get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupGetCall), [*envgroups get deployed ingress config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupGetDeployedIngressConfigCall), [*envgroups list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupListCall), [*envgroups patch*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvgroupPatchCall), [*environments analytics admin get schemav2*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentAnalyticAdminGetSchemav2Call), [*environments 
analytics exports create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentAnalyticExportCreateCall), [*environments analytics exports get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentAnalyticExportGetCall), [*environments analytics exports list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentAnalyticExportListCall), [*environments apis deployments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiDeploymentListCall), [*environments apis revisions debugsessions create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionCreateCall), [*environments apis revisions debugsessions data get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionDataGetCall), [*environments apis revisions debugsessions delete data*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionDeleteDataCall), [*environments apis revisions debugsessions get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionGetCall), [*environments apis revisions debugsessions list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionListCall), [*environments apis revisions deploy*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDeployCall), [*environments apis revisions deployments generate deploy change report*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDeploymentGenerateDeployChangeReportCall), [*environments apis revisions deployments generate undeploy 
change report*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDeploymentGenerateUndeployChangeReportCall), [*environments apis revisions get deployments*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionGetDeploymentCall), [*environments apis revisions undeploy*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionUndeployCall), [*environments archive deployments create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentCreateCall), [*environments archive deployments delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentDeleteCall), [*environments archive deployments generate download url*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentGenerateDownloadUrlCall), [*environments archive deployments generate upload url*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentGenerateUploadUrlCall), [*environments archive deployments get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentGetCall), [*environments archive deployments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentListCall), [*environments archive deployments patch*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentPatchCall), [*environments caches delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentCacheDeleteCall), [*environments 
create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentCreateCall), [*environments delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentDeleteCall), [*environments deployments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentDeploymentListCall), [*environments flowhooks attach shared flow to flow hook*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentFlowhookAttachSharedFlowToFlowHookCall), [*environments flowhooks detach shared flow from flow hook*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentFlowhookDetachSharedFlowFromFlowHookCall), [*environments flowhooks get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentFlowhookGetCall), [*environments get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentGetCall), [*environments get api security runtime config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentGetApiSecurityRuntimeConfigCall), [*environments get debugmask*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentGetDebugmaskCall), [*environments get deployed config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentGetDeployedConfigCall), [*environments get iam policy*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentGetIamPolicyCall), [*environments get trace config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentGetTraceConfigCall), [*environments keystores aliases 
create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasCreateCall), [*environments keystores aliases csr*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasCsrCall), [*environments keystores aliases delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasDeleteCall), [*environments keystores aliases get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasGetCall), [*environments keystores aliases get certificate*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasGetCertificateCall), [*environments keystores aliases update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasUpdateCall), [*environments keystores create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreCreateCall), [*environments keystores delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreDeleteCall), [*environments keystores get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreGetCall), [*environments keyvaluemaps create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapCreateCall), [*environments keyvaluemaps delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapDeleteCall), [*environments keyvaluemaps entries create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapEntryCreateCall), [*environments keyvaluemaps entries 
delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapEntryDeleteCall), [*environments keyvaluemaps entries get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapEntryGetCall), [*environments keyvaluemaps entries list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapEntryListCall), [*environments modify environment*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentModifyEnvironmentCall), [*environments optimized stats get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentOptimizedStatGetCall), [*environments queries create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentQueryCreateCall), [*environments queries get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentQueryGetCall), [*environments queries get result*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentQueryGetResultCall), [*environments queries get resulturl*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentQueryGetResulturlCall), [*environments queries list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentQueryListCall), [*environments references create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentReferenceCreateCall), [*environments references delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentReferenceDeleteCall), [*environments references get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentReferenceGetCall), [*environments references 
update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentReferenceUpdateCall), [*environments resourcefiles create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileCreateCall), [*environments resourcefiles delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileDeleteCall), [*environments resourcefiles get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileGetCall), [*environments resourcefiles list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileListCall), [*environments resourcefiles list environment resources*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileListEnvironmentResourceCall), [*environments resourcefiles update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileUpdateCall), [*environments security reports create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportCreateCall), [*environments security reports get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportGetCall), [*environments security reports get result*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportGetResultCall), [*environments security reports get result view*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportGetResultViewCall), [*environments security reports list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportListCall), [*environments security stats query tabular 
stats*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSecurityStatQueryTabularStatCall), [*environments security stats query time series stats*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSecurityStatQueryTimeSeriesStatCall), [*environments set iam policy*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSetIamPolicyCall), [*environments sharedflows deployments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSharedflowDeploymentListCall), [*environments sharedflows revisions deploy*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSharedflowRevisionDeployCall), [*environments sharedflows revisions get deployments*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSharedflowRevisionGetDeploymentCall), [*environments sharedflows revisions undeploy*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSharedflowRevisionUndeployCall), [*environments stats get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentStatGetCall), [*environments subscribe*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentSubscribeCall), [*environments targetservers create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTargetserverCreateCall), [*environments targetservers delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTargetserverDeleteCall), [*environments targetservers get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTargetserverGetCall), [*environments targetservers 
update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTargetserverUpdateCall), [*environments test iam permissions*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTestIamPermissionCall), [*environments trace config overrides create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverrideCreateCall), [*environments trace config overrides delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverrideDeleteCall), [*environments trace config overrides get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverrideGetCall), [*environments trace config overrides list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverrideListCall), [*environments trace config overrides patch*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverridePatchCall), [*environments unsubscribe*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentUnsubscribeCall), [*environments update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentUpdateCall), [*environments update debugmask*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentUpdateDebugmaskCall), [*environments update environment*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentUpdateEnvironmentCall), [*environments update trace config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationEnvironmentUpdateTraceConfigCall), [*get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationGetCall), 
[*get deployed ingress config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationGetDeployedIngressConfigCall), [*get project mapping*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationGetProjectMappingCall), [*get runtime config*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationGetRuntimeConfigCall), [*get sync authorization*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationGetSyncAuthorizationCall), [*host queries create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostQueryCreateCall), [*host queries get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostQueryGetCall), [*host queries get result*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostQueryGetResultCall), [*host queries get result view*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostQueryGetResultViewCall), [*host queries list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostQueryListCall), [*host security reports create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostSecurityReportCreateCall), [*host security reports get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostSecurityReportGetCall), [*host security reports get result*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostSecurityReportGetResultCall), [*host security reports get result view*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostSecurityReportGetResultViewCall), [*host security reports list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostSecurityReportListCall), [*host stats 
get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationHostStatGetCall), [*instances attachments create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceAttachmentCreateCall), [*instances attachments delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceAttachmentDeleteCall), [*instances attachments get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceAttachmentGetCall), [*instances attachments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceAttachmentListCall), [*instances canaryevaluations create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceCanaryevaluationCreateCall), [*instances canaryevaluations get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceCanaryevaluationGetCall), [*instances create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceCreateCall), [*instances delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceDeleteCall), [*instances get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceGetCall), [*instances list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceListCall), [*instances nat addresses activate*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceNatAddressActivateCall), [*instances nat addresses create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceNatAddressCreateCall), [*instances nat addresses delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceNatAddressDeleteCall), [*instances nat addresses 
get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceNatAddressGetCall), [*instances nat addresses list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceNatAddressListCall), [*instances patch*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstancePatchCall), [*instances report status*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationInstanceReportStatuCall), [*keyvaluemaps create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationKeyvaluemapCreateCall), [*keyvaluemaps delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationKeyvaluemapDeleteCall), [*keyvaluemaps entries create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationKeyvaluemapEntryCreateCall), [*keyvaluemaps entries delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationKeyvaluemapEntryDeleteCall), [*keyvaluemaps entries get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationKeyvaluemapEntryGetCall), [*keyvaluemaps entries list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationKeyvaluemapEntryListCall), [*list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationListCall), [*operations get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationOperationGetCall), [*operations list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationOperationListCall), [*optimized host stats get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationOptimizedHostStatGetCall), [*reports create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationReportCreateCall), [*reports 
delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationReportDeleteCall), [*reports get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationReportGetCall), [*reports list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationReportListCall), [*reports update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationReportUpdateCall), [*security profiles environments compute environment scores*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSecurityProfileEnvironmentComputeEnvironmentScoreCall), [*security profiles environments create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSecurityProfileEnvironmentCreateCall), [*security profiles environments delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSecurityProfileEnvironmentDeleteCall), [*security profiles get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSecurityProfileGetCall), [*security profiles list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSecurityProfileListCall), [*security profiles list revisions*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSecurityProfileListRevisionCall), [*set addons*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSetAddonCall), [*set sync authorization*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSetSyncAuthorizationCall), [*sharedflows create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowCreateCall), [*sharedflows delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowDeleteCall), [*sharedflows deployments 
list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowDeploymentListCall), [*sharedflows get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowGetCall), [*sharedflows list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowListCall), [*sharedflows revisions delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowRevisionDeleteCall), [*sharedflows revisions deployments list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowRevisionDeploymentListCall), [*sharedflows revisions get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowRevisionGetCall), [*sharedflows revisions update shared flow revision*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSharedflowRevisionUpdateSharedFlowRevisionCall), [*sites apicategories create*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSiteApicategoryCreateCall), [*sites apicategories delete*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSiteApicategoryDeleteCall), [*sites apicategories get*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSiteApicategoryGetCall), [*sites apicategories list*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSiteApicategoryListCall), [*sites apicategories patch*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationSiteApicategoryPatchCall) and [*update*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::OrganizationUpdateCall) + * [*analytics datastores create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationAnalyticDatastoreCreateCall), [*analytics 
datastores delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationAnalyticDatastoreDeleteCall), [*analytics datastores get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationAnalyticDatastoreGetCall), [*analytics datastores list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationAnalyticDatastoreListCall), [*analytics datastores test*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationAnalyticDatastoreTestCall), [*analytics datastores update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationAnalyticDatastoreUpdateCall), [*apiproducts attributes*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductAttributeCall), [*apiproducts attributes delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductAttributeDeleteCall), [*apiproducts attributes get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductAttributeGetCall), [*apiproducts attributes list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductAttributeListCall), [*apiproducts attributes update api product attribute*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductAttributeUpdateApiProductAttributeCall), [*apiproducts create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductCreateCall), [*apiproducts delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductDeleteCall), [*apiproducts get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductGetCall), [*apiproducts list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductListCall), [*apiproducts rateplans 
create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductRateplanCreateCall), [*apiproducts rateplans delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductRateplanDeleteCall), [*apiproducts rateplans get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductRateplanGetCall), [*apiproducts rateplans list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductRateplanListCall), [*apiproducts rateplans update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductRateplanUpdateCall), [*apiproducts update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiproductUpdateCall), [*apis create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiCreateCall), [*apis delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiDeleteCall), [*apis deployments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiDeploymentListCall), [*apis get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiGetCall), [*apis keyvaluemaps create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiKeyvaluemapCreateCall), [*apis keyvaluemaps delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiKeyvaluemapDeleteCall), [*apis keyvaluemaps entries create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiKeyvaluemapEntryCreateCall), [*apis keyvaluemaps entries delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiKeyvaluemapEntryDeleteCall), [*apis keyvaluemaps entries get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiKeyvaluemapEntryGetCall), [*apis keyvaluemaps entries 
list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiKeyvaluemapEntryListCall), [*apis list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiListCall), [*apis patch*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiPatchCall), [*apis revisions delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiRevisionDeleteCall), [*apis revisions deployments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiRevisionDeploymentListCall), [*apis revisions get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiRevisionGetCall), [*apis revisions update api proxy revision*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationApiRevisionUpdateApiProxyRevisionCall), [*apps get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationAppGetCall), [*apps list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationAppListCall), [*create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationCreateCall), [*datacollectors create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDatacollectorCreateCall), [*datacollectors delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDatacollectorDeleteCall), [*datacollectors get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDatacollectorGetCall), [*datacollectors list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDatacollectorListCall), [*datacollectors patch*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDatacollectorPatchCall), [*delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeleteCall), [*deployments 
list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeploymentListCall), [*developers apps attributes*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeCall), [*developers apps attributes delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeDeleteCall), [*developers apps attributes get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeGetCall), [*developers apps attributes list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeListCall), [*developers apps attributes update developer app attribute*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppAttributeUpdateDeveloperAppAttributeCall), [*developers apps create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppCreateCall), [*developers apps delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppDeleteCall), [*developers apps generate key pair or update developer app status*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppGenerateKeyPairOrUpdateDeveloperAppStatuCall), [*developers apps get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppGetCall), [*developers apps keys apiproducts delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppKeyApiproductDeleteCall), [*developers apps keys apiproducts update developer app key api product*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppKeyApiproductUpdateDeveloperAppKeyApiProductCall), [*developers apps keys create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppKeyCreateCall), [*developers apps 
keys create create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppKeyCreateCreateCall), [*developers apps keys delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppKeyDeleteCall), [*developers apps keys get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppKeyGetCall), [*developers apps keys replace developer app key*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppKeyReplaceDeveloperAppKeyCall), [*developers apps keys update developer app key*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppKeyUpdateDeveloperAppKeyCall), [*developers apps list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppListCall), [*developers apps update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAppUpdateCall), [*developers attributes*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAttributeCall), [*developers attributes delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAttributeDeleteCall), [*developers attributes get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAttributeGetCall), [*developers attributes list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAttributeListCall), [*developers attributes update developer attribute*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperAttributeUpdateDeveloperAttributeCall), [*developers balance adjust*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperBalanceAdjustCall), [*developers balance 
credit*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperBalanceCreditCall), [*developers create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperCreateCall), [*developers delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperDeleteCall), [*developers get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperGetCall), [*developers get balance*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperGetBalanceCall), [*developers get monetization config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperGetMonetizationConfigCall), [*developers list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperListCall), [*developers set developer status*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperSetDeveloperStatuCall), [*developers subscriptions create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperSubscriptionCreateCall), [*developers subscriptions expire*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperSubscriptionExpireCall), [*developers subscriptions get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperSubscriptionGetCall), [*developers subscriptions list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperSubscriptionListCall), [*developers update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperUpdateCall), [*developers update monetization config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationDeveloperUpdateMonetizationConfigCall), [*endpoint attachments 
create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEndpointAttachmentCreateCall), [*endpoint attachments delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEndpointAttachmentDeleteCall), [*endpoint attachments get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEndpointAttachmentGetCall), [*endpoint attachments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEndpointAttachmentListCall), [*envgroups attachments create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupAttachmentCreateCall), [*envgroups attachments delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupAttachmentDeleteCall), [*envgroups attachments get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupAttachmentGetCall), [*envgroups attachments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupAttachmentListCall), [*envgroups create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupCreateCall), [*envgroups delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupDeleteCall), [*envgroups get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupGetCall), [*envgroups get deployed ingress config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupGetDeployedIngressConfigCall), [*envgroups list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupListCall), [*envgroups patch*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvgroupPatchCall), [*environments analytics admin get 
schemav2*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentAnalyticAdminGetSchemav2Call), [*environments analytics exports create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentAnalyticExportCreateCall), [*environments analytics exports get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentAnalyticExportGetCall), [*environments analytics exports list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentAnalyticExportListCall), [*environments apis deployments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiDeploymentListCall), [*environments apis revisions debugsessions create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionCreateCall), [*environments apis revisions debugsessions data get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionDataGetCall), [*environments apis revisions debugsessions delete data*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionDeleteDataCall), [*environments apis revisions debugsessions get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionGetCall), [*environments apis revisions debugsessions list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDebugsessionListCall), [*environments apis revisions deploy*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDeployCall), [*environments apis revisions deployments generate deploy change 
report*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDeploymentGenerateDeployChangeReportCall), [*environments apis revisions deployments generate undeploy change report*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionDeploymentGenerateUndeployChangeReportCall), [*environments apis revisions get deployments*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionGetDeploymentCall), [*environments apis revisions undeploy*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentApiRevisionUndeployCall), [*environments archive deployments create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentCreateCall), [*environments archive deployments delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentDeleteCall), [*environments archive deployments generate download url*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentGenerateDownloadUrlCall), [*environments archive deployments generate upload url*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentGenerateUploadUrlCall), [*environments archive deployments get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentGetCall), [*environments archive deployments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentListCall), [*environments archive deployments patch*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentArchiveDeploymentPatchCall), [*environments caches 
delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentCacheDeleteCall), [*environments create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentCreateCall), [*environments delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentDeleteCall), [*environments deployments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentDeploymentListCall), [*environments flowhooks attach shared flow to flow hook*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentFlowhookAttachSharedFlowToFlowHookCall), [*environments flowhooks detach shared flow from flow hook*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentFlowhookDetachSharedFlowFromFlowHookCall), [*environments flowhooks get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentFlowhookGetCall), [*environments get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentGetCall), [*environments get api security runtime config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentGetApiSecurityRuntimeConfigCall), [*environments get debugmask*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentGetDebugmaskCall), [*environments get deployed config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentGetDeployedConfigCall), [*environments get iam policy*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentGetIamPolicyCall), [*environments get trace config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentGetTraceConfigCall), [*environments keystores aliases 
create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasCreateCall), [*environments keystores aliases csr*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasCsrCall), [*environments keystores aliases delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasDeleteCall), [*environments keystores aliases get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasGetCall), [*environments keystores aliases get certificate*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasGetCertificateCall), [*environments keystores aliases update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreAliasUpdateCall), [*environments keystores create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreCreateCall), [*environments keystores delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreDeleteCall), [*environments keystores get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeystoreGetCall), [*environments keyvaluemaps create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapCreateCall), [*environments keyvaluemaps delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapDeleteCall), [*environments keyvaluemaps entries create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapEntryCreateCall), [*environments keyvaluemaps entries delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapEntryDeleteCall), [*environments keyvaluemaps 
entries get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapEntryGetCall), [*environments keyvaluemaps entries list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentKeyvaluemapEntryListCall), [*environments modify environment*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentModifyEnvironmentCall), [*environments optimized stats get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentOptimizedStatGetCall), [*environments queries create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentQueryCreateCall), [*environments queries get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentQueryGetCall), [*environments queries get result*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentQueryGetResultCall), [*environments queries get resulturl*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentQueryGetResulturlCall), [*environments queries list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentQueryListCall), [*environments references create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentReferenceCreateCall), [*environments references delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentReferenceDeleteCall), [*environments references get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentReferenceGetCall), [*environments references update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentReferenceUpdateCall), [*environments resourcefiles 
create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileCreateCall), [*environments resourcefiles delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileDeleteCall), [*environments resourcefiles get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileGetCall), [*environments resourcefiles list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileListCall), [*environments resourcefiles list environment resources*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileListEnvironmentResourceCall), [*environments resourcefiles update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentResourcefileUpdateCall), [*environments security reports create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportCreateCall), [*environments security reports get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportGetCall), [*environments security reports get result*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportGetResultCall), [*environments security reports get result view*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportGetResultViewCall), [*environments security reports list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSecurityReportListCall), [*environments security stats query tabular stats*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSecurityStatQueryTabularStatCall), [*environments security stats query time series 
stats*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSecurityStatQueryTimeSeriesStatCall), [*environments set iam policy*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSetIamPolicyCall), [*environments sharedflows deployments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSharedflowDeploymentListCall), [*environments sharedflows revisions deploy*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSharedflowRevisionDeployCall), [*environments sharedflows revisions get deployments*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSharedflowRevisionGetDeploymentCall), [*environments sharedflows revisions undeploy*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSharedflowRevisionUndeployCall), [*environments stats get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentStatGetCall), [*environments subscribe*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentSubscribeCall), [*environments targetservers create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTargetserverCreateCall), [*environments targetservers delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTargetserverDeleteCall), [*environments targetservers get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTargetserverGetCall), [*environments targetservers update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTargetserverUpdateCall), [*environments test iam permissions*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTestIamPermissionCall), [*environments trace 
config overrides create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverrideCreateCall), [*environments trace config overrides delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverrideDeleteCall), [*environments trace config overrides get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverrideGetCall), [*environments trace config overrides list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverrideListCall), [*environments trace config overrides patch*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentTraceConfigOverridePatchCall), [*environments unsubscribe*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentUnsubscribeCall), [*environments update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentUpdateCall), [*environments update debugmask*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentUpdateDebugmaskCall), [*environments update environment*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentUpdateEnvironmentCall), [*environments update trace config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationEnvironmentUpdateTraceConfigCall), [*get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationGetCall), [*get deployed ingress config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationGetDeployedIngressConfigCall), [*get project mapping*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationGetProjectMappingCall), [*get runtime config*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationGetRuntimeConfigCall), 
[*get sync authorization*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationGetSyncAuthorizationCall), [*host queries create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostQueryCreateCall), [*host queries get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostQueryGetCall), [*host queries get result*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostQueryGetResultCall), [*host queries get result view*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostQueryGetResultViewCall), [*host queries list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostQueryListCall), [*host security reports create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostSecurityReportCreateCall), [*host security reports get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostSecurityReportGetCall), [*host security reports get result*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostSecurityReportGetResultCall), [*host security reports get result view*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostSecurityReportGetResultViewCall), [*host security reports list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostSecurityReportListCall), [*host stats get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationHostStatGetCall), [*instances attachments create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceAttachmentCreateCall), [*instances attachments delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceAttachmentDeleteCall), [*instances attachments 
get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceAttachmentGetCall), [*instances attachments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceAttachmentListCall), [*instances canaryevaluations create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceCanaryevaluationCreateCall), [*instances canaryevaluations get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceCanaryevaluationGetCall), [*instances create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceCreateCall), [*instances delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceDeleteCall), [*instances get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceGetCall), [*instances list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceListCall), [*instances nat addresses activate*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceNatAddressActivateCall), [*instances nat addresses create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceNatAddressCreateCall), [*instances nat addresses delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceNatAddressDeleteCall), [*instances nat addresses get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceNatAddressGetCall), [*instances nat addresses list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceNatAddressListCall), [*instances patch*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstancePatchCall), [*instances report status*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationInstanceReportStatuCall), 
[*keyvaluemaps create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationKeyvaluemapCreateCall), [*keyvaluemaps delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationKeyvaluemapDeleteCall), [*keyvaluemaps entries create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationKeyvaluemapEntryCreateCall), [*keyvaluemaps entries delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationKeyvaluemapEntryDeleteCall), [*keyvaluemaps entries get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationKeyvaluemapEntryGetCall), [*keyvaluemaps entries list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationKeyvaluemapEntryListCall), [*list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationListCall), [*operations get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationOperationGetCall), [*operations list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationOperationListCall), [*optimized host stats get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationOptimizedHostStatGetCall), [*reports create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationReportCreateCall), [*reports delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationReportDeleteCall), [*reports get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationReportGetCall), [*reports list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationReportListCall), [*reports update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationReportUpdateCall), [*security profiles environments compute environment 
scores*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSecurityProfileEnvironmentComputeEnvironmentScoreCall), [*security profiles environments create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSecurityProfileEnvironmentCreateCall), [*security profiles environments delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSecurityProfileEnvironmentDeleteCall), [*security profiles get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSecurityProfileGetCall), [*security profiles list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSecurityProfileListCall), [*security profiles list revisions*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSecurityProfileListRevisionCall), [*set addons*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSetAddonCall), [*set sync authorization*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSetSyncAuthorizationCall), [*sharedflows create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowCreateCall), [*sharedflows delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowDeleteCall), [*sharedflows deployments list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowDeploymentListCall), [*sharedflows get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowGetCall), [*sharedflows list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowListCall), [*sharedflows revisions delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowRevisionDeleteCall), [*sharedflows revisions deployments 
list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowRevisionDeploymentListCall), [*sharedflows revisions get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowRevisionGetCall), [*sharedflows revisions update shared flow revision*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSharedflowRevisionUpdateSharedFlowRevisionCall), [*sites apicategories create*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSiteApicategoryCreateCall), [*sites apicategories delete*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSiteApicategoryDeleteCall), [*sites apicategories get*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSiteApicategoryGetCall), [*sites apicategories list*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSiteApicategoryListCall), [*sites apicategories patch*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationSiteApicategoryPatchCall) and [*update*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::OrganizationUpdateCall) * projects - * [*provision organization*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/api::ProjectProvisionOrganizationCall) + * [*provision organization*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/api::ProjectProvisionOrganizationCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/Apigee)** +* **[Hub](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/Apigee)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::CallBuilder) -* **[Resources](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::CallBuilder) +* **[Resources](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::Part)** + * **[Parts](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -153,17 +153,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -173,29 +173,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::Delegate) to the -[Method Builder](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::Delegate) to the +[Method Builder](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::RequestValue) and -[decodable](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::RequestValue) and +[decodable](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-apigee1/5.0.2-beta-1+20230104/google_apigee1/client::RequestValue) are moved +* [request values](https://docs.rs/google-apigee1/5.0.2+20230104/google_apigee1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/apigee1/src/api.rs b/gen/apigee1/src/api.rs index d3a20aee72..2affa80dfa 100644 --- a/gen/apigee1/src/api.rs +++ b/gen/apigee1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Apigee { Apigee { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://apigee.googleapis.com/".to_string(), _root_url: "https://apigee.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> Apigee { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/apigee1/src/client.rs b/gen/apigee1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/apigee1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/apigee1/src/lib.rs b/gen/apigee1/src/lib.rs index 164a2f3aea..094c35302c 100644 --- a/gen/apigee1/src/lib.rs +++ b/gen/apigee1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Apigee* crate version *5.0.2-beta-1+20230104*, where *20230104* is the exact revision of the *apigee:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Apigee* crate version *5.0.2+20230104*, where *20230104* is the exact revision of the *apigee:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Apigee* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/apigee-api-management/). diff --git a/gen/apikeys2-cli/Cargo.toml b/gen/apikeys2-cli/Cargo.toml index 99341ab23d..43aee9820d 100644 --- a/gen/apikeys2-cli/Cargo.toml +++ b/gen/apikeys2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-apikeys2-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Api Keys Service (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/apikeys2-cli" @@ -20,13 +20,13 @@ name = "apikeys2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-apikeys2] path = "../apikeys2" -version = "4.0.1+20220305" +version = "5.0.2+20230118" + diff --git a/gen/apikeys2-cli/README.md b/gen/apikeys2-cli/README.md index 71cceb3932..88f708a9c0 100644 --- a/gen/apikeys2-cli/README.md +++ b/gen/apikeys2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated 
from the *Api Keys Service* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Api Keys Service* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash apikeys2 [options] @@ -34,7 +34,6 @@ apikeys2 [options] operations get [-p ]... [-o ] projects - locations-keys-clone (-r )... [-p ]... [-o ] locations-keys-create (-r )... [-p ]... [-o ] locations-keys-delete [-p ]... [-o ] locations-keys-get [-p ]... [-o ] diff --git a/gen/apikeys2-cli/mkdocs.yml b/gen/apikeys2-cli/mkdocs.yml index b8e6c2264e..7d0f568a11 100644 --- a/gen/apikeys2-cli/mkdocs.yml +++ b/gen/apikeys2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Api Keys Service v4.0.1+20220305 +site_name: Api Keys Service v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-apikeys2-cli site_description: A complete library to interact with Api Keys Service (protocol v2) @@ -7,18 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/apikeys2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['keys_lookup-key.md', 'Keys', 'Lookup Key'] -- ['operations_get.md', 'Operations', 'Get'] -- ['projects_locations-keys-clone.md', 'Projects', 'Locations Keys Clone'] -- ['projects_locations-keys-create.md', 'Projects', 'Locations Keys Create'] -- ['projects_locations-keys-delete.md', 'Projects', 'Locations Keys Delete'] -- ['projects_locations-keys-get.md', 'Projects', 'Locations Keys Get'] -- ['projects_locations-keys-get-key-string.md', 'Projects', 'Locations Keys Get Key String'] -- ['projects_locations-keys-list.md', 'Projects', 'Locations Keys List'] -- ['projects_locations-keys-patch.md', 'Projects', 'Locations Keys Patch'] -- ['projects_locations-keys-undelete.md', 'Projects', 'Locations Keys Undelete'] +nav: +- Home: 'index.md' +- 'Keys': + - 'Lookup Key': 'keys_lookup-key.md' +- 'Operations': + - 'Get': 'operations_get.md' +- 'Projects': + - 'Locations Keys Create': 'projects_locations-keys-create.md' 
+ - 'Locations Keys Delete': 'projects_locations-keys-delete.md' + - 'Locations Keys Get': 'projects_locations-keys-get.md' + - 'Locations Keys Get Key String': 'projects_locations-keys-get-key-string.md' + - 'Locations Keys List': 'projects_locations-keys-list.md' + - 'Locations Keys Patch': 'projects_locations-keys-patch.md' + - 'Locations Keys Undelete': 'projects_locations-keys-undelete.md' theme: readthedocs diff --git a/gen/apikeys2-cli/src/client.rs b/gen/apikeys2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/apikeys2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/apikeys2-cli/src/main.rs b/gen/apikeys2-cli/src/main.rs index b39ef04307..9d8364f39f 100644 --- a/gen/apikeys2-cli/src/main.rs +++ b/gen/apikeys2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_apikeys2::{api, Error, oauth2}; +use google_apikeys2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -159,91 +158,6 @@ where } } - async fn _projects_locations_keys_clone(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "key-id" => Some(("keyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["key-id"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::V2CloneKeyRequest = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_keys_clone(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut 
ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _projects_locations_keys_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -267,6 +181,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delete-time" => Some(("deleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -279,7 +194,7 @@ where "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-bundle-ids", "allowed-ips", "allowed-referrers", "browser-key-restrictions", "create-time", "delete-time", "display-name", "etag", "ios-key-restrictions", "key-string", "name", "restrictions", "server-key-restrictions", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-bundle-ids", "allowed-ips", "allowed-referrers", "annotations", "browser-key-restrictions", "create-time", "delete-time", "display-name", "etag", "ios-key-restrictions", "key-string", "name", "restrictions", "server-key-restrictions", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -510,16 +425,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = 
call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -534,7 +446,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "page-size", "page-token", "show-deleted"].iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); v } )); } } @@ -591,6 +503,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delete-time" => Some(("deleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -603,7 +516,7 @@ where "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-bundle-ids", "allowed-ips", "allowed-referrers", "browser-key-restrictions", "create-time", "delete-time", "display-name", "etag", "ios-key-restrictions", "key-string", "name", "restrictions", "server-key-restrictions", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-bundle-ids", "allowed-ips", "allowed-referrers", 
"annotations", "browser-key-restrictions", "create-time", "delete-time", "display-name", "etag", "ios-key-restrictions", "key-string", "name", "restrictions", "server-key-restrictions", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -618,7 +531,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -780,9 +693,6 @@ where }, ("projects", Some(opt)) => { match opt.subcommand() { - ("locations-keys-clone", Some(opt)) => { - call_result = self._projects_locations_keys_clone(opt, dry_run, &mut err).await; - }, ("locations-keys-create", Some(opt)) => { call_result = self._projects_locations_keys_create(opt, dry_run, &mut err).await; }, @@ -927,35 +837,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'locations-keys-clone', 'locations-keys-create', 'locations-keys-delete', 'locations-keys-get', 'locations-keys-get-key-string', 'locations-keys-list', 'locations-keys-patch' and 'locations-keys-undelete'", vec![ - ("locations-keys-clone", - Some(r##"DEPRECATED: API customers can call `GetKey` and then `CreateKey` methods to create a copy of an existing key. Retire `CloneKey` method to eliminate the unnessary method from API Keys API. Clones the existing key's restriction and display name to a new API key. The service account must have the `apikeys.keys.get` and `apikeys.keys.create` permissions in the project. NOTE: Key is a global resource; hence the only supported value for location is `global`."##), - "Details at http://byron.github.io/google-apis-rs/google_apikeys2_cli/projects_locations-keys-clone", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. 
The resource name of the API key to be cloned in the same project."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), + ("projects", "methods: 'locations-keys-create', 'locations-keys-delete', 'locations-keys-get', 'locations-keys-get-key-string', 'locations-keys-list', 'locations-keys-patch' and 'locations-keys-undelete'", vec![ ("locations-keys-create", Some(r##"Creates a new API key. NOTE: Key is a global resource; hence the only supported value for location is `global`."##), "Details at http://byron.github.io/google-apis-rs/google_apikeys2_cli/projects_locations-keys-create", @@ -1134,7 +1016,7 @@ async fn main() { let mut app = App::new("apikeys2") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230118") .about("Manages the API keys associated with developer projects.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_apikeys2_cli") .arg(Arg::with_name("url") diff --git a/gen/apikeys2/Cargo.toml b/gen/apikeys2/Cargo.toml index 098dee8004..ef0b7d5bd2 100644 --- a/gen/apikeys2/Cargo.toml +++ b/gen/apikeys2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-apikeys2" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Api Keys Service (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/apikeys2" homepage = "https://cloud.google.com/api-keys/docs" -documentation = "https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118" 
+documentation = "https://docs.rs/google-apikeys2/5.0.2+20230118" license = "MIT" keywords = ["apikeys", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/apikeys2/README.md b/gen/apikeys2/README.md index fd087712a7..a3cb0e7a5e 100644 --- a/gen/apikeys2/README.md +++ b/gen/apikeys2/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-apikeys2` library allows access to all features of the *Google Api Keys Service* service. -This documentation was generated from *Api Keys Service* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *apikeys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Api Keys Service* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *apikeys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Api Keys Service* *v2* API can be found at the [official documentation site](https://cloud.google.com/api-keys/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/ApiKeysService) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/ApiKeysService) ... 
* keys - * [*lookup key*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::KeyLookupKeyCall) -* [operations](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::Operation) - * [*get*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::OperationGetCall) + * [*lookup key*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::KeyLookupKeyCall) +* [operations](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::Operation) + * [*get*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::OperationGetCall) * projects - * [*locations keys create*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::ProjectLocationKeyCreateCall), [*locations keys delete*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::ProjectLocationKeyDeleteCall), [*locations keys get*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::ProjectLocationKeyGetCall), [*locations keys get key string*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::ProjectLocationKeyGetKeyStringCall), [*locations keys list*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::ProjectLocationKeyListCall), [*locations keys patch*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::ProjectLocationKeyPatchCall) and [*locations keys undelete*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/api::ProjectLocationKeyUndeleteCall) + * [*locations keys create*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::ProjectLocationKeyCreateCall), [*locations keys delete*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::ProjectLocationKeyDeleteCall), [*locations keys get*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::ProjectLocationKeyGetCall), [*locations keys get key 
string*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::ProjectLocationKeyGetKeyStringCall), [*locations keys list*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::ProjectLocationKeyListCall), [*locations keys patch*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::ProjectLocationKeyPatchCall) and [*locations keys undelete*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/api::ProjectLocationKeyUndeleteCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/ApiKeysService)** +* **[Hub](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/ApiKeysService)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::CallBuilder) -* **[Resources](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::CallBuilder) +* **[Resources](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::Part)** + * **[Parts](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::Part)** * a collection of properties * never directly used in 
*Activities* -* **[Activities](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::Delegate) to the -[Method Builder](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::Delegate) to the +[Method Builder](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::RequestValue) and -[decodable](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::RequestValue) and +[decodable](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-apikeys2/5.0.2-beta-1+20230118/google_apikeys2/client::RequestValue) are moved +* [request values](https://docs.rs/google-apikeys2/5.0.2+20230118/google_apikeys2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/apikeys2/src/api.rs b/gen/apikeys2/src/api.rs index af21053a5e..bb8bc39cd7 100644 --- a/gen/apikeys2/src/api.rs +++ b/gen/apikeys2/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> ApiKeysService { ApiKeysService { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://apikeys.googleapis.com/".to_string(), _root_url: "https://apikeys.googleapis.com/".to_string(), } @@ -147,7 +147,7 @@ impl<'a, S> ApiKeysService { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/apikeys2/src/client.rs b/gen/apikeys2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/apikeys2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/apikeys2/src/lib.rs b/gen/apikeys2/src/lib.rs index f6d0dda68a..27cd661e55 100644 --- a/gen/apikeys2/src/lib.rs +++ b/gen/apikeys2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Api Keys Service* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *apikeys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Api Keys Service* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *apikeys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Api Keys Service* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/api-keys/docs). diff --git a/gen/appengine1-cli/Cargo.toml b/gen/appengine1-cli/Cargo.toml index fbf052bbbc..034b156ee9 100644 --- a/gen/appengine1-cli/Cargo.toml +++ b/gen/appengine1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-appengine1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with appengine (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1-cli" @@ -20,13 +20,13 @@ name = "appengine1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-appengine1] path = "../appengine1" -version = "4.0.1+20220226" +version = "5.0.2+20230114" + diff --git a/gen/appengine1-cli/README.md b/gen/appengine1-cli/README.md index 58b168a85d..7453956090 100644 --- a/gen/appengine1-cli/README.md +++ b/gen/appengine1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # 
Usage -This documentation was generated from the *appengine* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *appengine* API at revision *20230114*. The CLI is at version *5.0.2*. ```bash appengine1 [options] @@ -68,6 +68,8 @@ appengine1 [options] services-versions-instances-list [-p ]... [-o ] services-versions-list [-p ]... [-o ] services-versions-patch (-r )... [-p ]... [-o ] + projects + locations-applications-get [-p ]... [-o ] appengine1 --help Configuration: diff --git a/gen/appengine1-cli/mkdocs.yml b/gen/appengine1-cli/mkdocs.yml index 1e7b644f58..b170d9ffcc 100644 --- a/gen/appengine1-cli/mkdocs.yml +++ b/gen/appengine1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: appengine v4.0.1+20220226 +site_name: appengine v5.0.2+20230114 site_url: http://byron.github.io/google-apis-rs/google-appengine1-cli site_description: A complete library to interact with appengine (protocol v1) @@ -7,46 +7,49 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['apps_authorized-certificates-create.md', 'Apps', 'Authorized Certificates Create'] -- ['apps_authorized-certificates-delete.md', 'Apps', 'Authorized Certificates Delete'] -- ['apps_authorized-certificates-get.md', 'Apps', 'Authorized Certificates Get'] -- ['apps_authorized-certificates-list.md', 'Apps', 'Authorized Certificates List'] -- ['apps_authorized-certificates-patch.md', 'Apps', 'Authorized Certificates Patch'] -- ['apps_authorized-domains-list.md', 'Apps', 'Authorized Domains List'] -- ['apps_create.md', 'Apps', 'Create'] -- ['apps_domain-mappings-create.md', 'Apps', 'Domain Mappings Create'] -- ['apps_domain-mappings-delete.md', 'Apps', 'Domain Mappings Delete'] -- ['apps_domain-mappings-get.md', 'Apps', 'Domain Mappings Get'] -- ['apps_domain-mappings-list.md', 'Apps', 'Domain Mappings List'] -- ['apps_domain-mappings-patch.md', 'Apps', 'Domain Mappings 
Patch'] -- ['apps_firewall-ingress-rules-batch-update.md', 'Apps', 'Firewall Ingress Rules Batch Update'] -- ['apps_firewall-ingress-rules-create.md', 'Apps', 'Firewall Ingress Rules Create'] -- ['apps_firewall-ingress-rules-delete.md', 'Apps', 'Firewall Ingress Rules Delete'] -- ['apps_firewall-ingress-rules-get.md', 'Apps', 'Firewall Ingress Rules Get'] -- ['apps_firewall-ingress-rules-list.md', 'Apps', 'Firewall Ingress Rules List'] -- ['apps_firewall-ingress-rules-patch.md', 'Apps', 'Firewall Ingress Rules Patch'] -- ['apps_get.md', 'Apps', 'Get'] -- ['apps_locations-get.md', 'Apps', 'Locations Get'] -- ['apps_locations-list.md', 'Apps', 'Locations List'] -- ['apps_operations-get.md', 'Apps', 'Operations Get'] -- ['apps_operations-list.md', 'Apps', 'Operations List'] -- ['apps_patch.md', 'Apps', 'Patch'] -- ['apps_repair.md', 'Apps', 'Repair'] -- ['apps_services-delete.md', 'Apps', 'Services Delete'] -- ['apps_services-get.md', 'Apps', 'Services Get'] -- ['apps_services-list.md', 'Apps', 'Services List'] -- ['apps_services-patch.md', 'Apps', 'Services Patch'] -- ['apps_services-versions-create.md', 'Apps', 'Services Versions Create'] -- ['apps_services-versions-delete.md', 'Apps', 'Services Versions Delete'] -- ['apps_services-versions-get.md', 'Apps', 'Services Versions Get'] -- ['apps_services-versions-instances-debug.md', 'Apps', 'Services Versions Instances Debug'] -- ['apps_services-versions-instances-delete.md', 'Apps', 'Services Versions Instances Delete'] -- ['apps_services-versions-instances-get.md', 'Apps', 'Services Versions Instances Get'] -- ['apps_services-versions-instances-list.md', 'Apps', 'Services Versions Instances List'] -- ['apps_services-versions-list.md', 'Apps', 'Services Versions List'] -- ['apps_services-versions-patch.md', 'Apps', 'Services Versions Patch'] +nav: +- Home: 'index.md' +- 'Apps': + - 'Authorized Certificates Create': 'apps_authorized-certificates-create.md' + - 'Authorized Certificates Delete': 
'apps_authorized-certificates-delete.md' + - 'Authorized Certificates Get': 'apps_authorized-certificates-get.md' + - 'Authorized Certificates List': 'apps_authorized-certificates-list.md' + - 'Authorized Certificates Patch': 'apps_authorized-certificates-patch.md' + - 'Authorized Domains List': 'apps_authorized-domains-list.md' + - 'Create': 'apps_create.md' + - 'Domain Mappings Create': 'apps_domain-mappings-create.md' + - 'Domain Mappings Delete': 'apps_domain-mappings-delete.md' + - 'Domain Mappings Get': 'apps_domain-mappings-get.md' + - 'Domain Mappings List': 'apps_domain-mappings-list.md' + - 'Domain Mappings Patch': 'apps_domain-mappings-patch.md' + - 'Firewall Ingress Rules Batch Update': 'apps_firewall-ingress-rules-batch-update.md' + - 'Firewall Ingress Rules Create': 'apps_firewall-ingress-rules-create.md' + - 'Firewall Ingress Rules Delete': 'apps_firewall-ingress-rules-delete.md' + - 'Firewall Ingress Rules Get': 'apps_firewall-ingress-rules-get.md' + - 'Firewall Ingress Rules List': 'apps_firewall-ingress-rules-list.md' + - 'Firewall Ingress Rules Patch': 'apps_firewall-ingress-rules-patch.md' + - 'Get': 'apps_get.md' + - 'Locations Get': 'apps_locations-get.md' + - 'Locations List': 'apps_locations-list.md' + - 'Operations Get': 'apps_operations-get.md' + - 'Operations List': 'apps_operations-list.md' + - 'Patch': 'apps_patch.md' + - 'Repair': 'apps_repair.md' + - 'Services Delete': 'apps_services-delete.md' + - 'Services Get': 'apps_services-get.md' + - 'Services List': 'apps_services-list.md' + - 'Services Patch': 'apps_services-patch.md' + - 'Services Versions Create': 'apps_services-versions-create.md' + - 'Services Versions Delete': 'apps_services-versions-delete.md' + - 'Services Versions Get': 'apps_services-versions-get.md' + - 'Services Versions Instances Debug': 'apps_services-versions-instances-debug.md' + - 'Services Versions Instances Delete': 'apps_services-versions-instances-delete.md' + - 'Services Versions Instances Get': 
'apps_services-versions-instances-get.md' + - 'Services Versions Instances List': 'apps_services-versions-instances-list.md' + - 'Services Versions List': 'apps_services-versions-list.md' + - 'Services Versions Patch': 'apps_services-versions-patch.md' +- 'Projects': + - 'Locations Applications Get': 'projects_locations-applications-get.md' theme: readthedocs diff --git a/gen/appengine1-cli/src/client.rs b/gen/appengine1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/appengine1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/appengine1-cli/src/main.rs b/gen/appengine1-cli/src/main.rs index 5ddb848c2e..ef0c465896 100644 --- a/gen/appengine1-cli/src/main.rs +++ b/gen/appengine1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_appengine1::{api, Error, oauth2}; +use google_appengine1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -267,7 +266,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -366,7 +365,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -425,7 +424,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -783,7 +782,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, 
"page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -876,7 +875,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1211,7 +1210,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "matching-address" => { call = call.matching_address(value.unwrap_or("")); @@ -1306,7 +1305,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1469,7 +1468,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1583,7 +1582,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1692,7 +1691,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1939,7 +1938,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2033,10 +2032,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "migrate-traffic" => { - call = call.migrate_traffic(arg_from_str(value.unwrap_or("false"), err, "migrate-traffic", "boolean")); + call = call.migrate_traffic( value.map(|v| arg_from_str(v, err, "migrate-traffic", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2113,6 +2112,7 @@ where "api-config.script" => Some(("apiConfig.script", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "api-config.security-level" => Some(("apiConfig.securityLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "api-config.url" => Some(("apiConfig.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "app-engine-apis" => Some(("appEngineApis", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "automatic-scaling.cool-down-period" => Some(("automaticScaling.coolDownPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "automatic-scaling.cpu-utilization.aggregation-window-length" => Some(("automaticScaling.cpuUtilization.aggregationWindowLength", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"automatic-scaling.cpu-utilization.target-utilization" => Some(("automaticScaling.cpuUtilization.targetUtilization", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -2177,6 +2177,7 @@ where "manual-scaling.instances" => Some(("manualScaling.instances", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network.forwarded-ports" => Some(("network.forwardedPorts", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "network.instance-ip-mode" => Some(("network.instanceIpMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network.instance-tag" => Some(("network.instanceTag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network.name" => Some(("network.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network.session-affinity" => Some(("network.sessionAffinity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2206,7 +2207,7 @@ where "vpc-access-connector.name" => Some(("vpcAccessConnector.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zones" => Some(("zones", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["aggregation-window-length", "api-config", "app-start-timeout", "app-yaml-path", "auth-fail-action", "automatic-scaling", "basic-scaling", "beta-settings", "build-env-variables", "check-interval", "cloud-build-options", "cloud-build-timeout", "config-id", "container", "cool-down-period", "cpu", "cpu-utilization", "create-time", "created-by", "default-expiration", "deployment", "disable-health-check", "disable-trace-sampling", "disk-gb", "disk-usage-bytes", "disk-utilization", "egress-setting", "endpoints-api-service", "entrypoint", "env", "env-variables", "failure-threshold", "files-count", "forwarded-ports", 
"health-check", "healthy-threshold", "host", "id", "idle-timeout", "image", "inbound-services", "initial-delay", "instance-class", "instance-tag", "instances", "kms-key-reference", "liveness-check", "login", "manual-scaling", "max-concurrent-requests", "max-idle-instances", "max-instances", "max-pending-latency", "max-total-instances", "memory-gb", "min-idle-instances", "min-instances", "min-pending-latency", "min-total-instances", "name", "network", "network-utilization", "nobuild-files-regex", "path", "readiness-check", "request-utilization", "resources", "restart-threshold", "rollout-strategy", "runtime", "runtime-api-version", "runtime-channel", "runtime-main-executable-path", "script", "security-level", "service-account", "serving-status", "session-affinity", "shell", "source-url", "standard-scheduler-settings", "subnetwork-name", "success-threshold", "target-concurrent-requests", "target-cpu-utilization", "target-read-bytes-per-second", "target-read-ops-per-second", "target-received-bytes-per-second", "target-received-packets-per-second", "target-request-count-per-second", "target-sent-bytes-per-second", "target-sent-packets-per-second", "target-throughput-utilization", "target-utilization", "target-write-bytes-per-second", "target-write-ops-per-second", "threadsafe", "timeout", "unhealthy-threshold", "url", "version-url", "vm", "vpc-access-connector", "zip", "zones"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["aggregation-window-length", "api-config", "app-engine-apis", "app-start-timeout", "app-yaml-path", "auth-fail-action", "automatic-scaling", "basic-scaling", "beta-settings", "build-env-variables", "check-interval", "cloud-build-options", "cloud-build-timeout", "config-id", "container", "cool-down-period", "cpu", "cpu-utilization", "create-time", "created-by", "default-expiration", "deployment", "disable-health-check", "disable-trace-sampling", "disk-gb", "disk-usage-bytes", "disk-utilization", "egress-setting", "endpoints-api-service", 
"entrypoint", "env", "env-variables", "failure-threshold", "files-count", "forwarded-ports", "health-check", "healthy-threshold", "host", "id", "idle-timeout", "image", "inbound-services", "initial-delay", "instance-class", "instance-ip-mode", "instance-tag", "instances", "kms-key-reference", "liveness-check", "login", "manual-scaling", "max-concurrent-requests", "max-idle-instances", "max-instances", "max-pending-latency", "max-total-instances", "memory-gb", "min-idle-instances", "min-instances", "min-pending-latency", "min-total-instances", "name", "network", "network-utilization", "nobuild-files-regex", "path", "readiness-check", "request-utilization", "resources", "restart-threshold", "rollout-strategy", "runtime", "runtime-api-version", "runtime-channel", "runtime-main-executable-path", "script", "security-level", "service-account", "serving-status", "session-affinity", "shell", "source-url", "standard-scheduler-settings", "subnetwork-name", "success-threshold", "target-concurrent-requests", "target-cpu-utilization", "target-read-bytes-per-second", "target-read-ops-per-second", "target-received-bytes-per-second", "target-received-packets-per-second", "target-request-count-per-second", "target-sent-bytes-per-second", "target-sent-packets-per-second", "target-throughput-utilization", "target-utilization", "target-write-bytes-per-second", "target-write-ops-per-second", "threadsafe", "timeout", "unhealthy-threshold", "url", "version-url", "vm", "vpc-access-connector", "zip", "zones"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2573,7 +2574,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2635,7 +2636,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2712,6 +2713,7 @@ where "api-config.script" => Some(("apiConfig.script", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "api-config.security-level" => Some(("apiConfig.securityLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "api-config.url" => Some(("apiConfig.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "app-engine-apis" => Some(("appEngineApis", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "automatic-scaling.cool-down-period" => Some(("automaticScaling.coolDownPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "automatic-scaling.cpu-utilization.aggregation-window-length" => Some(("automaticScaling.cpuUtilization.aggregationWindowLength", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "automatic-scaling.cpu-utilization.target-utilization" => Some(("automaticScaling.cpuUtilization.targetUtilization", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -2776,6 +2778,7 @@ where "manual-scaling.instances" => Some(("manualScaling.instances", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network.forwarded-ports" => Some(("network.forwardedPorts", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "network.instance-ip-mode" => Some(("network.instanceIpMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network.instance-tag" => Some(("network.instanceTag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network.name" => Some(("network.name", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network.session-affinity" => Some(("network.sessionAffinity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2805,7 +2808,7 @@ where "vpc-access-connector.name" => Some(("vpcAccessConnector.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zones" => Some(("zones", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["aggregation-window-length", "api-config", "app-start-timeout", "app-yaml-path", "auth-fail-action", "automatic-scaling", "basic-scaling", "beta-settings", "build-env-variables", "check-interval", "cloud-build-options", "cloud-build-timeout", "config-id", "container", "cool-down-period", "cpu", "cpu-utilization", "create-time", "created-by", "default-expiration", "deployment", "disable-health-check", "disable-trace-sampling", "disk-gb", "disk-usage-bytes", "disk-utilization", "egress-setting", "endpoints-api-service", "entrypoint", "env", "env-variables", "failure-threshold", "files-count", "forwarded-ports", "health-check", "healthy-threshold", "host", "id", "idle-timeout", "image", "inbound-services", "initial-delay", "instance-class", "instance-tag", "instances", "kms-key-reference", "liveness-check", "login", "manual-scaling", "max-concurrent-requests", "max-idle-instances", "max-instances", "max-pending-latency", "max-total-instances", "memory-gb", "min-idle-instances", "min-instances", "min-pending-latency", "min-total-instances", "name", "network", "network-utilization", "nobuild-files-regex", "path", "readiness-check", "request-utilization", "resources", "restart-threshold", "rollout-strategy", "runtime", "runtime-api-version", "runtime-channel", "runtime-main-executable-path", "script", "security-level", "service-account", "serving-status", "session-affinity", "shell", "source-url", "standard-scheduler-settings", "subnetwork-name", 
"success-threshold", "target-concurrent-requests", "target-cpu-utilization", "target-read-bytes-per-second", "target-read-ops-per-second", "target-received-bytes-per-second", "target-received-packets-per-second", "target-request-count-per-second", "target-sent-bytes-per-second", "target-sent-packets-per-second", "target-throughput-utilization", "target-utilization", "target-write-bytes-per-second", "target-write-ops-per-second", "threadsafe", "timeout", "unhealthy-threshold", "url", "version-url", "vm", "vpc-access-connector", "zip", "zones"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["aggregation-window-length", "api-config", "app-engine-apis", "app-start-timeout", "app-yaml-path", "auth-fail-action", "automatic-scaling", "basic-scaling", "beta-settings", "build-env-variables", "check-interval", "cloud-build-options", "cloud-build-timeout", "config-id", "container", "cool-down-period", "cpu", "cpu-utilization", "create-time", "created-by", "default-expiration", "deployment", "disable-health-check", "disable-trace-sampling", "disk-gb", "disk-usage-bytes", "disk-utilization", "egress-setting", "endpoints-api-service", "entrypoint", "env", "env-variables", "failure-threshold", "files-count", "forwarded-ports", "health-check", "healthy-threshold", "host", "id", "idle-timeout", "image", "inbound-services", "initial-delay", "instance-class", "instance-ip-mode", "instance-tag", "instances", "kms-key-reference", "liveness-check", "login", "manual-scaling", "max-concurrent-requests", "max-idle-instances", "max-instances", "max-pending-latency", "max-total-instances", "memory-gb", "min-idle-instances", "min-instances", "min-pending-latency", "min-total-instances", "name", "network", "network-utilization", "nobuild-files-regex", "path", "readiness-check", "request-utilization", "resources", "restart-threshold", "rollout-strategy", "runtime", "runtime-api-version", "runtime-channel", "runtime-main-executable-path", "script", "security-level", "service-account", 
"serving-status", "session-affinity", "shell", "source-url", "standard-scheduler-settings", "subnetwork-name", "success-threshold", "target-concurrent-requests", "target-cpu-utilization", "target-read-bytes-per-second", "target-read-ops-per-second", "target-received-bytes-per-second", "target-received-packets-per-second", "target-request-count-per-second", "target-sent-bytes-per-second", "target-sent-packets-per-second", "target-throughput-utilization", "target-utilization", "target-write-bytes-per-second", "target-write-ops-per-second", "threadsafe", "timeout", "unhealthy-threshold", "url", "version-url", "vm", "vpc-access-connector", "zip", "zones"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2820,7 +2823,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2869,6 +2872,58 @@ where } } + async fn _projects_locations_applications_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_applications_get(opt.value_of("projects-id").unwrap_or(""), opt.value_of("locations-id").unwrap_or(""), opt.value_of("applications-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -2996,6 +3051,17 @@ where } } }, + ("projects", Some(opt)) => { + match opt.subcommand() { + ("locations-applications-get", Some(opt)) => { + call_result = self._projects_locations_applications_get(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("projects".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, _ => { err.issues.push(CLIError::MissingCommandError); writeln!(io::stderr(), "{}\n", self.opt.usage()).ok(); @@ -4184,11 +4250,48 @@ async fn main() { ]), ]), + ("projects", "methods: 'locations-applications-get'", vec![ + ("locations-applications-get", + Some(r##"Gets information about an application."##), + "Details at http://byron.github.io/google-apis-rs/google_appengine1_cli/projects_locations-applications-get", + vec![ + (Some(r##"projects-id"##), + None, + Some(r##"Part of `name`. Name of the Application resource to get. 
Example: apps/myapp."##), + Some(true), + Some(false)), + + (Some(r##"locations-id"##), + None, + Some(r##"Part of `name`. See documentation of `projectsId`."##), + Some(true), + Some(false)), + + (Some(r##"applications-id"##), + None, + Some(r##"Part of `name`. See documentation of `projectsId`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ]; let mut app = App::new("appengine1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230114") .about("Provisions and manages developers' App Engine applications.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_appengine1_cli") .arg(Arg::with_name("url") diff --git a/gen/appengine1/Cargo.toml b/gen/appengine1/Cargo.toml index 96e89c06d0..483c0ce042 100644 --- a/gen/appengine1/Cargo.toml +++ b/gen/appengine1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-appengine1" -version = "5.0.2-beta-1+20230114" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with appengine (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1" homepage = "https://cloud.google.com/appengine/docs/admin-api/" -documentation = "https://docs.rs/google-appengine1/5.0.2-beta-1+20230114" +documentation = "https://docs.rs/google-appengine1/5.0.2+20230114" license = "MIT" keywords = ["appengine", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/appengine1/README.md b/gen/appengine1/README.md index bcd4ecad66..f079469146 100644 --- a/gen/appengine1/README.md +++ b/gen/appengine1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! 
--> The `google-appengine1` library allows access to all features of the *Google appengine* service. -This documentation was generated from *appengine* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *appengine:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *appengine* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *appengine:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *appengine* *v1* API can be found at the [official documentation site](https://cloud.google.com/appengine/docs/admin-api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/Appengine) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/Appengine) ... 
* apps - * [*authorized certificates create*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppAuthorizedCertificateCreateCall), [*authorized certificates delete*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppAuthorizedCertificateDeleteCall), [*authorized certificates get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppAuthorizedCertificateGetCall), [*authorized certificates list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppAuthorizedCertificateListCall), [*authorized certificates patch*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppAuthorizedCertificatePatchCall), [*authorized domains list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppAuthorizedDomainListCall), [*create*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppCreateCall), [*domain mappings create*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppDomainMappingCreateCall), [*domain mappings delete*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppDomainMappingDeleteCall), [*domain mappings get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppDomainMappingGetCall), [*domain mappings list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppDomainMappingListCall), [*domain mappings patch*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppDomainMappingPatchCall), [*firewall ingress rules batch update*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppFirewallIngressRuleBatchUpdateCall), [*firewall ingress rules create*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppFirewallIngressRuleCreateCall), [*firewall ingress rules 
delete*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppFirewallIngressRuleDeleteCall), [*firewall ingress rules get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppFirewallIngressRuleGetCall), [*firewall ingress rules list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppFirewallIngressRuleListCall), [*firewall ingress rules patch*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppFirewallIngressRulePatchCall), [*get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppGetCall), [*locations get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppLocationGetCall), [*locations list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppLocationListCall), [*operations get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppOperationGetCall), [*operations list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppOperationListCall), [*patch*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppPatchCall), [*repair*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppRepairCall), [*services delete*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceDeleteCall), [*services get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceGetCall), [*services list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceListCall), [*services patch*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServicePatchCall), [*services versions create*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionCreateCall), [*services versions 
delete*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionDeleteCall), [*services versions get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionGetCall), [*services versions instances debug*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionInstanceDebugCall), [*services versions instances delete*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionInstanceDeleteCall), [*services versions instances get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionInstanceGetCall), [*services versions instances list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionInstanceListCall), [*services versions list*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionListCall) and [*services versions patch*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::AppServiceVersionPatchCall) + * [*authorized certificates create*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppAuthorizedCertificateCreateCall), [*authorized certificates delete*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppAuthorizedCertificateDeleteCall), [*authorized certificates get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppAuthorizedCertificateGetCall), [*authorized certificates list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppAuthorizedCertificateListCall), [*authorized certificates patch*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppAuthorizedCertificatePatchCall), [*authorized domains list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppAuthorizedDomainListCall), 
[*create*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppCreateCall), [*domain mappings create*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppDomainMappingCreateCall), [*domain mappings delete*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppDomainMappingDeleteCall), [*domain mappings get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppDomainMappingGetCall), [*domain mappings list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppDomainMappingListCall), [*domain mappings patch*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppDomainMappingPatchCall), [*firewall ingress rules batch update*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppFirewallIngressRuleBatchUpdateCall), [*firewall ingress rules create*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppFirewallIngressRuleCreateCall), [*firewall ingress rules delete*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppFirewallIngressRuleDeleteCall), [*firewall ingress rules get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppFirewallIngressRuleGetCall), [*firewall ingress rules list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppFirewallIngressRuleListCall), [*firewall ingress rules patch*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppFirewallIngressRulePatchCall), [*get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppGetCall), [*locations get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppLocationGetCall), [*locations list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppLocationListCall), [*operations 
get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppOperationGetCall), [*operations list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppOperationListCall), [*patch*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppPatchCall), [*repair*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppRepairCall), [*services delete*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceDeleteCall), [*services get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceGetCall), [*services list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceListCall), [*services patch*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServicePatchCall), [*services versions create*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionCreateCall), [*services versions delete*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionDeleteCall), [*services versions get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionGetCall), [*services versions instances debug*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionInstanceDebugCall), [*services versions instances delete*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionInstanceDeleteCall), [*services versions instances get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionInstanceGetCall), [*services versions instances list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionInstanceListCall), [*services versions list*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionListCall) and [*services versions 
patch*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::AppServiceVersionPatchCall) * projects - * [*locations applications get*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/api::ProjectLocationApplicationGetCall) + * [*locations applications get*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/api::ProjectLocationApplicationGetCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/Appengine)** +* **[Hub](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/Appengine)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::CallBuilder) -* **[Resources](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::CallBuilder) +* **[Resources](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::Part)** + * **[Parts](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -139,17 +139,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -159,29 +159,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::Delegate) to the -[Method Builder](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::Delegate) to the +[Method Builder](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::RequestValue) and -[decodable](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::RequestValue) and +[decodable](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-appengine1/5.0.2-beta-1+20230114/google_appengine1/client::RequestValue) are moved +* [request values](https://docs.rs/google-appengine1/5.0.2+20230114/google_appengine1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/appengine1/src/api.rs b/gen/appengine1/src/api.rs index d2b70522ec..ba05025dab 100644 --- a/gen/appengine1/src/api.rs +++ b/gen/appengine1/src/api.rs @@ -133,7 +133,7 @@ impl<'a, S> Appengine { Appengine { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://appengine.googleapis.com/".to_string(), _root_url: "https://appengine.googleapis.com/".to_string(), } @@ -147,7 +147,7 @@ impl<'a, S> Appengine { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/appengine1/src/client.rs b/gen/appengine1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/appengine1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/appengine1/src/lib.rs b/gen/appengine1/src/lib.rs index a731797f29..a4bd2ee019 100644 --- a/gen/appengine1/src/lib.rs +++ b/gen/appengine1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *appengine* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *appengine:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *appengine* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *appengine:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *appengine* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/appengine/docs/admin-api/). diff --git a/gen/appengine1_beta4-cli/Cargo.toml b/gen/appengine1_beta4-cli/Cargo.toml index 3990c58f0f..e74a2d92c5 100644 --- a/gen/appengine1_beta4-cli/Cargo.toml +++ b/gen/appengine1_beta4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-appengine1_beta4-cli" -version = "4.0.1+20181005" +version = "5.0.2+20181005" authors = ["Sebastian Thiel "] description = "A complete library to interact with appengine (protocol v1beta4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1_beta4-cli" @@ -20,13 +20,13 @@ name = "appengine1-beta4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-appengine1_beta4] path = "../appengine1_beta4" -version = "4.0.1+20181005" +version = "5.0.2+20181005" + diff --git a/gen/appengine1_beta4-cli/README.md b/gen/appengine1_beta4-cli/README.md index 6b85f822b0..0447b35fce 100644 --- a/gen/appengine1_beta4-cli/README.md +++ b/gen/appengine1_beta4-cli/README.md @@ -25,7 +25,7 @@ Find 
the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *appengine* API at revision *20181005*. The CLI is at version *4.0.1*. +This documentation was generated from the *appengine* API at revision *20181005*. The CLI is at version *5.0.2*. ```bash appengine1-beta4 [options] diff --git a/gen/appengine1_beta4-cli/mkdocs.yml b/gen/appengine1_beta4-cli/mkdocs.yml index 3996e558fd..ed90df1f00 100644 --- a/gen/appengine1_beta4-cli/mkdocs.yml +++ b/gen/appengine1_beta4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: appengine v4.0.1+20181005 +site_name: appengine v5.0.2+20181005 site_url: http://byron.github.io/google-apis-rs/google-appengine1_beta4-cli site_description: A complete library to interact with appengine (protocol v1beta4) @@ -7,28 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1_beta4 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['apps_create.md', 'Apps', 'Create'] -- ['apps_get.md', 'Apps', 'Get'] -- ['apps_locations-get.md', 'Apps', 'Locations Get'] -- ['apps_locations-list.md', 'Apps', 'Locations List'] -- ['apps_modules-delete.md', 'Apps', 'Modules Delete'] -- ['apps_modules-get.md', 'Apps', 'Modules Get'] -- ['apps_modules-list.md', 'Apps', 'Modules List'] -- ['apps_modules-patch.md', 'Apps', 'Modules Patch'] -- ['apps_modules-versions-create.md', 'Apps', 'Modules Versions Create'] -- ['apps_modules-versions-delete.md', 'Apps', 'Modules Versions Delete'] -- ['apps_modules-versions-get.md', 'Apps', 'Modules Versions Get'] -- ['apps_modules-versions-instances-debug.md', 'Apps', 'Modules Versions Instances Debug'] -- ['apps_modules-versions-instances-delete.md', 'Apps', 'Modules Versions Instances Delete'] -- ['apps_modules-versions-instances-get.md', 'Apps', 'Modules Versions Instances Get'] -- ['apps_modules-versions-instances-list.md', 'Apps', 'Modules Versions Instances List'] -- ['apps_modules-versions-list.md', 'Apps', 
'Modules Versions List'] -- ['apps_modules-versions-patch.md', 'Apps', 'Modules Versions Patch'] -- ['apps_operations-get.md', 'Apps', 'Operations Get'] -- ['apps_operations-list.md', 'Apps', 'Operations List'] -- ['apps_patch.md', 'Apps', 'Patch'] +nav: +- Home: 'index.md' +- 'Apps': + - 'Create': 'apps_create.md' + - 'Get': 'apps_get.md' + - 'Locations Get': 'apps_locations-get.md' + - 'Locations List': 'apps_locations-list.md' + - 'Modules Delete': 'apps_modules-delete.md' + - 'Modules Get': 'apps_modules-get.md' + - 'Modules List': 'apps_modules-list.md' + - 'Modules Patch': 'apps_modules-patch.md' + - 'Modules Versions Create': 'apps_modules-versions-create.md' + - 'Modules Versions Delete': 'apps_modules-versions-delete.md' + - 'Modules Versions Get': 'apps_modules-versions-get.md' + - 'Modules Versions Instances Debug': 'apps_modules-versions-instances-debug.md' + - 'Modules Versions Instances Delete': 'apps_modules-versions-instances-delete.md' + - 'Modules Versions Instances Get': 'apps_modules-versions-instances-get.md' + - 'Modules Versions Instances List': 'apps_modules-versions-instances-list.md' + - 'Modules Versions List': 'apps_modules-versions-list.md' + - 'Modules Versions Patch': 'apps_modules-versions-patch.md' + - 'Operations Get': 'apps_operations-get.md' + - 'Operations List': 'apps_operations-list.md' + - 'Patch': 'apps_patch.md' theme: readthedocs diff --git a/gen/appengine1_beta4-cli/src/client.rs b/gen/appengine1_beta4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/appengine1_beta4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, 
PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/appengine1_beta4-cli/src/main.rs b/gen/appengine1_beta4-cli/src/main.rs index a1c2e80fde..9cb1ba342b 100644 --- a/gen/appengine1_beta4-cli/src/main.rs +++ b/gen/appengine1_beta4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_appengine1_beta4::{api, Error, oauth2}; +use google_appengine1_beta4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -154,7 +153,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ensure-resources-exist" => { - call = call.ensure_resources_exist(arg_from_str(value.unwrap_or("false"), err, "ensure-resources-exist", "boolean")); + call = call.ensure_resources_exist( value.map(|v| arg_from_str(v, err, "ensure-resources-exist", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -265,7 +264,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -431,7 +430,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -523,10 
+522,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "migrate-traffic" => { - call = call.migrate_traffic(arg_from_str(value.unwrap_or("false"), err, "migrate-traffic", "boolean")); + call = call.migrate_traffic( value.map(|v| arg_from_str(v, err, "migrate-traffic", "boolean")).unwrap_or(false)); }, "mask" => { - call = call.mask(value.unwrap_or("")); + call = call.mask( value.map(|v| arg_from_str(v, err, "mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1028,7 +1027,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1090,7 +1089,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1240,7 +1239,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "mask" => { - call = call.mask(value.unwrap_or("")); + call = call.mask( value.map(|v| arg_from_str(v, err, "mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1351,7 +1350,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1454,7 +1453,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "mask" => { - call = call.mask(value.unwrap_or("")); + call = call.mask( value.map(|v| arg_from_str(v, 
err, "mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2278,7 +2277,7 @@ async fn main() { let mut app = App::new("appengine1-beta4") .author("Sebastian Thiel ") - .version("4.0.1+20181005") + .version("5.0.2+20181005") .about("The App Engine Admin API enables developers to provision and manage their App Engine applications.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_appengine1_beta4_cli") .arg(Arg::with_name("url") diff --git a/gen/appengine1_beta4/Cargo.toml b/gen/appengine1_beta4/Cargo.toml index e5ec03fb4c..b01267ddd7 100644 --- a/gen/appengine1_beta4/Cargo.toml +++ b/gen/appengine1_beta4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-appengine1_beta4" -version = "5.0.2-beta-1+20181005" +version = "5.0.2+20181005" authors = ["Sebastian Thiel "] description = "A complete library to interact with appengine (protocol v1beta4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1_beta4" homepage = "https://cloud.google.com/appengine/docs/admin-api/" -documentation = "https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005" +documentation = "https://docs.rs/google-appengine1_beta4/5.0.2+20181005" license = "MIT" keywords = ["appengine", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/appengine1_beta4/README.md b/gen/appengine1_beta4/README.md index 6ebf9a8192..2402531b23 100644 --- a/gen/appengine1_beta4/README.md +++ b/gen/appengine1_beta4/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-appengine1_beta4` library allows access to all features of the *Google appengine* service. -This documentation was generated from *appengine* crate version *5.0.2-beta-1+20181005*, where *20181005* is the exact revision of the *appengine:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *appengine* crate version *5.0.2+20181005*, where *20181005* is the exact revision of the *appengine:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *appengine* *v1_beta4* API can be found at the [official documentation site](https://cloud.google.com/appengine/docs/admin-api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/Appengine) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/Appengine) ... * apps - * [*create*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppCreateCall), [*get*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppGetCall), [*locations get*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppLocationGetCall), [*locations list*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppLocationListCall), [*modules delete*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleDeleteCall), [*modules get*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleGetCall), [*modules list*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleListCall), [*modules patch*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModulePatchCall), [*modules versions create*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionCreateCall), [*modules versions 
delete*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionDeleteCall), [*modules versions get*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionGetCall), [*modules versions instances debug*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionInstanceDebugCall), [*modules versions instances delete*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionInstanceDeleteCall), [*modules versions instances get*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionInstanceGetCall), [*modules versions instances list*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionInstanceListCall), [*modules versions list*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionListCall), [*modules versions patch*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppModuleVersionPatchCall), [*operations get*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppOperationGetCall), [*operations list*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppOperationListCall) and [*patch*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/api::AppPatchCall) + * [*create*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppCreateCall), [*get*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppGetCall), [*locations get*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppLocationGetCall), [*locations 
list*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppLocationListCall), [*modules delete*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleDeleteCall), [*modules get*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleGetCall), [*modules list*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleListCall), [*modules patch*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModulePatchCall), [*modules versions create*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionCreateCall), [*modules versions delete*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionDeleteCall), [*modules versions get*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionGetCall), [*modules versions instances debug*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionInstanceDebugCall), [*modules versions instances delete*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionInstanceDeleteCall), [*modules versions instances get*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionInstanceGetCall), [*modules versions instances list*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionInstanceListCall), [*modules versions list*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionListCall), [*modules versions patch*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppModuleVersionPatchCall), [*operations 
get*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppOperationGetCall), [*operations list*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppOperationListCall) and [*patch*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/api::AppPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/Appengine)** +* **[Hub](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/Appengine)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::CallBuilder) -* **[Resources](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::CallBuilder) +* **[Resources](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::Part)** + * **[Parts](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::Part)** * a collection of properties * never 
directly used in *Activities* -* **[Activities](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::Delegate) to the -[Method Builder](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::Delegate) to the +[Method Builder](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::RequestValue) and -[decodable](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::RequestValue) and +[decodable](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-appengine1_beta4/5.0.2-beta-1+20181005/google_appengine1_beta4/client::RequestValue) are moved +* [request values](https://docs.rs/google-appengine1_beta4/5.0.2+20181005/google_appengine1_beta4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/appengine1_beta4/src/api.rs b/gen/appengine1_beta4/src/api.rs index 0624324ca3..1528103bde 100644 --- a/gen/appengine1_beta4/src/api.rs +++ b/gen/appengine1_beta4/src/api.rs @@ -133,7 +133,7 @@ impl<'a, S> Appengine { Appengine { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://appengine.googleapis.com/".to_string(), _root_url: "https://appengine.googleapis.com/".to_string(), } @@ -144,7 +144,7 @@ impl<'a, S> Appengine { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/appengine1_beta4/src/client.rs b/gen/appengine1_beta4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/appengine1_beta4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/appengine1_beta4/src/lib.rs b/gen/appengine1_beta4/src/lib.rs index ee4b6c21fc..97a9c3d915 100644 --- a/gen/appengine1_beta4/src/lib.rs +++ b/gen/appengine1_beta4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *appengine* crate version *5.0.2-beta-1+20181005*, where *20181005* is the exact revision of the *appengine:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *appengine* crate version *5.0.2+20181005*, where *20181005* is the exact revision of the *appengine:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *appengine* *v1_beta4* API can be found at the //! [official documentation site](https://cloud.google.com/appengine/docs/admin-api/). diff --git a/gen/appengine1_beta5-cli/Cargo.toml b/gen/appengine1_beta5-cli/Cargo.toml index faa8e717f8..7568f00b24 100644 --- a/gen/appengine1_beta5-cli/Cargo.toml +++ b/gen/appengine1_beta5-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-appengine1_beta5-cli" -version = "4.0.1+20181005" +version = "5.0.2+20181005" authors = ["Sebastian Thiel "] description = "A complete library to interact with appengine (protocol v1beta5)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1_beta5-cli" @@ -20,13 +20,13 @@ name = "appengine1-beta5" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-appengine1_beta5] path = "../appengine1_beta5" -version = "4.0.1+20181005" +version = "5.0.2+20181005" + diff --git a/gen/appengine1_beta5-cli/README.md b/gen/appengine1_beta5-cli/README.md index 793b1c4c59..2347081505 100644 --- a/gen/appengine1_beta5-cli/README.md +++ b/gen/appengine1_beta5-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *appengine* API at revision *20181005*. The CLI is at version *4.0.1*. +This documentation was generated from the *appengine* API at revision *20181005*. The CLI is at version *5.0.2*. ```bash appengine1-beta5 [options] diff --git a/gen/appengine1_beta5-cli/mkdocs.yml b/gen/appengine1_beta5-cli/mkdocs.yml index 8e03c1039f..b721f8d5ff 100644 --- a/gen/appengine1_beta5-cli/mkdocs.yml +++ b/gen/appengine1_beta5-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: appengine v4.0.1+20181005 +site_name: appengine v5.0.2+20181005 site_url: http://byron.github.io/google-apis-rs/google-appengine1_beta5-cli site_description: A complete library to interact with appengine (protocol v1beta5) @@ -7,28 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1_beta5 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['apps_create.md', 'Apps', 'Create'] -- ['apps_get.md', 'Apps', 'Get'] -- ['apps_locations-get.md', 'Apps', 'Locations Get'] -- ['apps_locations-list.md', 'Apps', 'Locations List'] -- ['apps_operations-get.md', 'Apps', 'Operations Get'] -- ['apps_operations-list.md', 'Apps', 'Operations List'] -- ['apps_patch.md', 'Apps', 'Patch'] -- ['apps_services-delete.md', 'Apps', 'Services Delete'] -- ['apps_services-get.md', 'Apps', 'Services Get'] -- ['apps_services-list.md', 'Apps', 'Services List'] -- ['apps_services-patch.md', 'Apps', 'Services Patch'] -- ['apps_services-versions-create.md', 'Apps', 'Services Versions Create'] -- ['apps_services-versions-delete.md', 'Apps', 'Services Versions Delete'] -- ['apps_services-versions-get.md', 'Apps', 'Services Versions Get'] -- ['apps_services-versions-instances-debug.md', 'Apps', 'Services Versions Instances Debug'] -- ['apps_services-versions-instances-delete.md', 'Apps', 'Services Versions Instances Delete'] -- 
['apps_services-versions-instances-get.md', 'Apps', 'Services Versions Instances Get'] -- ['apps_services-versions-instances-list.md', 'Apps', 'Services Versions Instances List'] -- ['apps_services-versions-list.md', 'Apps', 'Services Versions List'] -- ['apps_services-versions-patch.md', 'Apps', 'Services Versions Patch'] +nav: +- Home: 'index.md' +- 'Apps': + - 'Create': 'apps_create.md' + - 'Get': 'apps_get.md' + - 'Locations Get': 'apps_locations-get.md' + - 'Locations List': 'apps_locations-list.md' + - 'Operations Get': 'apps_operations-get.md' + - 'Operations List': 'apps_operations-list.md' + - 'Patch': 'apps_patch.md' + - 'Services Delete': 'apps_services-delete.md' + - 'Services Get': 'apps_services-get.md' + - 'Services List': 'apps_services-list.md' + - 'Services Patch': 'apps_services-patch.md' + - 'Services Versions Create': 'apps_services-versions-create.md' + - 'Services Versions Delete': 'apps_services-versions-delete.md' + - 'Services Versions Get': 'apps_services-versions-get.md' + - 'Services Versions Instances Debug': 'apps_services-versions-instances-debug.md' + - 'Services Versions Instances Delete': 'apps_services-versions-instances-delete.md' + - 'Services Versions Instances Get': 'apps_services-versions-instances-get.md' + - 'Services Versions Instances List': 'apps_services-versions-instances-list.md' + - 'Services Versions List': 'apps_services-versions-list.md' + - 'Services Versions Patch': 'apps_services-versions-patch.md' theme: readthedocs diff --git a/gen/appengine1_beta5-cli/src/client.rs b/gen/appengine1_beta5-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/appengine1_beta5-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; 
-use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/appengine1_beta5-cli/src/main.rs b/gen/appengine1_beta5-cli/src/main.rs index e54922c10a..2ffb339514 100644 --- a/gen/appengine1_beta5-cli/src/main.rs +++ b/gen/appengine1_beta5-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_appengine1_beta5::{api, Error, oauth2}; +use google_appengine1_beta5::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -154,7 +153,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ensure-resources-exist" => { - call = call.ensure_resources_exist(arg_from_str(value.unwrap_or("false"), err, "ensure-resources-exist", "boolean")); + call = call.ensure_resources_exist( value.map(|v| arg_from_str(v, err, "ensure-resources-exist", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -265,7 +264,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -379,7 +378,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -482,7 +481,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "mask" => { - call = call.mask(value.unwrap_or("")); + call = call.mask( value.map(|v| arg_from_str(v, err, "mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -645,7 +644,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -737,10 +736,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "migrate-traffic" => { - call = call.migrate_traffic(arg_from_str(value.unwrap_or("false"), err, "migrate-traffic", "boolean")); + call = call.migrate_traffic( value.map(|v| arg_from_str(v, err, "migrate-traffic", "boolean")).unwrap_or(false)); }, "mask" => { - call = call.mask(value.unwrap_or("")); + call = call.mask( value.map(|v| arg_from_str(v, err, "mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1244,7 +1243,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1306,7 +1305,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1458,7 +1457,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "mask" => { - call = call.mask(value.unwrap_or("")); + call = call.mask( value.map(|v| 
arg_from_str(v, err, "mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2282,7 +2281,7 @@ async fn main() { let mut app = App::new("appengine1-beta5") .author("Sebastian Thiel ") - .version("4.0.1+20181005") + .version("5.0.2+20181005") .about("The App Engine Admin API enables developers to provision and manage their App Engine applications.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_appengine1_beta5_cli") .arg(Arg::with_name("url") diff --git a/gen/appengine1_beta5/Cargo.toml b/gen/appengine1_beta5/Cargo.toml index ed4b4f1996..e0b77e7404 100644 --- a/gen/appengine1_beta5/Cargo.toml +++ b/gen/appengine1_beta5/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-appengine1_beta5" -version = "5.0.2-beta-1+20181005" +version = "5.0.2+20181005" authors = ["Sebastian Thiel "] description = "A complete library to interact with appengine (protocol v1beta5)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appengine1_beta5" homepage = "https://cloud.google.com/appengine/docs/admin-api/" -documentation = "https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005" +documentation = "https://docs.rs/google-appengine1_beta5/5.0.2+20181005" license = "MIT" keywords = ["appengine", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/appengine1_beta5/README.md b/gen/appengine1_beta5/README.md index c4391f09d7..878037242f 100644 --- a/gen/appengine1_beta5/README.md +++ b/gen/appengine1_beta5/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-appengine1_beta5` library allows access to all features of the *Google appengine* service. -This documentation was generated from *appengine* crate version *5.0.2-beta-1+20181005*, where *20181005* is the exact revision of the *appengine:v1beta5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *appengine* crate version *5.0.2+20181005*, where *20181005* is the exact revision of the *appengine:v1beta5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *appengine* *v1_beta5* API can be found at the [official documentation site](https://cloud.google.com/appengine/docs/admin-api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/Appengine) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/Appengine) ... * apps - * [*create*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppCreateCall), [*get*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppGetCall), [*locations get*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppLocationGetCall), [*locations list*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppLocationListCall), [*operations get*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppOperationGetCall), [*operations list*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppOperationListCall), [*patch*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppPatchCall), [*services delete*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceDeleteCall), [*services get*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceGetCall), [*services 
list*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceListCall), [*services patch*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServicePatchCall), [*services versions create*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionCreateCall), [*services versions delete*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionDeleteCall), [*services versions get*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionGetCall), [*services versions instances debug*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionInstanceDebugCall), [*services versions instances delete*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionInstanceDeleteCall), [*services versions instances get*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionInstanceGetCall), [*services versions instances list*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionInstanceListCall), [*services versions list*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionListCall) and [*services versions patch*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/api::AppServiceVersionPatchCall) + * [*create*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppCreateCall), [*get*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppGetCall), [*locations get*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppLocationGetCall), 
[*locations list*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppLocationListCall), [*operations get*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppOperationGetCall), [*operations list*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppOperationListCall), [*patch*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppPatchCall), [*services delete*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceDeleteCall), [*services get*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceGetCall), [*services list*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceListCall), [*services patch*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServicePatchCall), [*services versions create*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionCreateCall), [*services versions delete*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionDeleteCall), [*services versions get*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionGetCall), [*services versions instances debug*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionInstanceDebugCall), [*services versions instances delete*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionInstanceDeleteCall), [*services versions instances get*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionInstanceGetCall), [*services versions instances 
list*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionInstanceListCall), [*services versions list*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionListCall) and [*services versions patch*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/api::AppServiceVersionPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/Appengine)** +* **[Hub](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/Appengine)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::CallBuilder) -* **[Resources](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::CallBuilder) +* **[Resources](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::Part)** + * 
**[Parts](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::CallBuilder)** +* **[Activities](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::Delegate) to the -[Method Builder](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::Delegate) to the +[Method Builder](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::RequestValue) and -[decodable](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::RequestValue) and +[decodable](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-appengine1_beta5/5.0.2-beta-1+20181005/google_appengine1_beta5/client::RequestValue) are moved +* [request values](https://docs.rs/google-appengine1_beta5/5.0.2+20181005/google_appengine1_beta5/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/appengine1_beta5/src/api.rs b/gen/appengine1_beta5/src/api.rs index 6f64507134..c065f229b2 100644 --- a/gen/appengine1_beta5/src/api.rs +++ b/gen/appengine1_beta5/src/api.rs @@ -133,7 +133,7 @@ impl<'a, S> Appengine { Appengine { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://appengine.googleapis.com/".to_string(), _root_url: "https://appengine.googleapis.com/".to_string(), } @@ -144,7 +144,7 @@ impl<'a, S> Appengine { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/appengine1_beta5/src/client.rs b/gen/appengine1_beta5/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/appengine1_beta5/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/appengine1_beta5/src/lib.rs b/gen/appengine1_beta5/src/lib.rs index 621950daa9..41c7f1ce2b 100644 --- a/gen/appengine1_beta5/src/lib.rs +++ b/gen/appengine1_beta5/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *appengine* crate version *5.0.2-beta-1+20181005*, where *20181005* is the exact revision of the *appengine:v1beta5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *appengine* crate version *5.0.2+20181005*, where *20181005* is the exact revision of the *appengine:v1beta5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *appengine* *v1_beta5* API can be found at the //! [official documentation site](https://cloud.google.com/appengine/docs/admin-api/). diff --git a/gen/appsactivity1-cli/Cargo.toml b/gen/appsactivity1-cli/Cargo.toml index eff3dfefcb..da99f3e22f 100644 --- a/gen/appsactivity1-cli/Cargo.toml +++ b/gen/appsactivity1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-appsactivity1-cli" -version = "4.0.1+20200628" +version = "5.0.2+20200628" authors = ["Sebastian Thiel "] description = "A complete library to interact with appsactivity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appsactivity1-cli" @@ -20,13 +20,13 @@ name = "appsactivity1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-appsactivity1] path = "../appsactivity1" -version = "4.0.1+20200628" +version = "5.0.2+20200628" + diff --git a/gen/appsactivity1-cli/README.md b/gen/appsactivity1-cli/README.md index bad61319e7..28a1d52360 100644 --- a/gen/appsactivity1-cli/README.md +++ b/gen/appsactivity1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *appsactivity* API at revision *20200628*. The CLI is at version *4.0.1*. +This documentation was generated from the *appsactivity* API at revision *20200628*. The CLI is at version *5.0.2*. ```bash appsactivity1 [options] diff --git a/gen/appsactivity1-cli/mkdocs.yml b/gen/appsactivity1-cli/mkdocs.yml index b42c09da63..c282093173 100644 --- a/gen/appsactivity1-cli/mkdocs.yml +++ b/gen/appsactivity1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: appsactivity v4.0.1+20200628 +site_name: appsactivity v5.0.2+20200628 site_url: http://byron.github.io/google-apis-rs/google-appsactivity1-cli site_description: A complete library to interact with appsactivity (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/appsactivity1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['activities_list.md', 'Activities', 'List'] +nav: +- Home: 'index.md' +- 'Activities': + - 'List': 'activities_list.md' theme: readthedocs diff --git a/gen/appsactivity1-cli/src/client.rs b/gen/appsactivity1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/appsactivity1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - 
Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/appsactivity1-cli/src/main.rs b/gen/appsactivity1-cli/src/main.rs index 52a3732c83..93b8232a43 100644 --- a/gen/appsactivity1-cli/src/main.rs +++ b/gen/appsactivity1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_appsactivity1::{api, Error, oauth2}; +use google_appsactivity1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -67,7 +66,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "grouping-strategy" => { call = call.grouping_strategy(value.unwrap_or("")); @@ -234,7 +233,7 @@ async fn main() { let mut app = App::new("appsactivity1") .author("Sebastian Thiel ") - .version("4.0.1+20200628") + .version("5.0.2+20200628") .about("Provides a historical view of activity.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_appsactivity1_cli") .arg(Arg::with_name("url") diff --git a/gen/appsactivity1/Cargo.toml b/gen/appsactivity1/Cargo.toml index 2095ce6605..817c4dd9e9 100644 --- a/gen/appsactivity1/Cargo.toml +++ b/gen/appsactivity1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-appsactivity1" -version = "5.0.2-beta-1+20200628" +version = "5.0.2+20200628" authors = ["Sebastian Thiel "] description = "A complete library to 
interact with appsactivity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appsactivity1" homepage = "https://developers.google.com/google-apps/activity/" -documentation = "https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628" +documentation = "https://docs.rs/google-appsactivity1/5.0.2+20200628" license = "MIT" keywords = ["appsactivity", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/appsactivity1/README.md b/gen/appsactivity1/README.md index e72ab2ee25..30b8cf1b85 100644 --- a/gen/appsactivity1/README.md +++ b/gen/appsactivity1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-appsactivity1` library allows access to all features of the *Google appsactivity* service. -This documentation was generated from *appsactivity* crate version *5.0.2-beta-1+20200628*, where *20200628* is the exact revision of the *appsactivity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *appsactivity* crate version *5.0.2+20200628*, where *20200628* is the exact revision of the *appsactivity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *appsactivity* *v1* API can be found at the [official documentation site](https://developers.google.com/google-apps/activity/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/Appsactivity) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/Appsactivity) ... 
-* [activities](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/api::Activity) - * [*list*](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/api::ActivityListCall) +* [activities](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/api::Activity) + * [*list*](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/api::ActivityListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/Appsactivity)** +* **[Hub](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/Appsactivity)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::CallBuilder) -* **[Resources](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::CallBuilder) +* **[Resources](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::Part)** + * **[Parts](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::Part)** * a 
collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::Delegate) to the -[Method Builder](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::Delegate) to the +[Method Builder](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::RequestValue) and -[decodable](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::RequestValue) and +[decodable](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-appsactivity1/5.0.2-beta-1+20200628/google_appsactivity1/client::RequestValue) are moved +* [request values](https://docs.rs/google-appsactivity1/5.0.2+20200628/google_appsactivity1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/appsactivity1/src/api.rs b/gen/appsactivity1/src/api.rs index 7b26a20641..27f494cbea 100644 --- a/gen/appsactivity1/src/api.rs +++ b/gen/appsactivity1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Appsactivity { Appsactivity { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/appsactivity/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Appsactivity { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/appsactivity1/src/client.rs b/gen/appsactivity1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/appsactivity1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/appsactivity1/src/lib.rs b/gen/appsactivity1/src/lib.rs index 79c50e630c..9a1ef9c749 100644 --- a/gen/appsactivity1/src/lib.rs +++ b/gen/appsactivity1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *appsactivity* crate version *5.0.2-beta-1+20200628*, where *20200628* is the exact revision of the *appsactivity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *appsactivity* crate version *5.0.2+20200628*, where *20200628* is the exact revision of the *appsactivity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *appsactivity* *v1* API can be found at the //! [official documentation site](https://developers.google.com/google-apps/activity/). diff --git a/gen/appstate1-cli/Cargo.toml b/gen/appstate1-cli/Cargo.toml index 2dc555788b..8bb8b3fb1e 100644 --- a/gen/appstate1-cli/Cargo.toml +++ b/gen/appstate1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-appstate1-cli" -version = "4.0.1+20190627" +version = "5.0.2+20190627" authors = ["Sebastian Thiel "] description = "A complete library to interact with App State (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/appstate1-cli" @@ -20,13 +20,13 @@ name = "appstate1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-appstate1] path = "../appstate1" -version = "4.0.1+20190627" +version = "5.0.2+20190627" + diff --git a/gen/appstate1-cli/README.md b/gen/appstate1-cli/README.md index 7a77fba4bf..ecca050caf 100644 --- a/gen/appstate1-cli/README.md +++ b/gen/appstate1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # 
Usage -This documentation was generated from the *App State* API at revision *20190627*. The CLI is at version *4.0.1*. +This documentation was generated from the *App State* API at revision *20190627*. The CLI is at version *5.0.2*. ```bash appstate1 [options] diff --git a/gen/appstate1-cli/mkdocs.yml b/gen/appstate1-cli/mkdocs.yml index 0cbe50f3b1..e4faf11531 100644 --- a/gen/appstate1-cli/mkdocs.yml +++ b/gen/appstate1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: App State v4.0.1+20190627 +site_name: App State v5.0.2+20190627 site_url: http://byron.github.io/google-apis-rs/google-appstate1-cli site_description: A complete library to interact with App State (protocol v1) @@ -7,13 +7,14 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/appstate1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['states_clear.md', 'States', 'Clear'] -- ['states_delete.md', 'States', 'Delete'] -- ['states_get.md', 'States', 'Get'] -- ['states_list.md', 'States', 'List'] -- ['states_update.md', 'States', 'Update'] +nav: +- Home: 'index.md' +- 'States': + - 'Clear': 'states_clear.md' + - 'Delete': 'states_delete.md' + - 'Get': 'states_get.md' + - 'List': 'states_list.md' + - 'Update': 'states_update.md' theme: readthedocs diff --git a/gen/appstate1-cli/src/client.rs b/gen/appstate1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/appstate1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - 
Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/appstate1-cli/src/main.rs b/gen/appstate1-cli/src/main.rs index 3be6700151..ce6e539cd7 100644 --- a/gen/appstate1-cli/src/main.rs +++ b/gen/appstate1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_appstate1::{api, Error, oauth2}; +use google_appstate1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -213,7 +212,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "include-data" => { - call = call.include_data(arg_from_str(value.unwrap_or("false"), err, "include-data", "boolean")); + call = call.include_data( value.map(|v| arg_from_str(v, err, "include-data", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -562,7 +561,7 @@ async fn main() { let mut app = App::new("appstate1") .author("Sebastian Thiel ") - .version("4.0.1+20190627") + .version("5.0.2+20190627") .about("The Google App State API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_appstate1_cli") .arg(Arg::with_name("url") diff --git a/gen/appstate1/Cargo.toml b/gen/appstate1/Cargo.toml index f4cd310991..1200cffecf 100644 --- a/gen/appstate1/Cargo.toml +++ b/gen/appstate1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-appstate1" -version = "5.0.2-beta-1+20190627" +version = "5.0.2+20190627" authors = ["Sebastian Thiel "] description = "A complete library to interact with App State (protocol v1)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/appstate1" homepage = "https://developers.google.com/games/services/web/api/states" -documentation = "https://docs.rs/google-appstate1/5.0.2-beta-1+20190627" +documentation = "https://docs.rs/google-appstate1/5.0.2+20190627" license = "MIT" keywords = ["appstate", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/appstate1/README.md b/gen/appstate1/README.md index 12c28989e9..7c80e92d8b 100644 --- a/gen/appstate1/README.md +++ b/gen/appstate1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-appstate1` library allows access to all features of the *Google App State* service. -This documentation was generated from *App State* crate version *5.0.2-beta-1+20190627*, where *20190627* is the exact revision of the *appstate:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *App State* crate version *5.0.2+20190627*, where *20190627* is the exact revision of the *appstate:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *App State* *v1* API can be found at the [official documentation site](https://developers.google.com/games/services/web/api/states). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/AppState) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/AppState) ... 
* states - * [*clear*](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/api::StateClearCall), [*delete*](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/api::StateDeleteCall), [*get*](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/api::StateGetCall), [*list*](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/api::StateListCall) and [*update*](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/api::StateUpdateCall) + * [*clear*](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/api::StateClearCall), [*delete*](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/api::StateDeleteCall), [*get*](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/api::StateGetCall), [*list*](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/api::StateListCall) and [*update*](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/api::StateUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/AppState)** +* **[Hub](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/AppState)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::CallBuilder) -* **[Resources](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::CallBuilder) +* **[Resources](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::Part)** + * **[Parts](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. 
This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::Delegate) to the -[Method Builder](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::Delegate) to the +[Method Builder](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::RequestValue) and -[decodable](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::RequestValue) and +[decodable](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-appstate1/5.0.2-beta-1+20190627/google_appstate1/client::RequestValue) are moved +* [request values](https://docs.rs/google-appstate1/5.0.2+20190627/google_appstate1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/appstate1/src/api.rs b/gen/appstate1/src/api.rs index d012f1d2ea..2a8871651d 100644 --- a/gen/appstate1/src/api.rs +++ b/gen/appstate1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> AppState { AppState { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/appstate/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> AppState { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/appstate1/src/client.rs b/gen/appstate1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/appstate1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/appstate1/src/lib.rs b/gen/appstate1/src/lib.rs index 243711175a..a5c2e7dba1 100644 --- a/gen/appstate1/src/lib.rs +++ b/gen/appstate1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *App State* crate version *5.0.2-beta-1+20190627*, where *20190627* is the exact revision of the *appstate:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *App State* crate version *5.0.2+20190627*, where *20190627* is the exact revision of the *appstate:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *App State* *v1* API can be found at the //! [official documentation site](https://developers.google.com/games/services/web/api/states). diff --git a/gen/area120tables1_alpha1-cli/Cargo.toml b/gen/area120tables1_alpha1-cli/Cargo.toml index 15c62825f1..012a6c6091 100644 --- a/gen/area120tables1_alpha1-cli/Cargo.toml +++ b/gen/area120tables1_alpha1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-area120tables1_alpha1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Area120 Tables (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/area120tables1_alpha1-cli" @@ -20,13 +20,13 @@ name = "area120tables1-alpha1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-area120tables1_alpha1] path = "../area120tables1_alpha1" -version = "4.0.1+20220301" +version = "5.0.2+20230123" + diff --git a/gen/area120tables1_alpha1-cli/README.md b/gen/area120tables1_alpha1-cli/README.md index ae3af60e6f..48f43f8f7c 100644 --- 
a/gen/area120tables1_alpha1-cli/README.md +++ b/gen/area120tables1_alpha1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Area120 Tables* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Area120 Tables* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash area120tables1-alpha1 [options] diff --git a/gen/area120tables1_alpha1-cli/mkdocs.yml b/gen/area120tables1_alpha1-cli/mkdocs.yml index ffc6c7be89..dce53da3b3 100644 --- a/gen/area120tables1_alpha1-cli/mkdocs.yml +++ b/gen/area120tables1_alpha1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Area120 Tables v4.0.1+20220301 +site_name: Area120 Tables v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-area120tables1_alpha1-cli site_description: A complete library to interact with Area120 Tables (protocol v1alpha1) @@ -7,20 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/area120tables1_a docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['tables_get.md', 'Tables', 'Get'] -- ['tables_list.md', 'Tables', 'List'] -- ['tables_rows-batch-create.md', 'Tables', 'Rows Batch Create'] -- ['tables_rows-batch-delete.md', 'Tables', 'Rows Batch Delete'] -- ['tables_rows-batch-update.md', 'Tables', 'Rows Batch Update'] -- ['tables_rows-create.md', 'Tables', 'Rows Create'] -- ['tables_rows-delete.md', 'Tables', 'Rows Delete'] -- ['tables_rows-get.md', 'Tables', 'Rows Get'] -- ['tables_rows-list.md', 'Tables', 'Rows List'] -- ['tables_rows-patch.md', 'Tables', 'Rows Patch'] -- ['workspaces_get.md', 'Workspaces', 'Get'] -- ['workspaces_list.md', 'Workspaces', 'List'] +nav: +- Home: 'index.md' +- 'Tables': + - 'Get': 'tables_get.md' + - 'List': 'tables_list.md' + - 'Rows Batch Create': 'tables_rows-batch-create.md' + - 'Rows Batch Delete': 'tables_rows-batch-delete.md' + - 'Rows Batch Update': 
'tables_rows-batch-update.md' + - 'Rows Create': 'tables_rows-create.md' + - 'Rows Delete': 'tables_rows-delete.md' + - 'Rows Get': 'tables_rows-get.md' + - 'Rows List': 'tables_rows-list.md' + - 'Rows Patch': 'tables_rows-patch.md' +- 'Workspaces': + - 'Get': 'workspaces_get.md' + - 'List': 'workspaces_list.md' theme: readthedocs diff --git a/gen/area120tables1_alpha1-cli/src/client.rs b/gen/area120tables1_alpha1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/area120tables1_alpha1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/area120tables1_alpha1-cli/src/main.rs b/gen/area120tables1_alpha1-cli/src/main.rs index 7816fe871f..7622c99ad0 100644 --- a/gen/area120tables1_alpha1-cli/src/main.rs +++ b/gen/area120tables1_alpha1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_area120tables1_alpha1::{api, Error, oauth2}; +use google_area120tables1_alpha1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -630,7 +629,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -730,7 +729,7 @@ where call = call.view(value.unwrap_or("")); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -841,7 +840,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1312,7 +1311,7 @@ async fn main() { let mut app = App::new("area120tables1-alpha1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230123") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_area120tables1_alpha1_cli") .arg(Arg::with_name("url") diff --git a/gen/area120tables1_alpha1/Cargo.toml b/gen/area120tables1_alpha1/Cargo.toml index a8eb3a4bea..5592347d79 100644 --- a/gen/area120tables1_alpha1/Cargo.toml +++ b/gen/area120tables1_alpha1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-area120tables1_alpha1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Area120 Tables (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/area120tables1_alpha1" homepage = "https://support.google.com/area120-tables/answer/10011390" -documentation = "https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123" license = "MIT" keywords = ["area120tables", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/area120tables1_alpha1/README.md b/gen/area120tables1_alpha1/README.md index 9c9c8f9f6c..94d87a2256 100644 --- a/gen/area120tables1_alpha1/README.md +++ b/gen/area120tables1_alpha1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-area120tables1_alpha1` library allows access to all features of the *Google Area120 Tables* service. 
-This documentation was generated from *Area120 Tables* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *area120tables:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Area120 Tables* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *area120tables:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Area120 Tables* *v1_alpha1* API can be found at the [official documentation site](https://support.google.com/area120-tables/answer/10011390). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/Area120Tables) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/Area120Tables) ... 
-* [tables](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::Table) - * [*get*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableGetCall), [*list*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableListCall), [*rows batch create*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableRowBatchCreateCall), [*rows batch delete*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableRowBatchDeleteCall), [*rows batch update*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableRowBatchUpdateCall), [*rows create*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableRowCreateCall), [*rows delete*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableRowDeleteCall), [*rows get*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableRowGetCall), [*rows list*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableRowListCall) and [*rows patch*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::TableRowPatchCall) -* [workspaces](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::Workspace) - * [*get*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::WorkspaceGetCall) and [*list*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/api::WorkspaceListCall) +* 
[tables](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::Table) + * [*get*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableGetCall), [*list*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableListCall), [*rows batch create*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableRowBatchCreateCall), [*rows batch delete*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableRowBatchDeleteCall), [*rows batch update*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableRowBatchUpdateCall), [*rows create*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableRowCreateCall), [*rows delete*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableRowDeleteCall), [*rows get*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableRowGetCall), [*rows list*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableRowListCall) and [*rows patch*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::TableRowPatchCall) +* [workspaces](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::Workspace) + * [*get*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::WorkspaceGetCall) and [*list*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/api::WorkspaceListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/Area120Tables)** +* **[Hub](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/Area120Tables)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::CallBuilder) -* **[Resources](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::CallBuilder) +* **[Resources](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::Part)** + * **[Parts](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize 
them and ease browsing. @@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::Delegate) to the -[Method Builder](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::Delegate) to the +[Method Builder](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::RequestValue) and -[decodable](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::RequestValue) and +[decodable](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-area120tables1_alpha1/5.0.2-beta-1+20230123/google_area120tables1_alpha1/client::RequestValue) are moved +* [request values](https://docs.rs/google-area120tables1_alpha1/5.0.2+20230123/google_area120tables1_alpha1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/area120tables1_alpha1/src/api.rs b/gen/area120tables1_alpha1/src/api.rs index 84107293dc..f3087a1c1c 100644 --- a/gen/area120tables1_alpha1/src/api.rs +++ b/gen/area120tables1_alpha1/src/api.rs @@ -144,7 +144,7 @@ impl<'a, S> Area120Tables { Area120Tables { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://area120tables.googleapis.com/".to_string(), _root_url: "https://area120tables.googleapis.com/".to_string(), } @@ -158,7 +158,7 @@ impl<'a, S> Area120Tables { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/area120tables1_alpha1/src/client.rs b/gen/area120tables1_alpha1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/area120tables1_alpha1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/area120tables1_alpha1/src/lib.rs b/gen/area120tables1_alpha1/src/lib.rs index e76ffe8061..8cf9029fc1 100644 --- a/gen/area120tables1_alpha1/src/lib.rs +++ b/gen/area120tables1_alpha1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Area120 Tables* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *area120tables:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Area120 Tables* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *area120tables:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Area120 Tables* *v1_alpha1* API can be found at the //! [official documentation site](https://support.google.com/area120-tables/answer/10011390). diff --git a/gen/artifactregistry1-cli/Cargo.toml b/gen/artifactregistry1-cli/Cargo.toml index 003ec8ed58..2bfe118111 100644 --- a/gen/artifactregistry1-cli/Cargo.toml +++ b/gen/artifactregistry1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-artifactregistry1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Artifact Registry (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/artifactregistry1-cli" @@ -20,13 +20,13 @@ name = "artifactregistry1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-artifactregistry1] path = "../artifactregistry1" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/artifactregistry1-cli/README.md b/gen/artifactregistry1-cli/README.md index afd761570e..86501c0a4c 100644 --- a/gen/artifactregistry1-cli/README.md 
+++ b/gen/artifactregistry1-cli/README.md @@ -25,12 +25,14 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Artifact Registry* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Artifact Registry* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash artifactregistry1 [options] projects get-project-settings [-p ]... [-o ] + locations-get [-p ]... [-o ] + locations-list [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-repositories-apt-artifacts-import (-r )... [-p ]... [-o ] locations-repositories-apt-artifacts-upload (-r )... (-u simple -f [-m ]) [-p ]... [-o ] @@ -42,9 +44,12 @@ artifactregistry1 [options] locations-repositories-files-list [-p ]... [-o ] locations-repositories-get [-p ]... [-o ] locations-repositories-get-iam-policy [-p ]... [-o ] - locations-repositories-goo-get-artifacts-import (-r )... [-p ]... [-o ] - locations-repositories-googet-artifacts-upload (-r )... (-u simple -f [-m ]) [-p ]... [-o ] + locations-repositories-kfp-artifacts-upload (-r )... (-u simple -f [-m ]) [-p ]... [-o ] locations-repositories-list [-p ]... [-o ] + locations-repositories-maven-artifacts-get [-p ]... [-o ] + locations-repositories-maven-artifacts-list [-p ]... [-o ] + locations-repositories-npm-packages-get [-p ]... [-o ] + locations-repositories-npm-packages-list [-p ]... [-o ] locations-repositories-packages-delete [-p ]... [-o ] locations-repositories-packages-get [-p ]... [-o ] locations-repositories-packages-list [-p ]... [-o ] @@ -57,6 +62,8 @@ artifactregistry1 [options] locations-repositories-packages-versions-get [-p ]... [-o ] locations-repositories-packages-versions-list [-p ]... [-o ] locations-repositories-patch (-r )... [-p ]... [-o ] + locations-repositories-python-packages-get [-p ]... [-o ] + locations-repositories-python-packages-list [-p ]... 
[-o ] locations-repositories-set-iam-policy (-r )... [-p ]... [-o ] locations-repositories-test-iam-permissions (-r )... [-p ]... [-o ] locations-repositories-yum-artifacts-import (-r )... [-p ]... [-o ] diff --git a/gen/artifactregistry1-cli/mkdocs.yml b/gen/artifactregistry1-cli/mkdocs.yml index 52e0026bc5..6704679a50 100644 --- a/gen/artifactregistry1-cli/mkdocs.yml +++ b/gen/artifactregistry1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Artifact Registry v4.0.1+20220225 +site_name: Artifact Registry v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-artifactregistry1-cli site_description: A complete library to interact with Artifact Registry (protocol v1) @@ -7,40 +7,48 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/artifactregistry docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_get-project-settings.md', 'Projects', 'Get Project Settings'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-repositories-apt-artifacts-import.md', 'Projects', 'Locations Repositories Apt Artifacts Import'] -- ['projects_locations-repositories-apt-artifacts-upload.md', 'Projects', 'Locations Repositories Apt Artifacts Upload'] -- ['projects_locations-repositories-create.md', 'Projects', 'Locations Repositories Create'] -- ['projects_locations-repositories-delete.md', 'Projects', 'Locations Repositories Delete'] -- ['projects_locations-repositories-docker-images-get.md', 'Projects', 'Locations Repositories Docker Images Get'] -- ['projects_locations-repositories-docker-images-list.md', 'Projects', 'Locations Repositories Docker Images List'] -- ['projects_locations-repositories-files-get.md', 'Projects', 'Locations Repositories Files Get'] -- ['projects_locations-repositories-files-list.md', 'Projects', 'Locations Repositories Files List'] -- ['projects_locations-repositories-get.md', 'Projects', 'Locations Repositories Get'] -- 
['projects_locations-repositories-get-iam-policy.md', 'Projects', 'Locations Repositories Get Iam Policy'] -- ['projects_locations-repositories-goo-get-artifacts-import.md', 'Projects', 'Locations Repositories Goo Get Artifacts Import'] -- ['projects_locations-repositories-googet-artifacts-upload.md', 'Projects', 'Locations Repositories Googet Artifacts Upload'] -- ['projects_locations-repositories-list.md', 'Projects', 'Locations Repositories List'] -- ['projects_locations-repositories-packages-delete.md', 'Projects', 'Locations Repositories Packages Delete'] -- ['projects_locations-repositories-packages-get.md', 'Projects', 'Locations Repositories Packages Get'] -- ['projects_locations-repositories-packages-list.md', 'Projects', 'Locations Repositories Packages List'] -- ['projects_locations-repositories-packages-tags-create.md', 'Projects', 'Locations Repositories Packages Tags Create'] -- ['projects_locations-repositories-packages-tags-delete.md', 'Projects', 'Locations Repositories Packages Tags Delete'] -- ['projects_locations-repositories-packages-tags-get.md', 'Projects', 'Locations Repositories Packages Tags Get'] -- ['projects_locations-repositories-packages-tags-list.md', 'Projects', 'Locations Repositories Packages Tags List'] -- ['projects_locations-repositories-packages-tags-patch.md', 'Projects', 'Locations Repositories Packages Tags Patch'] -- ['projects_locations-repositories-packages-versions-delete.md', 'Projects', 'Locations Repositories Packages Versions Delete'] -- ['projects_locations-repositories-packages-versions-get.md', 'Projects', 'Locations Repositories Packages Versions Get'] -- ['projects_locations-repositories-packages-versions-list.md', 'Projects', 'Locations Repositories Packages Versions List'] -- ['projects_locations-repositories-patch.md', 'Projects', 'Locations Repositories Patch'] -- ['projects_locations-repositories-set-iam-policy.md', 'Projects', 'Locations Repositories Set Iam Policy'] -- 
['projects_locations-repositories-test-iam-permissions.md', 'Projects', 'Locations Repositories Test Iam Permissions'] -- ['projects_locations-repositories-yum-artifacts-import.md', 'Projects', 'Locations Repositories Yum Artifacts Import'] -- ['projects_locations-repositories-yum-artifacts-upload.md', 'Projects', 'Locations Repositories Yum Artifacts Upload'] -- ['projects_update-project-settings.md', 'Projects', 'Update Project Settings'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Get Project Settings': 'projects_get-project-settings.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Repositories Apt Artifacts Import': 'projects_locations-repositories-apt-artifacts-import.md' + - 'Locations Repositories Apt Artifacts Upload': 'projects_locations-repositories-apt-artifacts-upload.md' + - 'Locations Repositories Create': 'projects_locations-repositories-create.md' + - 'Locations Repositories Delete': 'projects_locations-repositories-delete.md' + - 'Locations Repositories Docker Images Get': 'projects_locations-repositories-docker-images-get.md' + - 'Locations Repositories Docker Images List': 'projects_locations-repositories-docker-images-list.md' + - 'Locations Repositories Files Get': 'projects_locations-repositories-files-get.md' + - 'Locations Repositories Files List': 'projects_locations-repositories-files-list.md' + - 'Locations Repositories Get': 'projects_locations-repositories-get.md' + - 'Locations Repositories Get Iam Policy': 'projects_locations-repositories-get-iam-policy.md' + - 'Locations Repositories Kfp Artifacts Upload': 'projects_locations-repositories-kfp-artifacts-upload.md' + - 'Locations Repositories List': 'projects_locations-repositories-list.md' + - 'Locations Repositories Maven Artifacts Get': 'projects_locations-repositories-maven-artifacts-get.md' + - 'Locations Repositories Maven Artifacts List': 
'projects_locations-repositories-maven-artifacts-list.md' + - 'Locations Repositories Npm Packages Get': 'projects_locations-repositories-npm-packages-get.md' + - 'Locations Repositories Npm Packages List': 'projects_locations-repositories-npm-packages-list.md' + - 'Locations Repositories Packages Delete': 'projects_locations-repositories-packages-delete.md' + - 'Locations Repositories Packages Get': 'projects_locations-repositories-packages-get.md' + - 'Locations Repositories Packages List': 'projects_locations-repositories-packages-list.md' + - 'Locations Repositories Packages Tags Create': 'projects_locations-repositories-packages-tags-create.md' + - 'Locations Repositories Packages Tags Delete': 'projects_locations-repositories-packages-tags-delete.md' + - 'Locations Repositories Packages Tags Get': 'projects_locations-repositories-packages-tags-get.md' + - 'Locations Repositories Packages Tags List': 'projects_locations-repositories-packages-tags-list.md' + - 'Locations Repositories Packages Tags Patch': 'projects_locations-repositories-packages-tags-patch.md' + - 'Locations Repositories Packages Versions Delete': 'projects_locations-repositories-packages-versions-delete.md' + - 'Locations Repositories Packages Versions Get': 'projects_locations-repositories-packages-versions-get.md' + - 'Locations Repositories Packages Versions List': 'projects_locations-repositories-packages-versions-list.md' + - 'Locations Repositories Patch': 'projects_locations-repositories-patch.md' + - 'Locations Repositories Python Packages Get': 'projects_locations-repositories-python-packages-get.md' + - 'Locations Repositories Python Packages List': 'projects_locations-repositories-python-packages-list.md' + - 'Locations Repositories Set Iam Policy': 'projects_locations-repositories-set-iam-policy.md' + - 'Locations Repositories Test Iam Permissions': 'projects_locations-repositories-test-iam-permissions.md' + - 'Locations Repositories Yum Artifacts Import': 
'projects_locations-repositories-yum-artifacts-import.md' + - 'Locations Repositories Yum Artifacts Upload': 'projects_locations-repositories-yum-artifacts-upload.md' + - 'Update Project Settings': 'projects_update-project-settings.md' theme: readthedocs diff --git a/gen/artifactregistry1-cli/src/client.rs b/gen/artifactregistry1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/artifactregistry1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/artifactregistry1-cli/src/main.rs b/gen/artifactregistry1-cli/src/main.rs index b215a60aca..48a3556d75 100644 --- a/gen/artifactregistry1-cli/src/main.rs +++ b/gen/artifactregistry1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_artifactregistry1::{api, Error, oauth2}; +use google_artifactregistry1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -103,6 +102,120 @@ where } } + async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut 
f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_operations_get(opt.value_of("name").unwrap_or("")); @@ -359,9 +472,11 @@ where "maven-config.allow-snapshot-overwrites" => Some(("mavenConfig.allowSnapshotOverwrites", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "maven-config.version-policy" => Some(("mavenConfig.versionPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "size-bytes" => Some(("sizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-snapshot-overwrites", "create-time", "description", "format", "kms-key-name", "labels", "maven-config", "name", "update-time", "version-policy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-snapshot-overwrites", "create-time", "description", "format", "kms-key-name", "labels", "maven-config", "name", "satisfies-pzs", "size-bytes", "update-time", "version-policy"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -539,7 +654,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); }, _ => { let mut found = false; @@ -554,7 +672,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["order-by", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -650,7 +768,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -764,7 +882,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -813,7 +931,7 @@ where } } - async fn _projects_locations_repositories_goo_get_artifacts_import(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _projects_locations_repositories_kfp_artifacts_upload(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); @@ -836,10 +954,10 
@@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { - "gcs-source.uris" => Some(("gcsSource.uris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), - "gcs-source.use-wildcards" => Some(("gcsSource.useWildcards", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tags" => Some(("tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["gcs-source", "uris", "use-wildcards"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "tags"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -848,92 +966,8 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::ImportGooGetArtifactsRequest = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_repositories_goo_get_artifacts_import(request, opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = 
call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_repositories_googet_artifacts_upload(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::UploadGooGetArtifactRequest = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_repositories_googet_artifacts_upload(request, opt.value_of("parent").unwrap_or("")); + let mut request: api::UploadKfpArtifactRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_repositories_kfp_artifacts_upload(request, opt.value_of("parent").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -996,7 +1030,229 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_repositories_maven_artifacts_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_repositories_maven_artifacts_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + 
Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_repositories_maven_artifacts_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_repositories_maven_artifacts_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_repositories_npm_packages_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_repositories_npm_packages_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_repositories_npm_packages_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: 
&mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_repositories_npm_packages_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1159,7 +1415,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1412,7 +1668,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1505,7 +1761,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1561,7 +1817,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1679,7 +1935,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1762,9 +2018,11 @@ where "maven-config.allow-snapshot-overwrites" => Some(("mavenConfig.allowSnapshotOverwrites", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "maven-config.version-policy" => Some(("mavenConfig.versionPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "size-bytes" => Some(("sizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-snapshot-overwrites", "create-time", "description", "format", "kms-key-name", "labels", "maven-config", "name", "update-time", "version-policy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-snapshot-overwrites", "create-time", "description", "format", "kms-key-name", "labels", "maven-config", "name", "satisfies-pzs", "size-bytes", "update-time", "version-policy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1779,7 +2037,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1828,6 +2086,117 @@ where } } + async fn 
_projects_locations_repositories_python_packages_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_repositories_python_packages_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_repositories_python_packages_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_repositories_python_packages_list(opt.value_of("parent").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_repositories_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2213,7 +2582,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| 
arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2272,6 +2641,12 @@ where ("get-project-settings", Some(opt)) => { call_result = self._projects_get_project_settings(opt, dry_run, &mut err).await; }, + ("locations-get", Some(opt)) => { + call_result = self._projects_locations_get(opt, dry_run, &mut err).await; + }, + ("locations-list", Some(opt)) => { + call_result = self._projects_locations_list(opt, dry_run, &mut err).await; + }, ("locations-operations-get", Some(opt)) => { call_result = self._projects_locations_operations_get(opt, dry_run, &mut err).await; }, @@ -2305,15 +2680,24 @@ where ("locations-repositories-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_repositories_get_iam_policy(opt, dry_run, &mut err).await; }, - ("locations-repositories-goo-get-artifacts-import", Some(opt)) => { - call_result = self._projects_locations_repositories_goo_get_artifacts_import(opt, dry_run, &mut err).await; - }, - ("locations-repositories-googet-artifacts-upload", Some(opt)) => { - call_result = self._projects_locations_repositories_googet_artifacts_upload(opt, dry_run, &mut err).await; + ("locations-repositories-kfp-artifacts-upload", Some(opt)) => { + call_result = self._projects_locations_repositories_kfp_artifacts_upload(opt, dry_run, &mut err).await; }, ("locations-repositories-list", Some(opt)) => { call_result = self._projects_locations_repositories_list(opt, dry_run, &mut err).await; }, + ("locations-repositories-maven-artifacts-get", Some(opt)) => { + call_result = self._projects_locations_repositories_maven_artifacts_get(opt, dry_run, &mut err).await; + }, + ("locations-repositories-maven-artifacts-list", Some(opt)) => { + call_result = self._projects_locations_repositories_maven_artifacts_list(opt, dry_run, &mut err).await; + }, + ("locations-repositories-npm-packages-get", Some(opt)) => { + call_result = 
self._projects_locations_repositories_npm_packages_get(opt, dry_run, &mut err).await; + }, + ("locations-repositories-npm-packages-list", Some(opt)) => { + call_result = self._projects_locations_repositories_npm_packages_list(opt, dry_run, &mut err).await; + }, ("locations-repositories-packages-delete", Some(opt)) => { call_result = self._projects_locations_repositories_packages_delete(opt, dry_run, &mut err).await; }, @@ -2350,6 +2734,12 @@ where ("locations-repositories-patch", Some(opt)) => { call_result = self._projects_locations_repositories_patch(opt, dry_run, &mut err).await; }, + ("locations-repositories-python-packages-get", Some(opt)) => { + call_result = self._projects_locations_repositories_python_packages_get(opt, dry_run, &mut err).await; + }, + ("locations-repositories-python-packages-list", Some(opt)) => { + call_result = self._projects_locations_repositories_python_packages_list(opt, dry_run, &mut err).await; + }, ("locations-repositories-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_repositories_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -2445,7 +2835,7 @@ async fn main() { let mut exit_status = 0i32; let upload_value_names = ["mode", "file"]; let arg_data = [ - ("projects", "methods: 'get-project-settings', 'locations-operations-get', 'locations-repositories-apt-artifacts-import', 'locations-repositories-apt-artifacts-upload', 'locations-repositories-create', 'locations-repositories-delete', 'locations-repositories-docker-images-get', 'locations-repositories-docker-images-list', 'locations-repositories-files-get', 'locations-repositories-files-list', 'locations-repositories-get', 'locations-repositories-get-iam-policy', 'locations-repositories-goo-get-artifacts-import', 'locations-repositories-googet-artifacts-upload', 'locations-repositories-list', 'locations-repositories-packages-delete', 'locations-repositories-packages-get', 'locations-repositories-packages-list', 
'locations-repositories-packages-tags-create', 'locations-repositories-packages-tags-delete', 'locations-repositories-packages-tags-get', 'locations-repositories-packages-tags-list', 'locations-repositories-packages-tags-patch', 'locations-repositories-packages-versions-delete', 'locations-repositories-packages-versions-get', 'locations-repositories-packages-versions-list', 'locations-repositories-patch', 'locations-repositories-set-iam-policy', 'locations-repositories-test-iam-permissions', 'locations-repositories-yum-artifacts-import', 'locations-repositories-yum-artifacts-upload' and 'update-project-settings'", vec![ + ("projects", "methods: 'get-project-settings', 'locations-get', 'locations-list', 'locations-operations-get', 'locations-repositories-apt-artifacts-import', 'locations-repositories-apt-artifacts-upload', 'locations-repositories-create', 'locations-repositories-delete', 'locations-repositories-docker-images-get', 'locations-repositories-docker-images-list', 'locations-repositories-files-get', 'locations-repositories-files-list', 'locations-repositories-get', 'locations-repositories-get-iam-policy', 'locations-repositories-kfp-artifacts-upload', 'locations-repositories-list', 'locations-repositories-maven-artifacts-get', 'locations-repositories-maven-artifacts-list', 'locations-repositories-npm-packages-get', 'locations-repositories-npm-packages-list', 'locations-repositories-packages-delete', 'locations-repositories-packages-get', 'locations-repositories-packages-list', 'locations-repositories-packages-tags-create', 'locations-repositories-packages-tags-delete', 'locations-repositories-packages-tags-get', 'locations-repositories-packages-tags-list', 'locations-repositories-packages-tags-patch', 'locations-repositories-packages-versions-delete', 'locations-repositories-packages-versions-get', 'locations-repositories-packages-versions-list', 'locations-repositories-patch', 'locations-repositories-python-packages-get', 
'locations-repositories-python-packages-list', 'locations-repositories-set-iam-policy', 'locations-repositories-test-iam-permissions', 'locations-repositories-yum-artifacts-import', 'locations-repositories-yum-artifacts-upload' and 'update-project-settings'", vec![ ("get-project-settings", Some(r##"Retrieves the Settings for the Project."##), "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_get-project-settings", @@ -2462,6 +2852,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-get", + Some(r##"Gets information about a location."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Resource name for the location."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-list", + Some(r##"Lists information about the supported locations for this service."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-list", + vec![ + (Some(r##"name"##), + None, + Some(r##"The resource that owns the locations collection, if applicable."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2674,7 +3108,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The name of the parent resource whose 
files will be listed."##), + Some(r##"The name of the repository whose files will be listed. For example: "projects/p1/locations/us-central1/repositories/repo1"##), Some(true), Some(false)), @@ -2718,7 +3152,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2734,41 +3168,13 @@ async fn main() { Some(false), Some(false)), ]), - ("locations-repositories-goo-get-artifacts-import", - Some(r##"Imports GooGet artifacts. The returned Operation will complete once the resources are imported. Package, Version, and File resources are created based on the imported artifacts. Imported artifacts that conflict with existing resources are ignored."##), - "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-goo-get-artifacts-import", + ("locations-repositories-kfp-artifacts-upload", + Some(r##"Directly uploads a KFP artifact. The returned Operation will complete once the resource is uploaded. Package, Version, and File resources will be created based on the uploaded artifact. 
Uploaded artifacts that conflict with existing resources will be overwritten."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-kfp-artifacts-upload", vec![ (Some(r##"parent"##), None, - Some(r##"The name of the parent resource where the artifacts will be imported."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-repositories-googet-artifacts-upload", - Some(r##"Directly uploads a GooGet artifact. The returned Operation will complete once the resources are uploaded. Package, Version, and File resources are created based on the imported artifact. 
Imported artifacts that conflict with existing resources are ignored."##), - "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-googet-artifacts-upload", - vec![ - (Some(r##"parent"##), - None, - Some(r##"The name of the parent resource where the artifacts will be uploaded."##), + Some(r##"The resource name of the repository where the KFP artifact will be uploaded."##), Some(true), Some(false)), @@ -2812,6 +3218,94 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-repositories-maven-artifacts-get", + Some(r##"Gets a maven artifact."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-maven-artifacts-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the maven artifact."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-repositories-maven-artifacts-list", + Some(r##"Lists maven artifacts."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-maven-artifacts-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The name of the parent resource whose maven artifacts will be listed."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-repositories-npm-packages-get", + Some(r##"Gets a npm package."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-npm-packages-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the npm package."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-repositories-npm-packages-list", + Some(r##"Lists npm packages."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-npm-packages-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The name of the parent resource whose npm packages will be listed."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3094,6 +3588,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-repositories-python-packages-get", + Some(r##"Gets a python package."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-python-packages-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the python package."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-repositories-python-packages-list", + Some(r##"Lists python packages."##), + "Details at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli/projects_locations-repositories-python-packages-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The name of the parent resource whose python packages will be listed."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3106,7 +3644,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3134,7 +3672,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3252,7 +3790,7 @@ async fn main() { let mut app = App::new("artifactregistry1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("Store and manage build artifacts in a scalable and integrated service built on Google infrastructure.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_artifactregistry1_cli") .arg(Arg::with_name("url") diff --git a/gen/artifactregistry1/Cargo.toml b/gen/artifactregistry1/Cargo.toml index 332c0dcd86..3682b0a4fb 100644 --- a/gen/artifactregistry1/Cargo.toml +++ b/gen/artifactregistry1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-artifactregistry1" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Artifact Registry (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/artifactregistry1" homepage = "https://cloud.google.com/artifacts/docs/" -documentation = "https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-artifactregistry1/5.0.2+20230113" license = "MIT" keywords = ["artifactregistry", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/artifactregistry1/README.md b/gen/artifactregistry1/README.md index 5179675592..904f0a096c 100644 --- a/gen/artifactregistry1/README.md +++ b/gen/artifactregistry1/README.md @@ -5,23 +5,23 @@ DO NOT EDIT ! --> The `google-artifactregistry1` library allows access to all features of the *Google Artifact Registry* service. 
-This documentation was generated from *Artifact Registry* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *artifactregistry:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Artifact Registry* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *artifactregistry:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Artifact Registry* *v1* API can be found at the [official documentation site](https://cloud.google.com/artifacts/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/ArtifactRegistry) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/ArtifactRegistry) ... 
* projects - * [*get project settings*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectGetProjectSettingCall), [*locations get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationListCall), [*locations operations get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationOperationGetCall), [*locations repositories apt artifacts import*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryAptArtifactImportCall), [*locations repositories apt artifacts upload*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryAptArtifactUploadCall), [*locations repositories create*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryCreateCall), [*locations repositories delete*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryDeleteCall), [*locations repositories docker images get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryDockerImageGetCall), [*locations repositories docker images list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryDockerImageListCall), [*locations repositories files get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryFileGetCall), [*locations repositories files 
list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryFileListCall), [*locations repositories get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryGetCall), [*locations repositories get iam policy*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryGetIamPolicyCall), [*locations repositories kfp artifacts upload*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryKfpArtifactUploadCall), [*locations repositories list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryListCall), [*locations repositories maven artifacts get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryMavenArtifactGetCall), [*locations repositories maven artifacts list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryMavenArtifactListCall), [*locations repositories npm packages get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryNpmPackageGetCall), [*locations repositories npm packages list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryNpmPackageListCall), [*locations repositories packages delete*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageDeleteCall), [*locations repositories packages get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageGetCall), [*locations repositories packages 
list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageListCall), [*locations repositories packages tags create*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagCreateCall), [*locations repositories packages tags delete*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagDeleteCall), [*locations repositories packages tags get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagGetCall), [*locations repositories packages tags list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagListCall), [*locations repositories packages tags patch*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagPatchCall), [*locations repositories packages versions delete*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageVersionDeleteCall), [*locations repositories packages versions get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageVersionGetCall), [*locations repositories packages versions list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageVersionListCall), [*locations repositories patch*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPatchCall), [*locations repositories python packages 
get*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPythonPackageGetCall), [*locations repositories python packages list*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPythonPackageListCall), [*locations repositories set iam policy*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositorySetIamPolicyCall), [*locations repositories test iam permissions*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryTestIamPermissionCall), [*locations repositories yum artifacts import*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryYumArtifactImportCall), [*locations repositories yum artifacts upload*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryYumArtifactUploadCall) and [*update project settings*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectUpdateProjectSettingCall) + * [*get project settings*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectGetProjectSettingCall), [*locations get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationListCall), [*locations operations get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationOperationGetCall), [*locations repositories apt artifacts import*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryAptArtifactImportCall), [*locations 
repositories apt artifacts upload*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryAptArtifactUploadCall), [*locations repositories create*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryCreateCall), [*locations repositories delete*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryDeleteCall), [*locations repositories docker images get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryDockerImageGetCall), [*locations repositories docker images list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryDockerImageListCall), [*locations repositories files get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryFileGetCall), [*locations repositories files list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryFileListCall), [*locations repositories get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryGetCall), [*locations repositories get iam policy*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryGetIamPolicyCall), [*locations repositories kfp artifacts upload*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryKfpArtifactUploadCall), [*locations repositories list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryListCall), [*locations repositories maven artifacts 
get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryMavenArtifactGetCall), [*locations repositories maven artifacts list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryMavenArtifactListCall), [*locations repositories npm packages get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryNpmPackageGetCall), [*locations repositories npm packages list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryNpmPackageListCall), [*locations repositories packages delete*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageDeleteCall), [*locations repositories packages get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageGetCall), [*locations repositories packages list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageListCall), [*locations repositories packages tags create*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagCreateCall), [*locations repositories packages tags delete*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagDeleteCall), [*locations repositories packages tags get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagGetCall), [*locations repositories packages tags list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagListCall), [*locations repositories packages tags 
patch*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageTagPatchCall), [*locations repositories packages versions delete*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageVersionDeleteCall), [*locations repositories packages versions get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageVersionGetCall), [*locations repositories packages versions list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPackageVersionListCall), [*locations repositories patch*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPatchCall), [*locations repositories python packages get*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPythonPackageGetCall), [*locations repositories python packages list*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryPythonPackageListCall), [*locations repositories set iam policy*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositorySetIamPolicyCall), [*locations repositories test iam permissions*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryTestIamPermissionCall), [*locations repositories yum artifacts import*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryYumArtifactImportCall), [*locations repositories yum artifacts upload*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryYumArtifactUploadCall) and [*update project 
settings*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectUpdateProjectSettingCall) Upload supported by ... -* [*locations repositories apt artifacts upload projects*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryAptArtifactUploadCall) -* [*locations repositories kfp artifacts upload projects*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryKfpArtifactUploadCall) -* [*locations repositories yum artifacts upload projects*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/api::ProjectLocationRepositoryYumArtifactUploadCall) +* [*locations repositories apt artifacts upload projects*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryAptArtifactUploadCall) +* [*locations repositories kfp artifacts upload projects*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryKfpArtifactUploadCall) +* [*locations repositories yum artifacts upload projects*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/api::ProjectLocationRepositoryYumArtifactUploadCall) @@ -29,17 +29,17 @@ Upload supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/ArtifactRegistry)** +* **[Hub](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/ArtifactRegistry)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::CallBuilder) -* **[Resources](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::CallBuilder) +* **[Resources](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::Part)** + * **[Parts](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -137,17 +137,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -157,29 +157,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::Delegate) to the -[Method Builder](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::Delegate) to the +[Method Builder](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::RequestValue) and -[decodable](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::RequestValue) and +[decodable](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-artifactregistry1/5.0.2-beta-1+20230113/google_artifactregistry1/client::RequestValue) are moved +* [request values](https://docs.rs/google-artifactregistry1/5.0.2+20230113/google_artifactregistry1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/artifactregistry1/src/api.rs b/gen/artifactregistry1/src/api.rs index 441d8ec412..0d3627e026 100644 --- a/gen/artifactregistry1/src/api.rs +++ b/gen/artifactregistry1/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> ArtifactRegistry { ArtifactRegistry { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://artifactregistry.googleapis.com/".to_string(), _root_url: "https://artifactregistry.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> ArtifactRegistry { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/artifactregistry1/src/client.rs b/gen/artifactregistry1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/artifactregistry1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/artifactregistry1/src/lib.rs b/gen/artifactregistry1/src/lib.rs index fc93c08ec5..3fc22ca7ca 100644 --- a/gen/artifactregistry1/src/lib.rs +++ b/gen/artifactregistry1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Artifact Registry* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *artifactregistry:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Artifact Registry* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *artifactregistry:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Artifact Registry* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/artifacts/docs/). diff --git a/gen/artifactregistry1_beta1-cli/Cargo.toml b/gen/artifactregistry1_beta1-cli/Cargo.toml index 9af7d13c47..c69df680ce 100644 --- a/gen/artifactregistry1_beta1-cli/Cargo.toml +++ b/gen/artifactregistry1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-artifactregistry1_beta1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Artifact Registry (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/artifactregistry1_beta1-cli" @@ -20,13 +20,13 @@ name = "artifactregistry1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-artifactregistry1_beta1] path = "../artifactregistry1_beta1" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/artifactregistry1_beta1-cli/README.md b/gen/artifactregistry1_beta1-cli/README.md index dab3049490..a4a24e4da5 
100644 --- a/gen/artifactregistry1_beta1-cli/README.md +++ b/gen/artifactregistry1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Artifact Registry* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Artifact Registry* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash artifactregistry1-beta1 [options] diff --git a/gen/artifactregistry1_beta1-cli/mkdocs.yml b/gen/artifactregistry1_beta1-cli/mkdocs.yml index 66c5f05af8..ce8ed69341 100644 --- a/gen/artifactregistry1_beta1-cli/mkdocs.yml +++ b/gen/artifactregistry1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Artifact Registry v4.0.1+20220225 +site_name: Artifact Registry v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-artifactregistry1_beta1-cli site_description: A complete library to interact with Artifact Registry (protocol v1beta1) @@ -7,32 +7,33 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/artifactregistry docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-repositories-create.md', 'Projects', 'Locations Repositories Create'] -- ['projects_locations-repositories-delete.md', 'Projects', 'Locations Repositories Delete'] -- ['projects_locations-repositories-files-get.md', 'Projects', 'Locations Repositories Files Get'] -- ['projects_locations-repositories-files-list.md', 'Projects', 'Locations Repositories Files List'] -- ['projects_locations-repositories-get.md', 'Projects', 'Locations Repositories Get'] -- ['projects_locations-repositories-get-iam-policy.md', 'Projects', 'Locations Repositories Get Iam Policy'] -- 
['projects_locations-repositories-list.md', 'Projects', 'Locations Repositories List'] -- ['projects_locations-repositories-packages-delete.md', 'Projects', 'Locations Repositories Packages Delete'] -- ['projects_locations-repositories-packages-get.md', 'Projects', 'Locations Repositories Packages Get'] -- ['projects_locations-repositories-packages-list.md', 'Projects', 'Locations Repositories Packages List'] -- ['projects_locations-repositories-packages-tags-create.md', 'Projects', 'Locations Repositories Packages Tags Create'] -- ['projects_locations-repositories-packages-tags-delete.md', 'Projects', 'Locations Repositories Packages Tags Delete'] -- ['projects_locations-repositories-packages-tags-get.md', 'Projects', 'Locations Repositories Packages Tags Get'] -- ['projects_locations-repositories-packages-tags-list.md', 'Projects', 'Locations Repositories Packages Tags List'] -- ['projects_locations-repositories-packages-tags-patch.md', 'Projects', 'Locations Repositories Packages Tags Patch'] -- ['projects_locations-repositories-packages-versions-delete.md', 'Projects', 'Locations Repositories Packages Versions Delete'] -- ['projects_locations-repositories-packages-versions-get.md', 'Projects', 'Locations Repositories Packages Versions Get'] -- ['projects_locations-repositories-packages-versions-list.md', 'Projects', 'Locations Repositories Packages Versions List'] -- ['projects_locations-repositories-patch.md', 'Projects', 'Locations Repositories Patch'] -- ['projects_locations-repositories-set-iam-policy.md', 'Projects', 'Locations Repositories Set Iam Policy'] -- ['projects_locations-repositories-test-iam-permissions.md', 'Projects', 'Locations Repositories Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Repositories Create': 
'projects_locations-repositories-create.md' + - 'Locations Repositories Delete': 'projects_locations-repositories-delete.md' + - 'Locations Repositories Files Get': 'projects_locations-repositories-files-get.md' + - 'Locations Repositories Files List': 'projects_locations-repositories-files-list.md' + - 'Locations Repositories Get': 'projects_locations-repositories-get.md' + - 'Locations Repositories Get Iam Policy': 'projects_locations-repositories-get-iam-policy.md' + - 'Locations Repositories List': 'projects_locations-repositories-list.md' + - 'Locations Repositories Packages Delete': 'projects_locations-repositories-packages-delete.md' + - 'Locations Repositories Packages Get': 'projects_locations-repositories-packages-get.md' + - 'Locations Repositories Packages List': 'projects_locations-repositories-packages-list.md' + - 'Locations Repositories Packages Tags Create': 'projects_locations-repositories-packages-tags-create.md' + - 'Locations Repositories Packages Tags Delete': 'projects_locations-repositories-packages-tags-delete.md' + - 'Locations Repositories Packages Tags Get': 'projects_locations-repositories-packages-tags-get.md' + - 'Locations Repositories Packages Tags List': 'projects_locations-repositories-packages-tags-list.md' + - 'Locations Repositories Packages Tags Patch': 'projects_locations-repositories-packages-tags-patch.md' + - 'Locations Repositories Packages Versions Delete': 'projects_locations-repositories-packages-versions-delete.md' + - 'Locations Repositories Packages Versions Get': 'projects_locations-repositories-packages-versions-get.md' + - 'Locations Repositories Packages Versions List': 'projects_locations-repositories-packages-versions-list.md' + - 'Locations Repositories Patch': 'projects_locations-repositories-patch.md' + - 'Locations Repositories Set Iam Policy': 'projects_locations-repositories-set-iam-policy.md' + - 'Locations Repositories Test Iam Permissions': 'projects_locations-repositories-test-iam-permissions.md' 
theme: readthedocs diff --git a/gen/artifactregistry1_beta1-cli/src/client.rs b/gen/artifactregistry1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/artifactregistry1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/artifactregistry1_beta1-cli/src/main.rs b/gen/artifactregistry1_beta1-cli/src/main.rs index fb02fd5e8b..f8ec96f77e 100644 --- a/gen/artifactregistry1_beta1-cli/src/main.rs +++ b/gen/artifactregistry1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_artifactregistry1_beta1::{api, Error, oauth2}; +use google_artifactregistry1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -246,9 +245,11 @@ where "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "size-bytes" => Some(("sizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = 
FieldCursor::did_you_mean(key, &vec!["create-time", "description", "format", "kms-key-name", "labels", "name", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "format", "kms-key-name", "labels", "name", "satisfies-pzs", "size-bytes", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -426,7 +427,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -537,7 +538,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -596,7 +597,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -759,7 +760,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1012,7 +1013,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1105,7 +1106,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1161,7 +1162,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1279,7 +1280,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1360,9 +1361,11 @@ where "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "size-bytes" => Some(("sizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "format", "kms-key-name", 
"labels", "name", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "format", "kms-key-name", "labels", "name", "satisfies-pzs", "size-bytes", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1377,7 +1380,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1900,7 +1903,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The name of the parent resource whose files will be listed."##), + Some(r##"The name of the repository whose files will be listed. For example: "projects/p1/locations/us-central1/repositories/repo1"##), Some(true), Some(false)), @@ -1944,7 +1947,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2270,7 +2273,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2298,7 +2301,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2326,7 +2329,7 @@ async fn main() { let mut app = App::new("artifactregistry1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("Store and manage build artifacts in a scalable and integrated service built on Google infrastructure.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_artifactregistry1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/artifactregistry1_beta1/Cargo.toml b/gen/artifactregistry1_beta1/Cargo.toml index 3964360883..247f10b3b0 100644 --- a/gen/artifactregistry1_beta1/Cargo.toml +++ b/gen/artifactregistry1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-artifactregistry1_beta1" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Artifact Registry (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/artifactregistry1_beta1" homepage = "https://cloud.google.com/artifacts/docs/" -documentation = "https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113" license = "MIT" keywords = ["artifactregistry", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/artifactregistry1_beta1/README.md 
b/gen/artifactregistry1_beta1/README.md index ba92622a21..d446524783 100644 --- a/gen/artifactregistry1_beta1/README.md +++ b/gen/artifactregistry1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-artifactregistry1_beta1` library allows access to all features of the *Google Artifact Registry* service. -This documentation was generated from *Artifact Registry* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *artifactregistry:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Artifact Registry* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *artifactregistry:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Artifact Registry* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/artifacts/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/ArtifactRegistry) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/ArtifactRegistry) ... 
* projects - * [*locations get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationListCall), [*locations operations get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationOperationGetCall), [*locations repositories create*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryCreateCall), [*locations repositories delete*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryDeleteCall), [*locations repositories files get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryFileGetCall), [*locations repositories files list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryFileListCall), [*locations repositories get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryGetCall), [*locations repositories get iam policy*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryGetIamPolicyCall), [*locations repositories list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryListCall), [*locations repositories packages delete*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageDeleteCall), [*locations repositories packages 
get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageGetCall), [*locations repositories packages list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageListCall), [*locations repositories packages tags create*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagCreateCall), [*locations repositories packages tags delete*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagDeleteCall), [*locations repositories packages tags get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagGetCall), [*locations repositories packages tags list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagListCall), [*locations repositories packages tags patch*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagPatchCall), [*locations repositories packages versions delete*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageVersionDeleteCall), [*locations repositories packages versions get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageVersionGetCall), [*locations repositories packages versions list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageVersionListCall), [*locations repositories 
patch*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPatchCall), [*locations repositories set iam policy*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositorySetIamPolicyCall) and [*locations repositories test iam permissions*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryTestIamPermissionCall) + * [*locations get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationListCall), [*locations operations get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationOperationGetCall), [*locations repositories create*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryCreateCall), [*locations repositories delete*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryDeleteCall), [*locations repositories files get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryFileGetCall), [*locations repositories files list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryFileListCall), [*locations repositories get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryGetCall), [*locations repositories get iam 
policy*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryGetIamPolicyCall), [*locations repositories list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryListCall), [*locations repositories packages delete*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageDeleteCall), [*locations repositories packages get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageGetCall), [*locations repositories packages list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageListCall), [*locations repositories packages tags create*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagCreateCall), [*locations repositories packages tags delete*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagDeleteCall), [*locations repositories packages tags get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagGetCall), [*locations repositories packages tags list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagListCall), [*locations repositories packages tags patch*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageTagPatchCall), [*locations repositories packages versions 
delete*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageVersionDeleteCall), [*locations repositories packages versions get*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageVersionGetCall), [*locations repositories packages versions list*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPackageVersionListCall), [*locations repositories patch*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryPatchCall), [*locations repositories set iam policy*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositorySetIamPolicyCall) and [*locations repositories test iam permissions*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/api::ProjectLocationRepositoryTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/ArtifactRegistry)** +* **[Hub](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/ArtifactRegistry)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-artifactregistry1_beta1/5.0.2-beta-1+20230113/google_artifactregistry1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-artifactregistry1_beta1/5.0.2+20230113/google_artifactregistry1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/artifactregistry1_beta1/src/api.rs b/gen/artifactregistry1_beta1/src/api.rs index b0db9dbd07..dc17bfdfad 100644 --- a/gen/artifactregistry1_beta1/src/api.rs +++ b/gen/artifactregistry1_beta1/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> ArtifactRegistry { ArtifactRegistry { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://artifactregistry.googleapis.com/".to_string(), _root_url: "https://artifactregistry.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> ArtifactRegistry { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/artifactregistry1_beta1/src/client.rs b/gen/artifactregistry1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/artifactregistry1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/artifactregistry1_beta1/src/lib.rs b/gen/artifactregistry1_beta1/src/lib.rs index 1710707ae2..26298810a5 100644 --- a/gen/artifactregistry1_beta1/src/lib.rs +++ b/gen/artifactregistry1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Artifact Registry* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *artifactregistry:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Artifact Registry* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *artifactregistry:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Artifact Registry* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/artifacts/docs/). diff --git a/gen/assuredworkloads1-cli/Cargo.toml b/gen/assuredworkloads1-cli/Cargo.toml index 89502999e5..6da5453625 100644 --- a/gen/assuredworkloads1-cli/Cargo.toml +++ b/gen/assuredworkloads1-cli/Cargo.toml @@ -4,11 +4,11 @@ [package] name = "google-assuredworkloads1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230116" authors = ["Sebastian Thiel "] description = "A complete library to interact with Assuredworkloads (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/assuredworkloads1-cli" -homepage = "https://cloud.google.com" +homepage = "https://cloud.google.com/learnmoreurl" documentation = "http://byron.github.io/google-apis-rs/google_assuredworkloads1_cli" license = "MIT" keywords = ["assuredworkloads", "google", "cli"] @@ -20,13 +20,13 @@ name = "assuredworkloads1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-assuredworkloads1] path = 
"../assuredworkloads1" -version = "4.0.1+20220224" +version = "5.0.2+20230116" + diff --git a/gen/assuredworkloads1-cli/README.md b/gen/assuredworkloads1-cli/README.md index 4b2d1e588b..e4a6f8181b 100644 --- a/gen/assuredworkloads1-cli/README.md +++ b/gen/assuredworkloads1-cli/README.md @@ -11,7 +11,7 @@ capabilities. Errors will be printed to standard error, and cause the program's If data-structures are requested, these will be returned as pretty-printed JSON, to be useful as input to other tools. Everything else about the *Assuredworkloads* API can be found at the -[official documentation site](https://cloud.google.com). +[official documentation site](https://cloud.google.com/learnmoreurl). # Installation and Source Code @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Assuredworkloads* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Assuredworkloads* API at revision *20230116*. The CLI is at version *5.0.2*. ```bash assuredworkloads1 [options] @@ -36,7 +36,12 @@ assuredworkloads1 [options] locations-workloads-delete [-p ]... [-o ] locations-workloads-get [-p ]... [-o ] locations-workloads-list [-p ]... [-o ] + locations-workloads-mutate-partner-permissions (-r )... [-p ]... [-o ] locations-workloads-patch (-r )... [-p ]... [-o ] + locations-workloads-restrict-allowed-resources (-r )... [-p ]... [-o ] + locations-workloads-violations-acknowledge (-r )... [-p ]... [-o ] + locations-workloads-violations-get [-p ]... [-o ] + locations-workloads-violations-list [-p ]... 
[-o ] assuredworkloads1 --help Configuration: diff --git a/gen/assuredworkloads1-cli/mkdocs.yml b/gen/assuredworkloads1-cli/mkdocs.yml index 57f525b68c..df180cea23 100644 --- a/gen/assuredworkloads1-cli/mkdocs.yml +++ b/gen/assuredworkloads1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Assuredworkloads v4.0.1+20220224 +site_name: Assuredworkloads v5.0.2+20230116 site_url: http://byron.github.io/google-apis-rs/google-assuredworkloads1-cli site_description: A complete library to interact with Assuredworkloads (protocol v1) @@ -7,15 +7,21 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/assuredworkloads docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['organizations_locations-operations-get.md', 'Organizations', 'Locations Operations Get'] -- ['organizations_locations-operations-list.md', 'Organizations', 'Locations Operations List'] -- ['organizations_locations-workloads-create.md', 'Organizations', 'Locations Workloads Create'] -- ['organizations_locations-workloads-delete.md', 'Organizations', 'Locations Workloads Delete'] -- ['organizations_locations-workloads-get.md', 'Organizations', 'Locations Workloads Get'] -- ['organizations_locations-workloads-list.md', 'Organizations', 'Locations Workloads List'] -- ['organizations_locations-workloads-patch.md', 'Organizations', 'Locations Workloads Patch'] +nav: +- Home: 'index.md' +- 'Organizations': + - 'Locations Operations Get': 'organizations_locations-operations-get.md' + - 'Locations Operations List': 'organizations_locations-operations-list.md' + - 'Locations Workloads Create': 'organizations_locations-workloads-create.md' + - 'Locations Workloads Delete': 'organizations_locations-workloads-delete.md' + - 'Locations Workloads Get': 'organizations_locations-workloads-get.md' + - 'Locations Workloads List': 'organizations_locations-workloads-list.md' + - 'Locations Workloads Mutate Partner Permissions': 'organizations_locations-workloads-mutate-partner-permissions.md' + - 
'Locations Workloads Patch': 'organizations_locations-workloads-patch.md' + - 'Locations Workloads Restrict Allowed Resources': 'organizations_locations-workloads-restrict-allowed-resources.md' + - 'Locations Workloads Violations Acknowledge': 'organizations_locations-workloads-violations-acknowledge.md' + - 'Locations Workloads Violations Get': 'organizations_locations-workloads-violations-get.md' + - 'Locations Workloads Violations List': 'organizations_locations-workloads-violations-list.md' theme: readthedocs diff --git a/gen/assuredworkloads1-cli/src/client.rs b/gen/assuredworkloads1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/assuredworkloads1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/assuredworkloads1-cli/src/main.rs b/gen/assuredworkloads1-cli/src/main.rs index c59ae24d86..4d16710b9f 100644 --- a/gen/assuredworkloads1-cli/src/main.rs +++ b/gen/assuredworkloads1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_assuredworkloads1::{api, Error, oauth2}; +use google_assuredworkloads1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -190,6 +189,9 @@ where match &temp_cursor.to_string()[..] 
{ "billing-account" => Some(("billingAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compliance-regime" => Some(("complianceRegime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "compliance-status.acknowledged-violation-count" => Some(("complianceStatus.acknowledgedViolationCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compliance-status.active-violation-count" => Some(("complianceStatus.activeViolationCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compliant-but-disallowed-services" => Some(("compliantButDisallowedServices", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "enable-sovereign-controls" => Some(("enableSovereignControls", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -199,11 +201,12 @@ where "kms-settings.rotation-period" => Some(("kmsSettings.rotationPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "partner" => Some(("partner", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "provisioned-resources-parent" => Some(("provisionedResourcesParent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "saa-enrollment-response.setup-errors" => Some(("saaEnrollmentResponse.setupErrors", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "saa-enrollment-response.setup-status" => Some(("saaEnrollmentResponse.setupStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = 
FieldCursor::did_you_mean(key, &vec!["billing-account", "compliance-regime", "create-time", "display-name", "enable-sovereign-controls", "etag", "kaj-enrollment-state", "kms-settings", "labels", "name", "next-rotation-time", "provisioned-resources-parent", "rotation-period", "saa-enrollment-response", "setup-errors", "setup-status"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["acknowledged-violation-count", "active-violation-count", "billing-account", "compliance-regime", "compliance-status", "compliant-but-disallowed-services", "create-time", "display-name", "enable-sovereign-controls", "etag", "kaj-enrollment-state", "kms-settings", "labels", "name", "next-rotation-time", "partner", "provisioned-resources-parent", "rotation-period", "saa-enrollment-response", "setup-errors", "setup-status"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -385,7 +388,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -437,6 +440,95 @@ where } } + async fn _organizations_locations_workloads_mutate_partner_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if 
err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "partner-permissions.data-logs-viewer" => Some(("partnerPermissions.dataLogsViewer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "partner-permissions.remediate-folder-violations" => Some(("partnerPermissions.remediateFolderViolations", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "partner-permissions.service-access-approver" => Some(("partnerPermissions.serviceAccessApprover", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["data-logs-viewer", "etag", "partner-permissions", "remediate-folder-violations", "service-access-approver", "update-mask"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudAssuredworkloadsV1MutatePartnerPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_workloads_mutate_partner_permissions(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_locations_workloads_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -462,6 +554,9 @@ where match &temp_cursor.to_string()[..] 
{ "billing-account" => Some(("billingAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compliance-regime" => Some(("complianceRegime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "compliance-status.acknowledged-violation-count" => Some(("complianceStatus.acknowledgedViolationCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compliance-status.active-violation-count" => Some(("complianceStatus.activeViolationCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compliant-but-disallowed-services" => Some(("compliantButDisallowedServices", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "enable-sovereign-controls" => Some(("enableSovereignControls", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -471,11 +566,12 @@ where "kms-settings.rotation-period" => Some(("kmsSettings.rotationPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "partner" => Some(("partner", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "provisioned-resources-parent" => Some(("provisionedResourcesParent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "saa-enrollment-response.setup-errors" => Some(("saaEnrollmentResponse.setupErrors", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "saa-enrollment-response.setup-status" => Some(("saaEnrollmentResponse.setupStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = 
FieldCursor::did_you_mean(key, &vec!["billing-account", "compliance-regime", "create-time", "display-name", "enable-sovereign-controls", "etag", "kaj-enrollment-state", "kms-settings", "labels", "name", "next-rotation-time", "provisioned-resources-parent", "rotation-period", "saa-enrollment-response", "setup-errors", "setup-status"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["acknowledged-violation-count", "active-violation-count", "billing-account", "compliance-regime", "compliance-status", "compliant-but-disallowed-services", "create-time", "display-name", "enable-sovereign-controls", "etag", "kaj-enrollment-state", "kms-settings", "labels", "name", "next-rotation-time", "partner", "provisioned-resources-parent", "rotation-period", "saa-enrollment-response", "setup-errors", "setup-status"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -490,7 +586,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -539,6 +635,297 @@ where } } + async fn _organizations_locations_workloads_restrict_allowed_resources(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > 
last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "restriction-type" => Some(("restrictionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["restriction-type"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudAssuredworkloadsV1RestrictAllowedResourcesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_workloads_restrict_allowed_resources(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => 
unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_locations_workloads_violations_acknowledge(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "comment" => Some(("comment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "non-compliant-org-policy" => Some(("nonCompliantOrgPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["comment", "non-compliant-org-policy"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudAssuredworkloadsV1AcknowledgeViolationRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_workloads_violations_acknowledge(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) 
=> Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_locations_workloads_violations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().locations_workloads_violations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn 
_organizations_locations_workloads_violations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().locations_workloads_violations_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "interval-start-time" => { + call = call.interval_start_time( value.map(|v| arg_from_str(v, err, "interval-start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); + }, + "interval-end-time" => { + call = call.interval_end_time( value.map(|v| arg_from_str(v, err, "interval-end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "interval-end-time", "interval-start-time", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -564,9 +951,24 @@ where ("locations-workloads-list", Some(opt)) => { call_result = self._organizations_locations_workloads_list(opt, dry_run, &mut err).await; }, + ("locations-workloads-mutate-partner-permissions", Some(opt)) => { + call_result = self._organizations_locations_workloads_mutate_partner_permissions(opt, dry_run, &mut err).await; + }, ("locations-workloads-patch", Some(opt)) => { call_result = self._organizations_locations_workloads_patch(opt, dry_run, &mut err).await; }, + ("locations-workloads-restrict-allowed-resources", Some(opt)) => { + call_result = self._organizations_locations_workloads_restrict_allowed_resources(opt, dry_run, &mut err).await; + }, + ("locations-workloads-violations-acknowledge", Some(opt)) => { + call_result = self._organizations_locations_workloads_violations_acknowledge(opt, dry_run, &mut err).await; + }, + ("locations-workloads-violations-get", Some(opt)) => { + call_result = self._organizations_locations_workloads_violations_get(opt, dry_run, &mut err).await; + }, + ("locations-workloads-violations-list", Some(opt)) => { + call_result = self._organizations_locations_workloads_violations_list(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("organizations".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -646,7 +1048,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("organizations", "methods: 
'locations-operations-get', 'locations-operations-list', 'locations-workloads-create', 'locations-workloads-delete', 'locations-workloads-get', 'locations-workloads-list' and 'locations-workloads-patch'", vec![ + ("organizations", "methods: 'locations-operations-get', 'locations-operations-list', 'locations-workloads-create', 'locations-workloads-delete', 'locations-workloads-get', 'locations-workloads-list', 'locations-workloads-mutate-partner-permissions', 'locations-workloads-patch', 'locations-workloads-restrict-allowed-resources', 'locations-workloads-violations-acknowledge', 'locations-workloads-violations-get' and 'locations-workloads-violations-list'", vec![ ("locations-operations-get", Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##), "Details at http://byron.github.io/google-apis-rs/google_assuredworkloads1_cli/organizations_locations-operations-get", @@ -747,7 +1149,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the Workload to fetch. This is the workloads's relative path in the API, formatted as "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". For example, "organizations/123/locations/us-east1/workloads/assured-workload-1"."##), + Some(r##"Required. The resource name of the Workload to fetch. This is the workload's relative path in the API, formatted as "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". 
For example, "organizations/123/locations/us-east1/workloads/assured-workload-1"."##), Some(true), Some(false)), @@ -779,6 +1181,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-workloads-mutate-partner-permissions", + Some(r##"Update the permissions settings for an existing partner workload. For force updates don't set etag field in the Workload. Only one update operation per workload can be in progress."##), + "Details at http://byron.github.io/google-apis-rs/google_assuredworkloads1_cli/organizations_locations-workloads-mutate-partner-permissions", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The `name` field is used to identify the workload. Format: organizations/{org_id}/locations/{location_id}/workloads/{workload_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -807,6 +1237,106 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-workloads-restrict-allowed-resources", + Some(r##"Restrict the list of resources allowed in the Workload environment. 
The current list of allowed products can be found at https://cloud.google.com/assured-workloads/docs/supported-products In addition to assuredworkloads.workload.update permission, the user should also have orgpolicy.policy.set permission on the folder resource to use this functionality."##), + "Details at http://byron.github.io/google-apis-rs/google_assuredworkloads1_cli/organizations_locations-workloads-restrict-allowed-resources", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the Workload. This is the workloads's relative path in the API, formatted as "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". For example, "organizations/123/locations/us-east1/workloads/assured-workload-1"."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-workloads-violations-acknowledge", + Some(r##"Acknowledges an existing violation. By acknowledging a violation, users acknowledge the existence of a compliance violation in their workload and decide to ignore it due to a valid business justification. Acknowledgement is a permanent operation and it cannot be reverted."##), + "Details at http://byron.github.io/google-apis-rs/google_assuredworkloads1_cli/organizations_locations-workloads-violations-acknowledge", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the Violation to acknowledge. 
Format: organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-workloads-violations-get", + Some(r##"Retrieves Assured Workload Violation based on ID."##), + "Details at http://byron.github.io/google-apis-rs/google_assuredworkloads1_cli/organizations_locations-workloads-violations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the Violation to fetch (ie. Violation.name). Format: organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-workloads-violations-list", + Some(r##"Lists the Violations in the AssuredWorkload Environment. Callers may also choose to read across multiple Workloads as per [AIP-159](https://google.aip.dev/159) by using '-' (the hyphen or dash character) as a wildcard character instead of workload-id in the parent. Format `organizations/{org_id}/locations/{location}/workloads/-`"##), + "Details at http://byron.github.io/google-apis-rs/google_assuredworkloads1_cli/organizations_locations-workloads-violations-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The Workload name. 
Format `organizations/{org_id}/locations/{location}/workloads/{workload}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -819,7 +1349,7 @@ async fn main() { let mut app = App::new("assuredworkloads1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230116") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_assuredworkloads1_cli") .arg(Arg::with_name("url") diff --git a/gen/assuredworkloads1/Cargo.toml b/gen/assuredworkloads1/Cargo.toml index d3089dc47b..935a88807f 100644 --- a/gen/assuredworkloads1/Cargo.toml +++ b/gen/assuredworkloads1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-assuredworkloads1" -version = "5.0.2-beta-1+20230116" +version = "5.0.2+20230116" authors = ["Sebastian Thiel "] description = "A complete library to interact with Assuredworkloads (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/assuredworkloads1" homepage = "https://cloud.google.com/learnmoreurl" -documentation = "https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116" +documentation = "https://docs.rs/google-assuredworkloads1/5.0.2+20230116" license = "MIT" keywords = ["assuredworkloads", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/assuredworkloads1/README.md b/gen/assuredworkloads1/README.md index a8708a0779..2ea4c4df15 100644 --- a/gen/assuredworkloads1/README.md +++ b/gen/assuredworkloads1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-assuredworkloads1` library allows access to all features of the *Google Assuredworkloads* service. 
-This documentation was generated from *Assuredworkloads* crate version *5.0.2-beta-1+20230116*, where *20230116* is the exact revision of the *assuredworkloads:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Assuredworkloads* crate version *5.0.2+20230116*, where *20230116* is the exact revision of the *assuredworkloads:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Assuredworkloads* *v1* API can be found at the [official documentation site](https://cloud.google.com/learnmoreurl). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/Assuredworkloads) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/Assuredworkloads) ... * organizations - * [*locations operations get*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationOperationListCall), [*locations workloads create*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadCreateCall), [*locations workloads delete*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadDeleteCall), [*locations workloads get*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadGetCall), [*locations workloads list*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadListCall), 
[*locations workloads mutate partner permissions*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadMutatePartnerPermissionCall), [*locations workloads patch*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadPatchCall), [*locations workloads restrict allowed resources*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadRestrictAllowedResourceCall), [*locations workloads violations acknowledge*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadViolationAcknowledgeCall), [*locations workloads violations get*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadViolationGetCall) and [*locations workloads violations list*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadViolationListCall) + * [*locations operations get*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationOperationListCall), [*locations workloads create*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadCreateCall), [*locations workloads delete*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadDeleteCall), [*locations workloads get*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadGetCall), [*locations workloads 
list*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadListCall), [*locations workloads mutate partner permissions*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadMutatePartnerPermissionCall), [*locations workloads patch*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadPatchCall), [*locations workloads restrict allowed resources*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadRestrictAllowedResourceCall), [*locations workloads violations acknowledge*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadViolationAcknowledgeCall), [*locations workloads violations get*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadViolationGetCall) and [*locations workloads violations list*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/api::OrganizationLocationWorkloadViolationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/Assuredworkloads)** +* **[Hub](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/Assuredworkloads)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::CallBuilder) +* **[Resources](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::Part)** + * **[Parts](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::Delegate) to the -[Method Builder](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::Delegate) to the +[Method Builder](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::RequestValue) and -[decodable](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::RequestValue) and +[decodable](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-assuredworkloads1/5.0.2-beta-1+20230116/google_assuredworkloads1/client::RequestValue) are moved +* [request values](https://docs.rs/google-assuredworkloads1/5.0.2+20230116/google_assuredworkloads1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/assuredworkloads1/src/api.rs b/gen/assuredworkloads1/src/api.rs index bdd3f8b4af..3114996098 100644 --- a/gen/assuredworkloads1/src/api.rs +++ b/gen/assuredworkloads1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Assuredworkloads { Assuredworkloads { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://assuredworkloads.googleapis.com/".to_string(), _root_url: "https://assuredworkloads.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Assuredworkloads { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/assuredworkloads1/src/client.rs b/gen/assuredworkloads1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/assuredworkloads1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/assuredworkloads1/src/lib.rs b/gen/assuredworkloads1/src/lib.rs index bc6d08d690..3525752194 100644 --- a/gen/assuredworkloads1/src/lib.rs +++ b/gen/assuredworkloads1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Assuredworkloads* crate version *5.0.2-beta-1+20230116*, where *20230116* is the exact revision of the *assuredworkloads:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Assuredworkloads* crate version *5.0.2+20230116*, where *20230116* is the exact revision of the *assuredworkloads:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Assuredworkloads* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/learnmoreurl). diff --git a/gen/authorizedbuyersmarketplace1-cli/Cargo.toml b/gen/authorizedbuyersmarketplace1-cli/Cargo.toml index dc36ccad70..2ddda4cef2 100644 --- a/gen/authorizedbuyersmarketplace1-cli/Cargo.toml +++ b/gen/authorizedbuyersmarketplace1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-authorizedbuyersmarketplace1-cli" -version = "4.0.1+20220307" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Authorized Buyers Marketplace (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/authorizedbuyersmarketplace1-cli" @@ -20,13 +20,13 @@ name = "authorizedbuyersmarketplace1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-authorizedbuyersmarketplace1] path = "../authorizedbuyersmarketplace1" -version = "4.0.1+20220307" +version = "5.0.2+20230124" + diff --git a/gen/authorizedbuyersmarketplace1-cli/README.md 
b/gen/authorizedbuyersmarketplace1-cli/README.md index 03749eb201..d866eb79d1 100644 --- a/gen/authorizedbuyersmarketplace1-cli/README.md +++ b/gen/authorizedbuyersmarketplace1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Authorized Buyers Marketplace* API at revision *20220307*. The CLI is at version *4.0.1*. +This documentation was generated from the *Authorized Buyers Marketplace* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash authorizedbuyersmarketplace1 [options] diff --git a/gen/authorizedbuyersmarketplace1-cli/mkdocs.yml b/gen/authorizedbuyersmarketplace1-cli/mkdocs.yml index 4a06b26ddf..9983696372 100644 --- a/gen/authorizedbuyersmarketplace1-cli/mkdocs.yml +++ b/gen/authorizedbuyersmarketplace1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Authorized Buyers Marketplace v4.0.1+20220307 +site_name: Authorized Buyers Marketplace v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-authorizedbuyersmarketplace1-cli site_description: A complete library to interact with Authorized Buyers Marketplace (protocol v1) @@ -7,46 +7,48 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/authorizedbuyers docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['bidders_finalized-deals-list.md', 'Bidders', 'Finalized Deals List'] -- ['buyers_auction-packages-get.md', 'Buyers', 'Auction Packages Get'] -- ['buyers_auction-packages-list.md', 'Buyers', 'Auction Packages List'] -- ['buyers_auction-packages-subscribe.md', 'Buyers', 'Auction Packages Subscribe'] -- ['buyers_auction-packages-subscribe-clients.md', 'Buyers', 'Auction Packages Subscribe Clients'] -- ['buyers_auction-packages-unsubscribe.md', 'Buyers', 'Auction Packages Unsubscribe'] -- ['buyers_auction-packages-unsubscribe-clients.md', 'Buyers', 'Auction Packages Unsubscribe Clients'] -- ['buyers_clients-activate.md', 'Buyers', 
'Clients Activate'] -- ['buyers_clients-create.md', 'Buyers', 'Clients Create'] -- ['buyers_clients-deactivate.md', 'Buyers', 'Clients Deactivate'] -- ['buyers_clients-get.md', 'Buyers', 'Clients Get'] -- ['buyers_clients-list.md', 'Buyers', 'Clients List'] -- ['buyers_clients-patch.md', 'Buyers', 'Clients Patch'] -- ['buyers_clients-users-activate.md', 'Buyers', 'Clients Users Activate'] -- ['buyers_clients-users-create.md', 'Buyers', 'Clients Users Create'] -- ['buyers_clients-users-deactivate.md', 'Buyers', 'Clients Users Deactivate'] -- ['buyers_clients-users-delete.md', 'Buyers', 'Clients Users Delete'] -- ['buyers_clients-users-get.md', 'Buyers', 'Clients Users Get'] -- ['buyers_clients-users-list.md', 'Buyers', 'Clients Users List'] -- ['buyers_finalized-deals-add-creative.md', 'Buyers', 'Finalized Deals Add Creative'] -- ['buyers_finalized-deals-get.md', 'Buyers', 'Finalized Deals Get'] -- ['buyers_finalized-deals-list.md', 'Buyers', 'Finalized Deals List'] -- ['buyers_finalized-deals-pause.md', 'Buyers', 'Finalized Deals Pause'] -- ['buyers_finalized-deals-resume.md', 'Buyers', 'Finalized Deals Resume'] -- ['buyers_finalized-deals-set-ready-to-serve.md', 'Buyers', 'Finalized Deals Set Ready To Serve'] -- ['buyers_proposals-accept.md', 'Buyers', 'Proposals Accept'] -- ['buyers_proposals-add-note.md', 'Buyers', 'Proposals Add Note'] -- ['buyers_proposals-cancel-negotiation.md', 'Buyers', 'Proposals Cancel Negotiation'] -- ['buyers_proposals-deals-batch-update.md', 'Buyers', 'Proposals Deals Batch Update'] -- ['buyers_proposals-deals-get.md', 'Buyers', 'Proposals Deals Get'] -- ['buyers_proposals-deals-list.md', 'Buyers', 'Proposals Deals List'] -- ['buyers_proposals-deals-patch.md', 'Buyers', 'Proposals Deals Patch'] -- ['buyers_proposals-get.md', 'Buyers', 'Proposals Get'] -- ['buyers_proposals-list.md', 'Buyers', 'Proposals List'] -- ['buyers_proposals-patch.md', 'Buyers', 'Proposals Patch'] -- ['buyers_proposals-send-rfp.md', 'Buyers', 'Proposals Send 
Rfp'] -- ['buyers_publisher-profiles-get.md', 'Buyers', 'Publisher Profiles Get'] -- ['buyers_publisher-profiles-list.md', 'Buyers', 'Publisher Profiles List'] +nav: +- Home: 'index.md' +- 'Bidders': + - 'Finalized Deals List': 'bidders_finalized-deals-list.md' +- 'Buyers': + - 'Auction Packages Get': 'buyers_auction-packages-get.md' + - 'Auction Packages List': 'buyers_auction-packages-list.md' + - 'Auction Packages Subscribe': 'buyers_auction-packages-subscribe.md' + - 'Auction Packages Subscribe Clients': 'buyers_auction-packages-subscribe-clients.md' + - 'Auction Packages Unsubscribe': 'buyers_auction-packages-unsubscribe.md' + - 'Auction Packages Unsubscribe Clients': 'buyers_auction-packages-unsubscribe-clients.md' + - 'Clients Activate': 'buyers_clients-activate.md' + - 'Clients Create': 'buyers_clients-create.md' + - 'Clients Deactivate': 'buyers_clients-deactivate.md' + - 'Clients Get': 'buyers_clients-get.md' + - 'Clients List': 'buyers_clients-list.md' + - 'Clients Patch': 'buyers_clients-patch.md' + - 'Clients Users Activate': 'buyers_clients-users-activate.md' + - 'Clients Users Create': 'buyers_clients-users-create.md' + - 'Clients Users Deactivate': 'buyers_clients-users-deactivate.md' + - 'Clients Users Delete': 'buyers_clients-users-delete.md' + - 'Clients Users Get': 'buyers_clients-users-get.md' + - 'Clients Users List': 'buyers_clients-users-list.md' + - 'Finalized Deals Add Creative': 'buyers_finalized-deals-add-creative.md' + - 'Finalized Deals Get': 'buyers_finalized-deals-get.md' + - 'Finalized Deals List': 'buyers_finalized-deals-list.md' + - 'Finalized Deals Pause': 'buyers_finalized-deals-pause.md' + - 'Finalized Deals Resume': 'buyers_finalized-deals-resume.md' + - 'Finalized Deals Set Ready To Serve': 'buyers_finalized-deals-set-ready-to-serve.md' + - 'Proposals Accept': 'buyers_proposals-accept.md' + - 'Proposals Add Note': 'buyers_proposals-add-note.md' + - 'Proposals Cancel Negotiation': 'buyers_proposals-cancel-negotiation.md' + - 
'Proposals Deals Batch Update': 'buyers_proposals-deals-batch-update.md' + - 'Proposals Deals Get': 'buyers_proposals-deals-get.md' + - 'Proposals Deals List': 'buyers_proposals-deals-list.md' + - 'Proposals Deals Patch': 'buyers_proposals-deals-patch.md' + - 'Proposals Get': 'buyers_proposals-get.md' + - 'Proposals List': 'buyers_proposals-list.md' + - 'Proposals Patch': 'buyers_proposals-patch.md' + - 'Proposals Send Rfp': 'buyers_proposals-send-rfp.md' + - 'Publisher Profiles Get': 'buyers_publisher-profiles-get.md' + - 'Publisher Profiles List': 'buyers_publisher-profiles-list.md' theme: readthedocs diff --git a/gen/authorizedbuyersmarketplace1-cli/src/client.rs b/gen/authorizedbuyersmarketplace1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/authorizedbuyersmarketplace1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/authorizedbuyersmarketplace1-cli/src/main.rs b/gen/authorizedbuyersmarketplace1-cli/src/main.rs index 77f8127e69..c24731805a 100644 --- a/gen/authorizedbuyersmarketplace1-cli/src/main.rs +++ b/gen/authorizedbuyersmarketplace1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_authorizedbuyersmarketplace1::{api, Error, oauth2}; +use google_authorizedbuyersmarketplace1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -178,7 +177,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -885,7 +884,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ 
-982,7 +981,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1400,7 +1399,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1596,7 +1595,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2306,7 +2305,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2426,6 +2425,7 @@ where "targeting.daypart-targeting.time-zone-type" => Some(("targeting.daypartTargeting.timeZoneType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "targeting.geo-targeting.excluded-criteria-ids" => Some(("targeting.geoTargeting.excludedCriteriaIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "targeting.geo-targeting.targeted-criteria-ids" => Some(("targeting.geoTargeting.targetedCriteriaIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "targeting.inventory-type-targeting.inventory-types" => Some(("targeting.inventoryTypeTargeting.inventoryTypes", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), "targeting.placement-targeting.mobile-application-targeting.first-party-targeting.excluded-app-ids" => Some(("targeting.placementTargeting.mobileApplicationTargeting.firstPartyTargeting.excludedAppIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "targeting.placement-targeting.mobile-application-targeting.first-party-targeting.targeted-app-ids" => Some(("targeting.placementTargeting.mobileApplicationTargeting.firstPartyTargeting.targetedAppIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "targeting.placement-targeting.uri-targeting.excluded-uris" => Some(("targeting.placementTargeting.uriTargeting.excludedUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2444,7 +2444,7 @@ where "targeting.video-targeting.targeted-position-types" => Some(("targeting.videoTargeting.targetedPositionTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["amount", "billed-buyer", "buyer", "client", "companion-delivery-type", "create-time", "creative-format", "creative-pre-approval-policy", "creative-requirements", "creative-rotation-type", "creative-safe-frame-compatibility", "currency-code", "daypart-targeting", "deal-type", "delivery-control", "delivery-rate-type", "description", "device-capability-targeting", "device-category-targeting", "display-name", "estimated-gross-spend", "excluded-app-ids", "excluded-criteria-ids", "excluded-position-types", "excluded-uris", "first-party-targeting", "fixed-price", "flight-end-time", "flight-start-time", "floor-price", "geo-targeting", "guaranteed-looks", "id", "impression-cap", "max-ad-duration-ms", "minimum-daily-looks", "mobile-application-targeting", "name", "nanos", "open-auction-allowed", "operating-system-criteria", "operating-system-targeting", 
"operating-system-version-criteria", "percent-share-of-voice", "placement-targeting", "preferred-deal-terms", "private-auction-terms", "programmatic-creative-source", "programmatic-guaranteed-terms", "proposal-revision", "publisher-profile", "reservation-type", "roadblocking-type", "seller-time-zone", "skippable-ad-type", "targeted-app-ids", "targeted-criteria-ids", "targeted-position-types", "targeted-uris", "targeting", "technology-targeting", "time-zone-type", "type", "units", "update-time", "uri-targeting", "user-list-targeting", "version", "video-targeting"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["amount", "billed-buyer", "buyer", "client", "companion-delivery-type", "create-time", "creative-format", "creative-pre-approval-policy", "creative-requirements", "creative-rotation-type", "creative-safe-frame-compatibility", "currency-code", "daypart-targeting", "deal-type", "delivery-control", "delivery-rate-type", "description", "device-capability-targeting", "device-category-targeting", "display-name", "estimated-gross-spend", "excluded-app-ids", "excluded-criteria-ids", "excluded-position-types", "excluded-uris", "first-party-targeting", "fixed-price", "flight-end-time", "flight-start-time", "floor-price", "geo-targeting", "guaranteed-looks", "id", "impression-cap", "inventory-type-targeting", "inventory-types", "max-ad-duration-ms", "minimum-daily-looks", "mobile-application-targeting", "name", "nanos", "open-auction-allowed", "operating-system-criteria", "operating-system-targeting", "operating-system-version-criteria", "percent-share-of-voice", "placement-targeting", "preferred-deal-terms", "private-auction-terms", "programmatic-creative-source", "programmatic-guaranteed-terms", "proposal-revision", "publisher-profile", "reservation-type", "roadblocking-type", "seller-time-zone", "skippable-ad-type", "targeted-app-ids", "targeted-criteria-ids", "targeted-position-types", "targeted-uris", "targeting", "technology-targeting", "time-zone-type", 
"type", "units", "update-time", "uri-targeting", "user-list-targeting", "version", "video-targeting"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2459,7 +2459,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2570,7 +2570,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2677,7 +2677,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2896,7 +2896,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3773,7 +3773,7 @@ async fn main() { Some(false)), ]), ("finalized-deals-set-ready-to-serve", - Some(r##"Sets the given finalized deal as ready to serve. By default, deals are ready to serve as soon as they're finalized. A bidder can opt out of this feature by asking to be included in an allowlist. Once opted out, finalized deals belonging to the bidder and its child seats will not start serving until this method is called. 
This method is useful to the bidders who prefer to not receive bid requests before the creative is ready. This method only applies to programmatic guaranteed deals."##), + Some(r##"Sets the given finalized deal as ready to serve. By default, deals are set as ready to serve as soon as they're finalized. If you want to opt out of the default behavior, and manually indicate that deals are ready to serve, ask your Technical Account Manager to add you to the allowlist. If you choose to use this method, finalized deals belonging to the bidder and its child seats don't start serving until after you call `setReadyToServe`, and after the deals become active. For example, you can use this method to delay receiving bid requests until your creative is ready. This method only applies to programmatic guaranteed deals."##), "Details at http://byron.github.io/google-apis-rs/google_authorizedbuyersmarketplace1_cli/buyers_finalized-deals-set-ready-to-serve", vec![ (Some(r##"deal"##), @@ -3957,7 +3957,7 @@ async fn main() { Some(false)), ]), ("proposals-deals-patch", - Some(r##"Updates the given deal at the buyer known revision number. If the server revision has advanced since the passed-in proposal.proposal_revision an ABORTED error message will be returned. The revision number is incremented by the server whenever the proposal or its constituent deals are updated. Note: The revision number is kept at a proposal level. The buyer of the API is expected to keep track of the revision number after the last update operation and send it in as part of the next update request. This way, if there are further changes on the server (e.g., seller making new updates), then the server can detect conflicts and reject the proposed changes."##), + Some(r##"Updates the given deal at the buyer known revision number. If the server revision has advanced since the passed-in proposal.proposal_revision an ABORTED error message will be returned. 
The revision number is incremented by the server whenever the proposal or its constituent deals are updated. Note: The revision number is kept at a proposal level. The buyer of the API is expected to keep track of the revision number after the last update operation and send it in as part of the next update request. This way, if there are further changes on the server (for example, seller making new updates), then the server can detect conflicts and reject the proposed changes."##), "Details at http://byron.github.io/google-apis-rs/google_authorizedbuyersmarketplace1_cli/buyers_proposals-deals-patch", vec![ (Some(r##"name"##), @@ -4107,7 +4107,7 @@ async fn main() { Some(false)), ]), ("publisher-profiles-list", - Some(r##"Lists publisher profiles"##), + Some(r##"Lists publisher profiles. The returned publisher profiles aren't in any defined order. The order of the results might change. A new publisher profile can appear in any place in the list of returned results."##), "Details at http://byron.github.io/google-apis-rs/google_authorizedbuyersmarketplace1_cli/buyers_publisher-profiles-list", vec![ (Some(r##"parent"##), @@ -4134,8 +4134,8 @@ async fn main() { let mut app = App::new("authorizedbuyersmarketplace1") .author("Sebastian Thiel ") - .version("4.0.1+20220307") - .about("The Authorized Buyers Marketplace API allows buyers programmatically discover inventory; propose, retrieve and negotiate deals with publishers.") + .version("5.0.2+20230124") + .about("The Authorized Buyers Marketplace API lets buyers programmatically discover inventory; propose, retrieve and negotiate deals with publishers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_authorizedbuyersmarketplace1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/authorizedbuyersmarketplace1/Cargo.toml b/gen/authorizedbuyersmarketplace1/Cargo.toml index 8676f4c33d..3a5f4a1292 100644 --- a/gen/authorizedbuyersmarketplace1/Cargo.toml +++ 
b/gen/authorizedbuyersmarketplace1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-authorizedbuyersmarketplace1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Authorized Buyers Marketplace (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/authorizedbuyersmarketplace1" homepage = "https://developers.google.com/authorized-buyers/apis/marketplace/reference/rest/" -documentation = "https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124" license = "MIT" keywords = ["authorizedbuyersmark", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/authorizedbuyersmarketplace1/README.md b/gen/authorizedbuyersmarketplace1/README.md index 287f22223b..156bd96fee 100644 --- a/gen/authorizedbuyersmarketplace1/README.md +++ b/gen/authorizedbuyersmarketplace1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-authorizedbuyersmarketplace1` library allows access to all features of the *Google Authorized Buyers Marketplace* service. -This documentation was generated from *Authorized Buyers Marketplace* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *authorizedbuyersmarketplace:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Authorized Buyers Marketplace* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *authorizedbuyersmarketplace:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Authorized Buyers Marketplace* *v1* API can be found at the [official documentation site](https://developers.google.com/authorized-buyers/apis/marketplace/reference/rest/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/AuthorizedBuyersMarketplace) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/AuthorizedBuyersMarketplace) ... * bidders - * [*finalized deals list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BidderFinalizedDealListCall) + * [*finalized deals list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BidderFinalizedDealListCall) * buyers - * [*auction packages get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageGetCall), [*auction packages list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageListCall), [*auction packages subscribe*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageSubscribeCall), [*auction packages subscribe clients*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageSubscribeClientCall), [*auction packages unsubscribe*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageUnsubscribeCall), [*auction packages unsubscribe clients*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageUnsubscribeClientCall), [*clients 
activate*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientActivateCall), [*clients create*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientCreateCall), [*clients deactivate*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientDeactivateCall), [*clients get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientGetCall), [*clients list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientListCall), [*clients patch*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientPatchCall), [*clients users activate*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserActivateCall), [*clients users create*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserCreateCall), [*clients users deactivate*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserDeactivateCall), [*clients users delete*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserDeleteCall), [*clients users get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserGetCall), [*clients users list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserListCall), [*finalized deals add 
creative*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealAddCreativeCall), [*finalized deals get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealGetCall), [*finalized deals list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealListCall), [*finalized deals pause*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealPauseCall), [*finalized deals resume*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealResumeCall), [*finalized deals set ready to serve*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealSetReadyToServeCall), [*proposals accept*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalAcceptCall), [*proposals add note*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalAddNoteCall), [*proposals cancel negotiation*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalCancelNegotiationCall), [*proposals deals batch update*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalDealBatchUpdateCall), [*proposals deals get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalDealGetCall), [*proposals deals 
list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalDealListCall), [*proposals deals patch*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalDealPatchCall), [*proposals get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalGetCall), [*proposals list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalListCall), [*proposals patch*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalPatchCall), [*proposals send rfp*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalSendRfpCall), [*publisher profiles get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerPublisherProfileGetCall) and [*publisher profiles list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/api::BuyerPublisherProfileListCall) + * [*auction packages get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageGetCall), [*auction packages list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageListCall), [*auction packages subscribe*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageSubscribeCall), [*auction packages subscribe 
clients*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageSubscribeClientCall), [*auction packages unsubscribe*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageUnsubscribeCall), [*auction packages unsubscribe clients*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerAuctionPackageUnsubscribeClientCall), [*clients activate*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientActivateCall), [*clients create*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientCreateCall), [*clients deactivate*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientDeactivateCall), [*clients get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientGetCall), [*clients list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientListCall), [*clients patch*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientPatchCall), [*clients users activate*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserActivateCall), [*clients users create*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserCreateCall), [*clients users deactivate*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserDeactivateCall), [*clients users 
delete*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserDeleteCall), [*clients users get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserGetCall), [*clients users list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerClientUserListCall), [*finalized deals add creative*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealAddCreativeCall), [*finalized deals get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealGetCall), [*finalized deals list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealListCall), [*finalized deals pause*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealPauseCall), [*finalized deals resume*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealResumeCall), [*finalized deals set ready to serve*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerFinalizedDealSetReadyToServeCall), [*proposals accept*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalAcceptCall), [*proposals add note*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalAddNoteCall), [*proposals cancel negotiation*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalCancelNegotiationCall), [*proposals deals batch 
update*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalDealBatchUpdateCall), [*proposals deals get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalDealGetCall), [*proposals deals list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalDealListCall), [*proposals deals patch*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalDealPatchCall), [*proposals get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalGetCall), [*proposals list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalListCall), [*proposals patch*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalPatchCall), [*proposals send rfp*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerProposalSendRfpCall), [*publisher profiles get*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerPublisherProfileGetCall) and [*publisher profiles list*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/api::BuyerPublisherProfileListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/AuthorizedBuyersMarketplace)** +* 
**[Hub](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/AuthorizedBuyersMarketplace)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::CallBuilder) -* **[Resources](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::CallBuilder) +* **[Resources](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::Part)** + * **[Parts](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked 
with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::Delegate) to the -[Method Builder](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::Delegate) to the +[Method Builder](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::RequestValue) and -[decodable](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::RequestValue) and +[decodable](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2-beta-1+20230124/google_authorizedbuyersmarketplace1/client::RequestValue) are moved +* [request values](https://docs.rs/google-authorizedbuyersmarketplace1/5.0.2+20230124/google_authorizedbuyersmarketplace1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/authorizedbuyersmarketplace1/src/api.rs b/gen/authorizedbuyersmarketplace1/src/api.rs index 346df32a81..74fb48e488 100644 --- a/gen/authorizedbuyersmarketplace1/src/api.rs +++ b/gen/authorizedbuyersmarketplace1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> AuthorizedBuyersMarketplace { AuthorizedBuyersMarketplace { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://authorizedbuyersmarketplace.googleapis.com/".to_string(), _root_url: "https://authorizedbuyersmarketplace.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> AuthorizedBuyersMarketplace { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/authorizedbuyersmarketplace1/src/client.rs b/gen/authorizedbuyersmarketplace1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/authorizedbuyersmarketplace1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/authorizedbuyersmarketplace1/src/lib.rs b/gen/authorizedbuyersmarketplace1/src/lib.rs index f2861ae493..d628b8891e 100644 --- a/gen/authorizedbuyersmarketplace1/src/lib.rs +++ b/gen/authorizedbuyersmarketplace1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Authorized Buyers Marketplace* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *authorizedbuyersmarketplace:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Authorized Buyers Marketplace* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *authorizedbuyersmarketplace:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Authorized Buyers Marketplace* *v1* API can be found at the //! [official documentation site](https://developers.google.com/authorized-buyers/apis/marketplace/reference/rest/). diff --git a/gen/autoscaler1_beta2-cli/Cargo.toml b/gen/autoscaler1_beta2-cli/Cargo.toml index 47192bb71b..fe304272c7 100644 --- a/gen/autoscaler1_beta2-cli/Cargo.toml +++ b/gen/autoscaler1_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-autoscaler1_beta2-cli" -version = "4.0.1+20150629" +version = "5.0.2+20150629" authors = ["Sebastian Thiel "] description = "A complete library to interact with autoscaler (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/autoscaler1_beta2-cli" @@ -20,13 +20,13 @@ name = "autoscaler1-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-autoscaler1_beta2] path = "../autoscaler1_beta2" -version = "4.0.1+20150629" +version = "5.0.2+20150629" + diff --git a/gen/autoscaler1_beta2-cli/README.md b/gen/autoscaler1_beta2-cli/README.md 
index 8d16646076..90467e1652 100644 --- a/gen/autoscaler1_beta2-cli/README.md +++ b/gen/autoscaler1_beta2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *autoscaler* API at revision *20150629*. The CLI is at version *4.0.1*. +This documentation was generated from the *autoscaler* API at revision *20150629*. The CLI is at version *5.0.2*. ```bash autoscaler1-beta2 [options] diff --git a/gen/autoscaler1_beta2-cli/mkdocs.yml b/gen/autoscaler1_beta2-cli/mkdocs.yml index 45204dead3..c6902a4d46 100644 --- a/gen/autoscaler1_beta2-cli/mkdocs.yml +++ b/gen/autoscaler1_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: autoscaler v4.0.1+20150629 +site_name: autoscaler v5.0.2+20150629 site_url: http://byron.github.io/google-apis-rs/google-autoscaler1_beta2-cli site_description: A complete library to interact with autoscaler (protocol v1beta2) @@ -7,18 +7,21 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/autoscaler1_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['autoscalers_delete.md', 'Autoscalers', 'Delete'] -- ['autoscalers_get.md', 'Autoscalers', 'Get'] -- ['autoscalers_insert.md', 'Autoscalers', 'Insert'] -- ['autoscalers_list.md', 'Autoscalers', 'List'] -- ['autoscalers_patch.md', 'Autoscalers', 'Patch'] -- ['autoscalers_update.md', 'Autoscalers', 'Update'] -- ['zone-operations_delete.md', 'Zone Operations', 'Delete'] -- ['zone-operations_get.md', 'Zone Operations', 'Get'] -- ['zone-operations_list.md', 'Zone Operations', 'List'] -- ['zones_list.md', 'Zones', 'List'] +nav: +- Home: 'index.md' +- 'Autoscalers': + - 'Delete': 'autoscalers_delete.md' + - 'Get': 'autoscalers_get.md' + - 'Insert': 'autoscalers_insert.md' + - 'List': 'autoscalers_list.md' + - 'Patch': 'autoscalers_patch.md' + - 'Update': 'autoscalers_update.md' +- 'Zone Operations': + - 'Delete': 'zone-operations_delete.md' + - 'Get': 
'zone-operations_get.md' + - 'List': 'zone-operations_list.md' +- 'Zones': + - 'List': 'zones_list.md' theme: readthedocs diff --git a/gen/autoscaler1_beta2-cli/src/client.rs b/gen/autoscaler1_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/autoscaler1_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/autoscaler1_beta2-cli/src/main.rs b/gen/autoscaler1_beta2-cli/src/main.rs index 27bbe91d76..5dd68f15bc 100644 --- a/gen/autoscaler1_beta2-cli/src/main.rs +++ b/gen/autoscaler1_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_autoscaler1_beta2::{api, Error, oauth2}; +use google_autoscaler1_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -261,7 +260,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -611,7 +610,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -673,7 +672,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1189,7 +1188,7 
@@ async fn main() { let mut app = App::new("autoscaler1-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20150629") + .version("5.0.2+20150629") .about("The Google Compute Engine Autoscaler API provides autoscaling for groups of Cloud VMs.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_autoscaler1_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/autoscaler1_beta2/Cargo.toml b/gen/autoscaler1_beta2/Cargo.toml index 76da1314f0..45798d7402 100644 --- a/gen/autoscaler1_beta2/Cargo.toml +++ b/gen/autoscaler1_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-autoscaler1_beta2" -version = "5.0.2-beta-1+20150629" +version = "5.0.2+20150629" authors = ["Sebastian Thiel "] description = "A complete library to interact with autoscaler (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/autoscaler1_beta2" homepage = "http://developers.google.com/compute/docs/autoscaler" -documentation = "https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629" +documentation = "https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629" license = "MIT" keywords = ["autoscaler", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/autoscaler1_beta2/README.md b/gen/autoscaler1_beta2/README.md index 08d343b654..5cd04defa2 100644 --- a/gen/autoscaler1_beta2/README.md +++ b/gen/autoscaler1_beta2/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-autoscaler1_beta2` library allows access to all features of the *Google autoscaler* service. -This documentation was generated from *autoscaler* crate version *5.0.2-beta-1+20150629*, where *20150629* is the exact revision of the *autoscaler:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *autoscaler* crate version *5.0.2+20150629*, where *20150629* is the exact revision of the *autoscaler:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *autoscaler* *v1_beta2* API can be found at the [official documentation site](http://developers.google.com/compute/docs/autoscaler). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/AutoscalerHub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/AutoscalerHub) ... -* [autoscalers](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::Autoscaler) - * [*delete*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::AutoscalerDeleteCall), [*get*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::AutoscalerGetCall), [*insert*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::AutoscalerInsertCall), [*list*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::AutoscalerListCall), [*patch*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::AutoscalerPatchCall) and [*update*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::AutoscalerUpdateCall) +* [autoscalers](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::Autoscaler) + * [*delete*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::AutoscalerDeleteCall), [*get*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::AutoscalerGetCall), 
[*insert*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::AutoscalerInsertCall), [*list*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::AutoscalerListCall), [*patch*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::AutoscalerPatchCall) and [*update*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::AutoscalerUpdateCall) * zone operations - * [*delete*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::ZoneOperationDeleteCall), [*get*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::ZoneOperationGetCall) and [*list*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::ZoneOperationListCall) -* [zones](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::Zone) - * [*list*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/api::ZoneListCall) + * [*delete*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::ZoneOperationDeleteCall), [*get*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::ZoneOperationGetCall) and [*list*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::ZoneOperationListCall) +* [zones](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::Zone) + * [*list*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/api::ZoneListCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/AutoscalerHub)** +* 
**[Hub](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/AutoscalerHub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::Part)** + * **[Parts](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-autoscaler1_beta2/5.0.2-beta-1+20150629/google_autoscaler1_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-autoscaler1_beta2/5.0.2+20150629/google_autoscaler1_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/autoscaler1_beta2/src/api.rs b/gen/autoscaler1_beta2/src/api.rs index d7131ba0f8..b088414e6b 100644 --- a/gen/autoscaler1_beta2/src/api.rs +++ b/gen/autoscaler1_beta2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> AutoscalerHub { AutoscalerHub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/autoscaler/v1beta2/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> AutoscalerHub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/autoscaler1_beta2/src/client.rs b/gen/autoscaler1_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/autoscaler1_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/autoscaler1_beta2/src/lib.rs b/gen/autoscaler1_beta2/src/lib.rs index 9a4b8bf708..87a31e84c9 100644 --- a/gen/autoscaler1_beta2/src/lib.rs +++ b/gen/autoscaler1_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *autoscaler* crate version *5.0.2-beta-1+20150629*, where *20150629* is the exact revision of the *autoscaler:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *autoscaler* crate version *5.0.2+20150629*, where *20150629* is the exact revision of the *autoscaler:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *autoscaler* *v1_beta2* API can be found at the //! [official documentation site](http://developers.google.com/compute/docs/autoscaler). diff --git a/gen/baremetalsolution2-cli/Cargo.toml b/gen/baremetalsolution2-cli/Cargo.toml index d5ef5d857e..a407c921ac 100644 --- a/gen/baremetalsolution2-cli/Cargo.toml +++ b/gen/baremetalsolution2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-baremetalsolution2-cli" -version = "4.0.1+20220209" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with baremetalsolution (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/baremetalsolution2-cli" @@ -20,13 +20,13 @@ name = "baremetalsolution2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-baremetalsolution2] path = "../baremetalsolution2" -version = "4.0.1+20220209" +version = "5.0.2+20230113" + diff --git a/gen/baremetalsolution2-cli/README.md b/gen/baremetalsolution2-cli/README.md index a5465667e5..f746b53660 100644 --- a/gen/baremetalsolution2-cli/README.md +++ 
b/gen/baremetalsolution2-cli/README.md @@ -25,37 +25,48 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *baremetalsolution* API at revision *20220209*. The CLI is at version *4.0.1*. +This documentation was generated from the *baremetalsolution* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash baremetalsolution2 [options] projects locations-get [-p ]... [-o ] + locations-instance-provisioning-settings-fetch [-p ]... [-o ] + locations-instances-create (-r )... [-p ]... [-o ] + locations-instances-detach-lun (-r )... [-p ]... [-o ] + locations-instances-disable-interactive-serial-console (-r )... [-p ]... [-o ] + locations-instances-enable-interactive-serial-console (-r )... [-p ]... [-o ] locations-instances-get [-p ]... [-o ] locations-instances-list [-p ]... [-o ] locations-instances-patch (-r )... [-p ]... [-o ] locations-instances-reset (-r )... [-p ]... [-o ] locations-instances-start (-r )... [-p ]... [-o ] + locations-instances-stop (-r )... [-p ]... [-o ] locations-list [-p ]... [-o ] locations-networks-get [-p ]... [-o ] locations-networks-list [-p ]... [-o ] locations-networks-list-network-usage [-p ]... [-o ] locations-networks-patch (-r )... [-p ]... [-o ] + locations-nfs-shares-create (-r )... [-p ]... [-o ] + locations-nfs-shares-delete [-p ]... [-o ] locations-nfs-shares-get [-p ]... [-o ] locations-nfs-shares-list [-p ]... [-o ] locations-nfs-shares-patch (-r )... [-p ]... [-o ] + locations-operations-get [-p ]... [-o ] + locations-provisioning-configs-create (-r )... [-p ]... [-o ] + locations-provisioning-configs-get [-p ]... [-o ] + locations-provisioning-configs-patch (-r )... [-p ]... [-o ] locations-provisioning-configs-submit (-r )... [-p ]... [-o ] locations-provisioning-quotas-list [-p ]... [-o ] - locations-snapshot-schedule-policies-create (-r )... [-p ]... [-o ] - locations-snapshot-schedule-policies-delete [-p ]... 
[-o ] - locations-snapshot-schedule-policies-get [-p ]... [-o ] - locations-snapshot-schedule-policies-list [-p ]... [-o ] - locations-snapshot-schedule-policies-patch (-r )... [-p ]... [-o ] + locations-ssh-keys-create (-r )... [-p ]... [-o ] + locations-ssh-keys-delete [-p ]... [-o ] + locations-ssh-keys-list [-p ]... [-o ] locations-volumes-get [-p ]... [-o ] locations-volumes-list [-p ]... [-o ] locations-volumes-luns-get [-p ]... [-o ] locations-volumes-luns-list [-p ]... [-o ] locations-volumes-patch (-r )... [-p ]... [-o ] + locations-volumes-resize (-r )... [-p ]... [-o ] locations-volumes-snapshots-create (-r )... [-p ]... [-o ] locations-volumes-snapshots-delete [-p ]... [-o ] locations-volumes-snapshots-get [-p ]... [-o ] diff --git a/gen/baremetalsolution2-cli/mkdocs.yml b/gen/baremetalsolution2-cli/mkdocs.yml index be5cffacbe..055d762542 100644 --- a/gen/baremetalsolution2-cli/mkdocs.yml +++ b/gen/baremetalsolution2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: baremetalsolution v4.0.1+20220209 +site_name: baremetalsolution v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-baremetalsolution2-cli site_description: A complete library to interact with baremetalsolution (protocol v2) @@ -7,39 +7,51 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/baremetalsolutio docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-patch.md', 'Projects', 'Locations Instances Patch'] -- ['projects_locations-instances-reset.md', 'Projects', 'Locations Instances Reset'] -- ['projects_locations-instances-start.md', 'Projects', 'Locations Instances Start'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-networks-get.md', 
'Projects', 'Locations Networks Get'] -- ['projects_locations-networks-list.md', 'Projects', 'Locations Networks List'] -- ['projects_locations-networks-list-network-usage.md', 'Projects', 'Locations Networks List Network Usage'] -- ['projects_locations-networks-patch.md', 'Projects', 'Locations Networks Patch'] -- ['projects_locations-nfs-shares-get.md', 'Projects', 'Locations Nfs Shares Get'] -- ['projects_locations-nfs-shares-list.md', 'Projects', 'Locations Nfs Shares List'] -- ['projects_locations-nfs-shares-patch.md', 'Projects', 'Locations Nfs Shares Patch'] -- ['projects_locations-provisioning-configs-submit.md', 'Projects', 'Locations Provisioning Configs Submit'] -- ['projects_locations-provisioning-quotas-list.md', 'Projects', 'Locations Provisioning Quotas List'] -- ['projects_locations-snapshot-schedule-policies-create.md', 'Projects', 'Locations Snapshot Schedule Policies Create'] -- ['projects_locations-snapshot-schedule-policies-delete.md', 'Projects', 'Locations Snapshot Schedule Policies Delete'] -- ['projects_locations-snapshot-schedule-policies-get.md', 'Projects', 'Locations Snapshot Schedule Policies Get'] -- ['projects_locations-snapshot-schedule-policies-list.md', 'Projects', 'Locations Snapshot Schedule Policies List'] -- ['projects_locations-snapshot-schedule-policies-patch.md', 'Projects', 'Locations Snapshot Schedule Policies Patch'] -- ['projects_locations-volumes-get.md', 'Projects', 'Locations Volumes Get'] -- ['projects_locations-volumes-list.md', 'Projects', 'Locations Volumes List'] -- ['projects_locations-volumes-luns-get.md', 'Projects', 'Locations Volumes Luns Get'] -- ['projects_locations-volumes-luns-list.md', 'Projects', 'Locations Volumes Luns List'] -- ['projects_locations-volumes-patch.md', 'Projects', 'Locations Volumes Patch'] -- ['projects_locations-volumes-snapshots-create.md', 'Projects', 'Locations Volumes Snapshots Create'] -- ['projects_locations-volumes-snapshots-delete.md', 'Projects', 'Locations Volumes 
Snapshots Delete'] -- ['projects_locations-volumes-snapshots-get.md', 'Projects', 'Locations Volumes Snapshots Get'] -- ['projects_locations-volumes-snapshots-list.md', 'Projects', 'Locations Volumes Snapshots List'] -- ['projects_locations-volumes-snapshots-restore-volume-snapshot.md', 'Projects', 'Locations Volumes Snapshots Restore Volume Snapshot'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Instance Provisioning Settings Fetch': 'projects_locations-instance-provisioning-settings-fetch.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Detach Lun': 'projects_locations-instances-detach-lun.md' + - 'Locations Instances Disable Interactive Serial Console': 'projects_locations-instances-disable-interactive-serial-console.md' + - 'Locations Instances Enable Interactive Serial Console': 'projects_locations-instances-enable-interactive-serial-console.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Patch': 'projects_locations-instances-patch.md' + - 'Locations Instances Reset': 'projects_locations-instances-reset.md' + - 'Locations Instances Start': 'projects_locations-instances-start.md' + - 'Locations Instances Stop': 'projects_locations-instances-stop.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Networks Get': 'projects_locations-networks-get.md' + - 'Locations Networks List': 'projects_locations-networks-list.md' + - 'Locations Networks List Network Usage': 'projects_locations-networks-list-network-usage.md' + - 'Locations Networks Patch': 'projects_locations-networks-patch.md' + - 'Locations Nfs Shares Create': 'projects_locations-nfs-shares-create.md' + - 'Locations Nfs Shares Delete': 'projects_locations-nfs-shares-delete.md' + - 'Locations Nfs Shares Get': 'projects_locations-nfs-shares-get.md' + - 
'Locations Nfs Shares List': 'projects_locations-nfs-shares-list.md' + - 'Locations Nfs Shares Patch': 'projects_locations-nfs-shares-patch.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Provisioning Configs Create': 'projects_locations-provisioning-configs-create.md' + - 'Locations Provisioning Configs Get': 'projects_locations-provisioning-configs-get.md' + - 'Locations Provisioning Configs Patch': 'projects_locations-provisioning-configs-patch.md' + - 'Locations Provisioning Configs Submit': 'projects_locations-provisioning-configs-submit.md' + - 'Locations Provisioning Quotas List': 'projects_locations-provisioning-quotas-list.md' + - 'Locations Ssh Keys Create': 'projects_locations-ssh-keys-create.md' + - 'Locations Ssh Keys Delete': 'projects_locations-ssh-keys-delete.md' + - 'Locations Ssh Keys List': 'projects_locations-ssh-keys-list.md' + - 'Locations Volumes Get': 'projects_locations-volumes-get.md' + - 'Locations Volumes List': 'projects_locations-volumes-list.md' + - 'Locations Volumes Luns Get': 'projects_locations-volumes-luns-get.md' + - 'Locations Volumes Luns List': 'projects_locations-volumes-luns-list.md' + - 'Locations Volumes Patch': 'projects_locations-volumes-patch.md' + - 'Locations Volumes Resize': 'projects_locations-volumes-resize.md' + - 'Locations Volumes Snapshots Create': 'projects_locations-volumes-snapshots-create.md' + - 'Locations Volumes Snapshots Delete': 'projects_locations-volumes-snapshots-delete.md' + - 'Locations Volumes Snapshots Get': 'projects_locations-volumes-snapshots-get.md' + - 'Locations Volumes Snapshots List': 'projects_locations-volumes-snapshots-list.md' + - 'Locations Volumes Snapshots Restore Volume Snapshot': 'projects_locations-volumes-snapshots-restore-volume-snapshot.md' theme: readthedocs diff --git a/gen/baremetalsolution2-cli/src/client.rs b/gen/baremetalsolution2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- 
a/gen/baremetalsolution2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => 
Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut 
fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/baremetalsolution2-cli/src/main.rs b/gen/baremetalsolution2-cli/src/main.rs index a36176de1b..c974b06031 100644 --- a/gen/baremetalsolution2-cli/src/main.rs +++ b/gen/baremetalsolution2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_baremetalsolution2::{api, Error, oauth2}; +use google_baremetalsolution2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -103,6 +102,410 @@ where } } + async fn _projects_locations_instance_provisioning_settings_fetch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_instance_provisioning_settings_fetch(opt.value_of("location").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hyperthreading-enabled" => Some(("hyperthreadingEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "interactive-serial-console-enabled" => Some(("interactiveSerialConsoleEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "login-info" => Some(("loginInfo", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "machine-type" => Some(("machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network-template" => Some(("networkTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "os-image" => Some(("osImage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pod" => Some(("pod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "workload-profile" => Some(("workloadProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "hyperthreading-enabled", "id", "interactive-serial-console-enabled", "labels", "login-info", "machine-type", "name", "network-template", "os-image", "pod", "state", "update-time", "workload-profile"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) 
= type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Instance = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_detach_lun(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for 
kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "lun" => Some(("lun", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "skip-reboot" => Some(("skipReboot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["lun", "skip-reboot"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DetachLunRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_detach_lun(request, opt.value_of("instance").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_disable_interactive_serial_console(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DisableInteractiveSerialConsoleRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_disable_interactive_serial_console(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_enable_interactive_serial_console(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::EnableInteractiveSerialConsoleRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_enable_interactive_serial_console(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let 
mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_instances_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_instances_get(opt.value_of("name").unwrap_or("")); @@ -165,7 +568,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -245,12 +648,17 @@ where "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "interactive-serial-console-enabled" => Some(("interactiveSerialConsoleEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "login-info" => Some(("loginInfo", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"machine-type" => Some(("machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network-template" => Some(("networkTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "os-image" => Some(("osImage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pod" => Some(("pod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "workload-profile" => Some(("workloadProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "hyperthreading-enabled", "id", "interactive-serial-console-enabled", "labels", "machine-type", "name", "state", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "hyperthreading-enabled", "id", "interactive-serial-console-enabled", "labels", "login-info", "machine-type", "name", "network-template", "os-image", "pod", "state", "update-time", "workload-profile"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -265,7 +673,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -482,6 +890,90 @@ where } } + async fn _projects_locations_instances_stop(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); 
+ let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::StopInstanceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_stop(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut 
ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or("")); @@ -492,7 +984,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -606,7 +1098,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -734,11 +1226,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "cidr" => Some(("cidr", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateway-ip" => Some(("gatewayIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ip-address" => Some(("ipAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "jumbo-frames-enabled" => Some(("jumboFramesEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "mac-address" => Some(("macAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pod" => Some(("pod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "services-cidr" => Some(("servicesCidr", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -747,7 +1242,7 @@ where "vrf.qos-policy.bandwidth-gbps" => Some(("vrf.qosPolicy.bandwidthGbps", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "vrf.state" => Some(("vrf.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bandwidth-gbps", "cidr", "id", "ip-address", "labels", "mac-address", "name", "qos-policy", "services-cidr", "state", "type", "vlan-id", "vrf"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bandwidth-gbps", "cidr", "gateway-ip", "id", "ip-address", "jumbo-frames-enabled", "labels", "mac-address", "name", "pod", "qos-policy", "services-cidr", "state", "type", "vlan-id", "vrf"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -762,7 +1257,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -811,6 +1306,150 @@ where } } + async fn _projects_locations_nfs_shares_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nfs-share-id" => Some(("nfsShareId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "requested-size-gib" => Some(("requestedSizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "storage-type" => Some(("storageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "volume" => Some(("volume", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["id", "labels", "name", "nfs-share-id", "requested-size-gib", "state", "storage-type", "volume"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NfsShare = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_nfs_shares_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_nfs_shares_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_nfs_shares_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_nfs_shares_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_nfs_shares_get(opt.value_of("name").unwrap_or("")); @@ -873,7 +1512,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -948,13 +1587,16 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "nfs-share-id" => Some(("nfsShareId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "requested-size-gib" => Some(("requestedSizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "storage-type" => Some(("storageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "volume" => Some(("volume", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["labels", "name", "nfs-share-id", "state", "volume"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["id", "labels", "name", "nfs-share-id", "requested-size-gib", "state", "storage-type", "volume"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -969,7 +1611,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1018,6 +1660,311 @@ where } } + async fn _projects_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found 
= false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_provisioning_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, 
JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "cloud-console-uri" => Some(("cloudConsoleUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "custom-id" => Some(("customId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "email" => Some(("email", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "handover-service-account" => Some(("handoverServiceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ticket-id" => Some(("ticketId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vpc-sc-enabled" => Some(("vpcScEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-console-uri", "custom-id", "email", "handover-service-account", "location", "name", "state", "status-message", "ticket-id", "update-time", "vpc-sc-enabled"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ProvisioningConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_provisioning_configs_create(request, 
opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "email" => { + call = call.email(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["email"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_provisioning_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_provisioning_configs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for 
param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_provisioning_configs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = 
+ match &temp_cursor.to_string()[..] { + "cloud-console-uri" => Some(("cloudConsoleUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "custom-id" => Some(("customId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "email" => Some(("email", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "handover-service-account" => Some(("handoverServiceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ticket-id" => Some(("ticketId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vpc-sc-enabled" => Some(("vpcScEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-console-uri", "custom-id", "email", "handover-service-account", "location", "name", "state", "status-message", "ticket-id", "update-time", "vpc-sc-enabled"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ProvisioningConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_provisioning_configs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "email" => { + call = call.email(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["email", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_provisioning_configs_submit(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1042,11 +1989,19 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "email" => Some(("email", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provisioning-config.cloud-console-uri" => Some(("provisioningConfig.cloudConsoleUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provisioning-config.custom-id" => Some(("provisioningConfig.customId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provisioning-config.email" => Some(("provisioningConfig.email", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "provisioning-config.handover-service-account" => Some(("provisioningConfig.handoverServiceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provisioning-config.location" => Some(("provisioningConfig.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "provisioning-config.name" => Some(("provisioningConfig.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provisioning-config.state" => Some(("provisioningConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provisioning-config.status-message" => Some(("provisioningConfig.statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "provisioning-config.ticket-id" => Some(("provisioningConfig.ticketId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provisioning-config.update-time" => Some(("provisioningConfig.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provisioning-config.vpc-sc-enabled" => Some(("provisioningConfig.vpcScEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["email", "handover-service-account", "name", "provisioning-config", "ticket-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-console-uri", "custom-id", "email", "handover-service-account", "location", 
"name", "provisioning-config", "state", "status-message", "ticket-id", "update-time", "vpc-sc-enabled"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1116,7 +2071,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1165,7 +2120,7 @@ where } } - async fn _projects_locations_snapshot_schedule_policies_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _projects_locations_ssh_keys_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); @@ -1188,13 +2143,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "public-key" => Some(("publicKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "labels", "name", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["name", "public-key"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1203,13 +2155,13 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::SnapshotSchedulePolicy = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_snapshot_schedule_policies_create(request, opt.value_of("parent").unwrap_or("")); + let mut request: api::SSHKey = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_ssh_keys_create(request, opt.value_of("parent").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { - "snapshot-schedule-policy-id" => { - call = call.snapshot_schedule_policy_id(value.unwrap_or("")); + "ssh-key-id" => { + call = call.ssh_key_id(value.unwrap_or("")); }, _ => { let mut found = false; @@ -1224,7 +2176,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - 
v.extend(["snapshot-schedule-policy-id"].iter().map(|v|*v)); + v.extend(["ssh-key-id"].iter().map(|v|*v)); v } )); } } @@ -1258,9 +2210,9 @@ where } } - async fn _projects_locations_snapshot_schedule_policies_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _projects_locations_ssh_keys_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_snapshot_schedule_policies_delete(opt.value_of("name").unwrap_or("")); + let mut call = self.hub.projects().locations_ssh_keys_delete(opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -1310,61 +2262,9 @@ where } } - async fn _projects_locations_snapshot_schedule_policies_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _projects_locations_ssh_keys_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_snapshot_schedule_policies_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = 
call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_snapshot_schedule_policies_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_snapshot_schedule_policies_list(opt.value_of("parent").unwrap_or("")); + let mut call = self.hub.projects().locations_ssh_keys_list(opt.value_of("parent").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -1372,10 +2272,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1390,100 +2287,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() 
{ - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_snapshot_schedule_policies_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "labels", "name", "state"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::SnapshotSchedulePolicy = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_snapshot_schedule_policies_patch(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["update-mask"].iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -1579,7 +2383,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1693,7 +2497,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1766,21 +2570,33 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "auto-grown-size-gib" => Some(("autoGrownSizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "boot-volume" => Some(("bootVolume", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "current-size-gib" => Some(("currentSizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "emergency-size-gib" => Some(("emergencySizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "max-size-gib" => Some(("maxSizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notes" => Some(("notes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "originally-requested-size-gib" => Some(("originallyRequestedSizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "performance-tier" => Some(("performanceTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pod" => Some(("pod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "protocol" => 
Some(("protocol", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "remaining-space-gib" => Some(("remainingSpaceGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "requested-size-gib" => Some(("requestedSizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snapshot-auto-delete-behavior" => Some(("snapshotAutoDeleteBehavior", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snapshot-enabled" => Some(("snapshotEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "snapshot-reservation-detail.reserved-space-gib" => Some(("snapshotReservationDetail.reservedSpaceGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snapshot-reservation-detail.reserved-space-percent" => Some(("snapshotReservationDetail.reservedSpacePercent", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "snapshot-reservation-detail.reserved-space-remaining-gib" => Some(("snapshotReservationDetail.reservedSpaceRemainingGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snapshot-reservation-detail.reserved-space-used-percent" => Some(("snapshotReservationDetail.reservedSpaceUsedPercent", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "snapshot-schedule-policy" => Some(("snapshotSchedulePolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "storage-aggregate-pool" => Some(("storageAggregatePool", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-type" => Some(("storageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "workload-profile" => Some(("workloadProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-grown-size-gib", 
"current-size-gib", "id", "labels", "name", "remaining-space-gib", "requested-size-gib", "reserved-space-gib", "reserved-space-remaining-gib", "reserved-space-used-percent", "snapshot-auto-delete-behavior", "snapshot-reservation-detail", "snapshot-schedule-policy", "state", "storage-type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-grown-size-gib", "boot-volume", "current-size-gib", "emergency-size-gib", "id", "labels", "max-size-gib", "name", "notes", "originally-requested-size-gib", "performance-tier", "pod", "protocol", "remaining-space-gib", "requested-size-gib", "reserved-space-gib", "reserved-space-percent", "reserved-space-remaining-gib", "reserved-space-used-percent", "snapshot-auto-delete-behavior", "snapshot-enabled", "snapshot-reservation-detail", "snapshot-schedule-policy", "state", "storage-aggregate-pool", "storage-type", "workload-profile"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1795,7 +2611,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1844,6 +2660,91 @@ where } } + async fn _projects_locations_volumes_resize(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = 
temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "size-gib" => Some(("sizeGib", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["size-gib"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ResizeVolumeRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_volumes_resize(request, opt.value_of("volume").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_volumes_snapshots_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1871,10 +2772,10 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "size-bytes" => Some(("sizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-volume" => Some(("storageVolume", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "id", "name", "size-bytes", "storage-volume"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "id", "name", "storage-volume", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2048,7 +2949,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2191,6 +3092,21 @@ where ("locations-get", Some(opt)) => { call_result = self._projects_locations_get(opt, dry_run, &mut err).await; }, + 
("locations-instance-provisioning-settings-fetch", Some(opt)) => { + call_result = self._projects_locations_instance_provisioning_settings_fetch(opt, dry_run, &mut err).await; + }, + ("locations-instances-create", Some(opt)) => { + call_result = self._projects_locations_instances_create(opt, dry_run, &mut err).await; + }, + ("locations-instances-detach-lun", Some(opt)) => { + call_result = self._projects_locations_instances_detach_lun(opt, dry_run, &mut err).await; + }, + ("locations-instances-disable-interactive-serial-console", Some(opt)) => { + call_result = self._projects_locations_instances_disable_interactive_serial_console(opt, dry_run, &mut err).await; + }, + ("locations-instances-enable-interactive-serial-console", Some(opt)) => { + call_result = self._projects_locations_instances_enable_interactive_serial_console(opt, dry_run, &mut err).await; + }, ("locations-instances-get", Some(opt)) => { call_result = self._projects_locations_instances_get(opt, dry_run, &mut err).await; }, @@ -2206,6 +3122,9 @@ where ("locations-instances-start", Some(opt)) => { call_result = self._projects_locations_instances_start(opt, dry_run, &mut err).await; }, + ("locations-instances-stop", Some(opt)) => { + call_result = self._projects_locations_instances_stop(opt, dry_run, &mut err).await; + }, ("locations-list", Some(opt)) => { call_result = self._projects_locations_list(opt, dry_run, &mut err).await; }, @@ -2221,6 +3140,12 @@ where ("locations-networks-patch", Some(opt)) => { call_result = self._projects_locations_networks_patch(opt, dry_run, &mut err).await; }, + ("locations-nfs-shares-create", Some(opt)) => { + call_result = self._projects_locations_nfs_shares_create(opt, dry_run, &mut err).await; + }, + ("locations-nfs-shares-delete", Some(opt)) => { + call_result = self._projects_locations_nfs_shares_delete(opt, dry_run, &mut err).await; + }, ("locations-nfs-shares-get", Some(opt)) => { call_result = self._projects_locations_nfs_shares_get(opt, dry_run, &mut err).await; 
}, @@ -2230,26 +3155,32 @@ where ("locations-nfs-shares-patch", Some(opt)) => { call_result = self._projects_locations_nfs_shares_patch(opt, dry_run, &mut err).await; }, + ("locations-operations-get", Some(opt)) => { + call_result = self._projects_locations_operations_get(opt, dry_run, &mut err).await; + }, + ("locations-provisioning-configs-create", Some(opt)) => { + call_result = self._projects_locations_provisioning_configs_create(opt, dry_run, &mut err).await; + }, + ("locations-provisioning-configs-get", Some(opt)) => { + call_result = self._projects_locations_provisioning_configs_get(opt, dry_run, &mut err).await; + }, + ("locations-provisioning-configs-patch", Some(opt)) => { + call_result = self._projects_locations_provisioning_configs_patch(opt, dry_run, &mut err).await; + }, ("locations-provisioning-configs-submit", Some(opt)) => { call_result = self._projects_locations_provisioning_configs_submit(opt, dry_run, &mut err).await; }, ("locations-provisioning-quotas-list", Some(opt)) => { call_result = self._projects_locations_provisioning_quotas_list(opt, dry_run, &mut err).await; }, - ("locations-snapshot-schedule-policies-create", Some(opt)) => { - call_result = self._projects_locations_snapshot_schedule_policies_create(opt, dry_run, &mut err).await; + ("locations-ssh-keys-create", Some(opt)) => { + call_result = self._projects_locations_ssh_keys_create(opt, dry_run, &mut err).await; }, - ("locations-snapshot-schedule-policies-delete", Some(opt)) => { - call_result = self._projects_locations_snapshot_schedule_policies_delete(opt, dry_run, &mut err).await; + ("locations-ssh-keys-delete", Some(opt)) => { + call_result = self._projects_locations_ssh_keys_delete(opt, dry_run, &mut err).await; }, - ("locations-snapshot-schedule-policies-get", Some(opt)) => { - call_result = self._projects_locations_snapshot_schedule_policies_get(opt, dry_run, &mut err).await; - }, - ("locations-snapshot-schedule-policies-list", Some(opt)) => { - call_result = 
self._projects_locations_snapshot_schedule_policies_list(opt, dry_run, &mut err).await; - }, - ("locations-snapshot-schedule-policies-patch", Some(opt)) => { - call_result = self._projects_locations_snapshot_schedule_policies_patch(opt, dry_run, &mut err).await; + ("locations-ssh-keys-list", Some(opt)) => { + call_result = self._projects_locations_ssh_keys_list(opt, dry_run, &mut err).await; }, ("locations-volumes-get", Some(opt)) => { call_result = self._projects_locations_volumes_get(opt, dry_run, &mut err).await; @@ -2266,6 +3197,9 @@ where ("locations-volumes-patch", Some(opt)) => { call_result = self._projects_locations_volumes_patch(opt, dry_run, &mut err).await; }, + ("locations-volumes-resize", Some(opt)) => { + call_result = self._projects_locations_volumes_resize(opt, dry_run, &mut err).await; + }, ("locations-volumes-snapshots-create", Some(opt)) => { call_result = self._projects_locations_volumes_snapshots_create(opt, dry_run, &mut err).await; }, @@ -2360,7 +3294,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-instances-get', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-reset', 'locations-instances-start', 'locations-list', 'locations-networks-get', 'locations-networks-list', 'locations-networks-list-network-usage', 'locations-networks-patch', 'locations-nfs-shares-get', 'locations-nfs-shares-list', 'locations-nfs-shares-patch', 'locations-provisioning-configs-submit', 'locations-provisioning-quotas-list', 'locations-snapshot-schedule-policies-create', 'locations-snapshot-schedule-policies-delete', 'locations-snapshot-schedule-policies-get', 'locations-snapshot-schedule-policies-list', 'locations-snapshot-schedule-policies-patch', 'locations-volumes-get', 'locations-volumes-list', 'locations-volumes-luns-get', 'locations-volumes-luns-list', 'locations-volumes-patch', 'locations-volumes-snapshots-create', 'locations-volumes-snapshots-delete', 
'locations-volumes-snapshots-get', 'locations-volumes-snapshots-list' and 'locations-volumes-snapshots-restore-volume-snapshot'", vec![ + ("projects", "methods: 'locations-get', 'locations-instance-provisioning-settings-fetch', 'locations-instances-create', 'locations-instances-detach-lun', 'locations-instances-disable-interactive-serial-console', 'locations-instances-enable-interactive-serial-console', 'locations-instances-get', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-reset', 'locations-instances-start', 'locations-instances-stop', 'locations-list', 'locations-networks-get', 'locations-networks-list', 'locations-networks-list-network-usage', 'locations-networks-patch', 'locations-nfs-shares-create', 'locations-nfs-shares-delete', 'locations-nfs-shares-get', 'locations-nfs-shares-list', 'locations-nfs-shares-patch', 'locations-operations-get', 'locations-provisioning-configs-create', 'locations-provisioning-configs-get', 'locations-provisioning-configs-patch', 'locations-provisioning-configs-submit', 'locations-provisioning-quotas-list', 'locations-ssh-keys-create', 'locations-ssh-keys-delete', 'locations-ssh-keys-list', 'locations-volumes-get', 'locations-volumes-list', 'locations-volumes-luns-get', 'locations-volumes-luns-list', 'locations-volumes-patch', 'locations-volumes-resize', 'locations-volumes-snapshots-create', 'locations-volumes-snapshots-delete', 'locations-volumes-snapshots-get', 'locations-volumes-snapshots-list' and 'locations-volumes-snapshots-restore-volume-snapshot'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-get", @@ -2377,6 +3311,140 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instance-provisioning-settings-fetch", 
+ Some(r##"Get instance provisioning settings for a given project. This is hidden method used by UI only."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-instance-provisioning-settings-fetch", + vec![ + (Some(r##"location"##), + None, + Some(r##"Required. The parent project and location containing the ProvisioningSettings."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-create", + Some(r##"Create an Instance."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-instances-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent project and location."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-detach-lun", + Some(r##"Detach LUN from Instance."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-instances-detach-lun", + vec![ + (Some(r##"instance"##), + None, + Some(r##"Required. 
Name of the instance."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-disable-interactive-serial-console", + Some(r##"Disable the interactive serial console feature on an instance."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-instances-disable-interactive-serial-console", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the resource."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-enable-interactive-serial-console", + Some(r##"Enable the interactive serial console feature on an instance."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-instances-enable-interactive-serial-console", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the resource."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2433,7 +3501,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Output only. The resource name of this `Instance`. Resource names are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. Format: `projects/{project}/locations/{location}/instances/{instance}`"##), + Some(r##"Immutable. The resource name of this `Instance`. Resource names are schemeless URIs that follow the conventions in https://cloud.google.com/apis/design/resource_names. Format: `projects/{project}/locations/{location}/instances/{instance}`"##), Some(true), Some(false)), @@ -2505,6 +3573,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-stop", + Some(r##"Stop a running server."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-instances-stop", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the resource."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2621,6 +3717,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-nfs-shares-create", + Some(r##"Create an NFS share."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-nfs-shares-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent project and location."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-nfs-shares-delete", + Some(r##"Delete an NFS share. The underlying volume is automatically deleted."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-nfs-shares-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The name of the NFS share to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2677,7 +3823,107 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Output only. The name of the NFS share."##), + Some(r##"Immutable. The name of the NFS share."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-get", + Some(r##"Get details about an operation. This method used only to work around CCFE lack of passthrough LRO support (b/221498758)."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-provisioning-configs-create", + Some(r##"Create new ProvisioningConfig."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-provisioning-configs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent project and location containing the ProvisioningConfig."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-provisioning-configs-get", + Some(r##"Get ProvisioningConfig by name."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-provisioning-configs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the ProvisioningConfig."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-provisioning-configs-patch", + Some(r##"Update existing ProvisioningConfig."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-provisioning-configs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. The system-generated name of the provisioning config. 
This follows the UUID format."##), Some(true), Some(false)), @@ -2749,13 +3995,13 @@ async fn main() { Some(false), Some(false)), ]), - ("locations-snapshot-schedule-policies-create", - Some(r##"Create a snapshot schedule policy in the specified project."##), - "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-snapshot-schedule-policies-create", + ("locations-ssh-keys-create", + Some(r##"Register a public SSH key in the specified project for use with the interactive serial console feature."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-ssh-keys-create", vec![ (Some(r##"parent"##), None, - Some(r##"Required. The parent project and location containing the SnapshotSchedulePolicy."##), + Some(r##"Required. The parent containing the SSH keys."##), Some(true), Some(false)), @@ -2777,13 +4023,13 @@ async fn main() { Some(false), Some(false)), ]), - ("locations-snapshot-schedule-policies-delete", - Some(r##"Delete a named snapshot schedule policy."##), - "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-snapshot-schedule-policies-delete", + ("locations-ssh-keys-delete", + Some(r##"Deletes a public SSH key registered in the specified project."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-ssh-keys-delete", vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the snapshot schedule policy to delete."##), + Some(r##"Required. The name of the SSH key to delete. 
Currently, the only valid value for the location is "global"."##), Some(true), Some(false)), @@ -2799,35 +4045,13 @@ async fn main() { Some(false), Some(false)), ]), - ("locations-snapshot-schedule-policies-get", - Some(r##"Get details of a single snapshot schedule policy."##), - "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-snapshot-schedule-policies-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. Name of the resource."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-snapshot-schedule-policies-list", - Some(r##"List snapshot schedule policies in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-snapshot-schedule-policies-list", + ("locations-ssh-keys-list", + Some(r##"Lists the public SSH keys registered for the specified project. These SSH keys are used only for the interactive serial console feature."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-ssh-keys-list", vec![ (Some(r##"parent"##), None, - Some(r##"Required. The parent project containing the Snapshot Schedule Policies."##), + Some(r##"Required. The parent containing the SSH keys. 
Currently, the only valid value for the location is "global"."##), Some(true), Some(false)), @@ -2837,34 +4061,6 @@ async fn main() { Some(false), Some(true)), - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-snapshot-schedule-policies-patch", - Some(r##"Update a snapshot schedule policy in the specified project."##), - "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-snapshot-schedule-policies-patch", - vec![ - (Some(r##"name"##), - None, - Some(r##"Output only. The name of the snapshot schedule policy."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2981,6 +4177,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-volumes-resize", + Some(r##"Emergency Volume resize."##), + "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-volumes-resize", + vec![ + (Some(r##"volume"##), + None, + Some(r##"Required. 
Volume to resize."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2988,7 +4212,7 @@ async fn main() { Some(false)), ]), ("locations-volumes-snapshots-create", - Some(r##"Create a storage volume snapshot in a containing volume."##), + Some(r##"Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if called for a non-boot volume."##), "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-volumes-snapshots-create", vec![ (Some(r##"parent"##), @@ -3016,7 +4240,7 @@ async fn main() { Some(false)), ]), ("locations-volumes-snapshots-delete", - Some(r##"Deletes a storage volume snapshot for a given volume."##), + Some(r##"Deletes a volume snapshot. Returns INVALID_ARGUMENT if called for a non-boot volume."##), "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-volumes-snapshots-delete", vec![ (Some(r##"name"##), @@ -3038,12 +4262,12 @@ async fn main() { Some(false)), ]), ("locations-volumes-snapshots-get", - Some(r##"Get details of a single storage volume snapshot."##), + Some(r##"Returns the specified snapshot resource. Returns INVALID_ARGUMENT if called for a non-boot volume."##), "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-volumes-snapshots-get", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the resource."##), + Some(r##"Required. 
The name of the snapshot."##), Some(true), Some(false)), @@ -3060,7 +4284,7 @@ async fn main() { Some(false)), ]), ("locations-volumes-snapshots-list", - Some(r##"List storage volume snapshots for given storage volume."##), + Some(r##"Retrieves the list of snapshots for the specified volume. Returns a response with an empty list of snapshots if called for a non-boot volume."##), "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-volumes-snapshots-list", vec![ (Some(r##"parent"##), @@ -3082,12 +4306,12 @@ async fn main() { Some(false)), ]), ("locations-volumes-snapshots-restore-volume-snapshot", - Some(r##"Restore a storage volume snapshot to its containing volume."##), + Some(r##"Uses the specified snapshot to restore its parent volume. Returns INVALID_ARGUMENT if called for a non-boot volume."##), "Details at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli/projects_locations-volumes-snapshots-restore-volume-snapshot", vec![ (Some(r##"volume-snapshot"##), None, - Some(r##"Required. Name of the resource."##), + Some(r##"Required. 
Name of the snapshot which will be used to restore its parent volume."##), Some(true), Some(false)), @@ -3115,7 +4339,7 @@ async fn main() { let mut app = App::new("baremetalsolution2") .author("Sebastian Thiel ") - .version("4.0.1+20220209") + .version("5.0.2+20230113") .about("Provides ways to manage Bare Metal Solution hardware installed in a regional extension located near a Google Cloud data center.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_baremetalsolution2_cli") .arg(Arg::with_name("url") diff --git a/gen/baremetalsolution2/Cargo.toml b/gen/baremetalsolution2/Cargo.toml index 94960ed027..7bb7a31b86 100644 --- a/gen/baremetalsolution2/Cargo.toml +++ b/gen/baremetalsolution2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-baremetalsolution2" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with baremetalsolution (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/baremetalsolution2" homepage = "https://cloud.google.com/bare-metal" -documentation = "https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-baremetalsolution2/5.0.2+20230113" license = "MIT" keywords = ["baremetalsolution", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/baremetalsolution2/README.md b/gen/baremetalsolution2/README.md index 732c307b9c..ef42b1b3db 100644 --- a/gen/baremetalsolution2/README.md +++ b/gen/baremetalsolution2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-baremetalsolution2` library allows access to all features of the *Google baremetalsolution* service. 
-This documentation was generated from *baremetalsolution* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *baremetalsolution:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *baremetalsolution* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *baremetalsolution:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *baremetalsolution* *v2* API can be found at the [official documentation site](https://cloud.google.com/bare-metal). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/Baremetalsolution) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/Baremetalsolution) ... 
* projects - * [*locations get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationGetCall), [*locations instance provisioning settings fetch*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceProvisioningSettingFetchCall), [*locations instances create*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceCreateCall), [*locations instances detach lun*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceDetachLunCall), [*locations instances disable interactive serial console*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceDisableInteractiveSerialConsoleCall), [*locations instances enable interactive serial console*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceEnableInteractiveSerialConsoleCall), [*locations instances get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstancePatchCall), [*locations instances reset*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceResetCall), [*locations instances start*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceStartCall), [*locations instances 
stop*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationInstanceStopCall), [*locations list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationListCall), [*locations networks get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNetworkGetCall), [*locations networks list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNetworkListCall), [*locations networks list network usage*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNetworkListNetworkUsageCall), [*locations networks patch*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNetworkPatchCall), [*locations nfs shares create*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNfsShareCreateCall), [*locations nfs shares delete*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNfsShareDeleteCall), [*locations nfs shares get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNfsShareGetCall), [*locations nfs shares list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNfsShareListCall), [*locations nfs shares patch*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationNfsSharePatchCall), [*locations operations get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationOperationGetCall), [*locations provisioning configs 
create*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningConfigCreateCall), [*locations provisioning configs get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningConfigGetCall), [*locations provisioning configs patch*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningConfigPatchCall), [*locations provisioning configs submit*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningConfigSubmitCall), [*locations provisioning quotas list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningQuotaListCall), [*locations ssh keys create*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationSshKeyCreateCall), [*locations ssh keys delete*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationSshKeyDeleteCall), [*locations ssh keys list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationSshKeyListCall), [*locations volumes get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeGetCall), [*locations volumes list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeListCall), [*locations volumes luns get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeLunGetCall), [*locations volumes luns list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeLunListCall), [*locations volumes 
patch*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumePatchCall), [*locations volumes resize*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeResizeCall), [*locations volumes snapshots create*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotCreateCall), [*locations volumes snapshots delete*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotDeleteCall), [*locations volumes snapshots get*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotGetCall), [*locations volumes snapshots list*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotListCall) and [*locations volumes snapshots restore volume snapshot*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotRestoreVolumeSnapshotCall) + * [*locations get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationGetCall), [*locations instance provisioning settings fetch*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceProvisioningSettingFetchCall), [*locations instances create*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceCreateCall), [*locations instances detach lun*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceDetachLunCall), [*locations instances disable interactive serial 
console*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceDisableInteractiveSerialConsoleCall), [*locations instances enable interactive serial console*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceEnableInteractiveSerialConsoleCall), [*locations instances get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstancePatchCall), [*locations instances reset*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceResetCall), [*locations instances start*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceStartCall), [*locations instances stop*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationInstanceStopCall), [*locations list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationListCall), [*locations networks get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNetworkGetCall), [*locations networks list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNetworkListCall), [*locations networks list network usage*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNetworkListNetworkUsageCall), [*locations networks 
patch*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNetworkPatchCall), [*locations nfs shares create*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNfsShareCreateCall), [*locations nfs shares delete*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNfsShareDeleteCall), [*locations nfs shares get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNfsShareGetCall), [*locations nfs shares list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNfsShareListCall), [*locations nfs shares patch*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationNfsSharePatchCall), [*locations operations get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationOperationGetCall), [*locations provisioning configs create*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningConfigCreateCall), [*locations provisioning configs get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningConfigGetCall), [*locations provisioning configs patch*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningConfigPatchCall), [*locations provisioning configs submit*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningConfigSubmitCall), [*locations provisioning quotas list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationProvisioningQuotaListCall), [*locations ssh keys 
create*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationSshKeyCreateCall), [*locations ssh keys delete*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationSshKeyDeleteCall), [*locations ssh keys list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationSshKeyListCall), [*locations volumes get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeGetCall), [*locations volumes list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeListCall), [*locations volumes luns get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeLunGetCall), [*locations volumes luns list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeLunListCall), [*locations volumes patch*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumePatchCall), [*locations volumes resize*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeResizeCall), [*locations volumes snapshots create*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotCreateCall), [*locations volumes snapshots delete*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotDeleteCall), [*locations volumes snapshots get*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotGetCall), [*locations volumes snapshots list*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotListCall) and 
[*locations volumes snapshots restore volume snapshot*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/api::ProjectLocationVolumeSnapshotRestoreVolumeSnapshotCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/Baremetalsolution)** +* **[Hub](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/Baremetalsolution)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::CallBuilder) -* **[Resources](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::CallBuilder) +* **[Resources](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::Part)** + * **[Parts](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -140,17 +140,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -160,29 +160,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::Delegate) to the -[Method Builder](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::Delegate) to the +[Method Builder](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::RequestValue) and -[decodable](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::RequestValue) and +[decodable](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-baremetalsolution2/5.0.2-beta-1+20230113/google_baremetalsolution2/client::RequestValue) are moved +* [request values](https://docs.rs/google-baremetalsolution2/5.0.2+20230113/google_baremetalsolution2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/baremetalsolution2/src/api.rs b/gen/baremetalsolution2/src/api.rs index 0404df8e3d..7d2d4dd898 100644 --- a/gen/baremetalsolution2/src/api.rs +++ b/gen/baremetalsolution2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Baremetalsolution { Baremetalsolution { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://baremetalsolution.googleapis.com/".to_string(), _root_url: "https://baremetalsolution.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Baremetalsolution { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/baremetalsolution2/src/client.rs b/gen/baremetalsolution2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/baremetalsolution2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/baremetalsolution2/src/lib.rs b/gen/baremetalsolution2/src/lib.rs index 69d1b07aef..55993b4f58 100644 --- a/gen/baremetalsolution2/src/lib.rs +++ b/gen/baremetalsolution2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *baremetalsolution* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *baremetalsolution:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *baremetalsolution* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *baremetalsolution:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *baremetalsolution* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/bare-metal). diff --git a/gen/bigquery2-cli/Cargo.toml b/gen/bigquery2-cli/Cargo.toml index 7f94300487..fc892925d6 100644 --- a/gen/bigquery2-cli/Cargo.toml +++ b/gen/bigquery2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-bigquery2-cli" -version = "4.0.1+20220222" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with bigquery (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigquery2-cli" @@ -20,13 +20,13 @@ name = "bigquery2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-bigquery2] path = "../bigquery2" -version = "4.0.1+20220222" +version = "5.0.2+20230114" + diff --git a/gen/bigquery2-cli/README.md b/gen/bigquery2-cli/README.md index 1f8f8926fb..e85c66d2ba 100644 --- a/gen/bigquery2-cli/README.md +++ b/gen/bigquery2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *bigquery* API at revision *20220222*. The CLI is at version *4.0.1*. +This documentation was generated from the *bigquery* API at revision *20230114*. The CLI is at version *5.0.2*. ```bash bigquery2 [options] diff --git a/gen/bigquery2-cli/mkdocs.yml b/gen/bigquery2-cli/mkdocs.yml index 5703902ab8..96105f0834 100644 --- a/gen/bigquery2-cli/mkdocs.yml +++ b/gen/bigquery2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: bigquery v4.0.1+20220222 +site_name: bigquery v5.0.2+20230114 site_url: http://byron.github.io/google-apis-rs/google-bigquery2-cli site_description: A complete library to interact with bigquery (protocol v2) @@ -7,47 +7,55 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/bigquery2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['datasets_delete.md', 'Datasets', 'Delete'] -- ['datasets_get.md', 'Datasets', 'Get'] -- ['datasets_insert.md', 'Datasets', 'Insert'] -- ['datasets_list.md', 'Datasets', 'List'] -- ['datasets_patch.md', 'Datasets', 'Patch'] -- ['datasets_update.md', 'Datasets', 'Update'] -- ['jobs_cancel.md', 'Jobs', 'Cancel'] -- ['jobs_delete.md', 'Jobs', 'Delete'] -- ['jobs_get.md', 'Jobs', 'Get'] -- ['jobs_get-query-results.md', 'Jobs', 'Get Query Results'] -- ['jobs_insert.md', 'Jobs', 'Insert'] -- ['jobs_list.md', 'Jobs', 'List'] -- ['jobs_query.md', 'Jobs', 'Query'] -- ['models_delete.md', 'Models', 'Delete'] -- ['models_get.md', 'Models', 'Get'] -- ['models_list.md', 'Models', 'List'] -- ['models_patch.md', 'Models', 'Patch'] -- ['projects_get-service-account.md', 'Projects', 'Get Service Account'] -- ['projects_list.md', 'Projects', 'List'] -- ['routines_delete.md', 'Routines', 'Delete'] -- ['routines_get.md', 'Routines', 'Get'] -- ['routines_insert.md', 'Routines', 'Insert'] -- ['routines_list.md', 'Routines', 'List'] -- ['routines_update.md', 'Routines', 'Update'] -- 
['row-access-policies_get-iam-policy.md', 'Row Access Policies', 'Get Iam Policy'] -- ['row-access-policies_list.md', 'Row Access Policies', 'List'] -- ['row-access-policies_set-iam-policy.md', 'Row Access Policies', 'Set Iam Policy'] -- ['row-access-policies_test-iam-permissions.md', 'Row Access Policies', 'Test Iam Permissions'] -- ['tabledata_insert-all.md', 'Tabledata', 'Insert All'] -- ['tabledata_list.md', 'Tabledata', 'List'] -- ['tables_delete.md', 'Tables', 'Delete'] -- ['tables_get.md', 'Tables', 'Get'] -- ['tables_get-iam-policy.md', 'Tables', 'Get Iam Policy'] -- ['tables_insert.md', 'Tables', 'Insert'] -- ['tables_list.md', 'Tables', 'List'] -- ['tables_patch.md', 'Tables', 'Patch'] -- ['tables_set-iam-policy.md', 'Tables', 'Set Iam Policy'] -- ['tables_test-iam-permissions.md', 'Tables', 'Test Iam Permissions'] -- ['tables_update.md', 'Tables', 'Update'] +nav: +- Home: 'index.md' +- 'Datasets': + - 'Delete': 'datasets_delete.md' + - 'Get': 'datasets_get.md' + - 'Insert': 'datasets_insert.md' + - 'List': 'datasets_list.md' + - 'Patch': 'datasets_patch.md' + - 'Update': 'datasets_update.md' +- 'Jobs': + - 'Cancel': 'jobs_cancel.md' + - 'Delete': 'jobs_delete.md' + - 'Get': 'jobs_get.md' + - 'Get Query Results': 'jobs_get-query-results.md' + - 'Insert': 'jobs_insert.md' + - 'List': 'jobs_list.md' + - 'Query': 'jobs_query.md' +- 'Models': + - 'Delete': 'models_delete.md' + - 'Get': 'models_get.md' + - 'List': 'models_list.md' + - 'Patch': 'models_patch.md' +- 'Projects': + - 'Get Service Account': 'projects_get-service-account.md' + - 'List': 'projects_list.md' +- 'Routines': + - 'Delete': 'routines_delete.md' + - 'Get': 'routines_get.md' + - 'Insert': 'routines_insert.md' + - 'List': 'routines_list.md' + - 'Update': 'routines_update.md' +- 'Row Access Policies': + - 'Get Iam Policy': 'row-access-policies_get-iam-policy.md' + - 'List': 'row-access-policies_list.md' + - 'Set Iam Policy': 'row-access-policies_set-iam-policy.md' + - 'Test Iam Permissions': 
'row-access-policies_test-iam-permissions.md' +- 'Tabledata': + - 'Insert All': 'tabledata_insert-all.md' + - 'List': 'tabledata_list.md' +- 'Tables': + - 'Delete': 'tables_delete.md' + - 'Get': 'tables_get.md' + - 'Get Iam Policy': 'tables_get-iam-policy.md' + - 'Insert': 'tables_insert.md' + - 'List': 'tables_list.md' + - 'Patch': 'tables_patch.md' + - 'Set Iam Policy': 'tables_set-iam-policy.md' + - 'Test Iam Permissions': 'tables_test-iam-permissions.md' + - 'Update': 'tables_update.md' theme: readthedocs diff --git a/gen/bigquery2-cli/src/client.rs b/gen/bigquery2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/bigquery2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/bigquery2-cli/src/main.rs b/gen/bigquery2-cli/src/main.rs index cb76563609..1db745fb35 100644 --- a/gen/bigquery2-cli/src/main.rs +++ b/gen/bigquery2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_bigquery2::{api, Error, oauth2}; +use google_bigquery2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,7 +57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "delete-contents" => { - call = call.delete_contents(arg_from_str(value.unwrap_or("false"), err, "delete-contents", "boolean")); + call = call.delete_contents( value.map(|v| arg_from_str(v, err, "delete-contents", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -190,10 +189,12 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "satisfies-pzs" => Some(("satisfiesPZS", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "max-time-travel-hours" => Some(("maxTimeTravelHours", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), + "storage-billing-model" => Some(("storageBillingModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "dataset-id", "dataset-reference", "default-collation", "default-encryption-configuration", "default-partition-expiration-ms", "default-table-expiration-ms", "description", "etag", "friendly-name", "id", "is-case-insensitive", "kind", "kms-key-name", "labels", "last-modified-time", "location", "project-id", "satisfies-pzs", "self-link"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "dataset-id", "dataset-reference", "default-collation", "default-encryption-configuration", "default-partition-expiration-ms", "default-table-expiration-ms", "description", "etag", "friendly-name", "id", "is-case-insensitive", "kind", "kms-key-name", "labels", "last-modified-time", "location", "max-time-travel-hours", "project-id", "satisfies-pzs", "self-link", "storage-billing-model"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -263,13 +264,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); }, "all" => { - call = call.all(arg_from_str(value.unwrap_or("false"), err, "all", "boolean")); + call = call.all( value.map(|v| arg_from_str(v, err, "all", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -357,10 +358,12 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "satisfies-pzs" => Some(("satisfiesPZS", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "max-time-travel-hours" => Some(("maxTimeTravelHours", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "storage-billing-model" => Some(("storageBillingModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "dataset-id", "dataset-reference", "default-collation", "default-encryption-configuration", "default-partition-expiration-ms", "default-table-expiration-ms", "description", "etag", "friendly-name", "id", "is-case-insensitive", "kind", "kms-key-name", "labels", "last-modified-time", "location", "project-id", "satisfies-pzs", "self-link"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "dataset-id", "dataset-reference", "default-collation", "default-encryption-configuration", "default-partition-expiration-ms", "default-table-expiration-ms", "description", "etag", "friendly-name", "id", "is-case-insensitive", "kind", "kms-key-name", "labels", "last-modified-time", "location", "max-time-travel-hours", "project-id", "satisfies-pzs", "self-link", "storage-billing-model"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -459,10 +462,12 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "satisfies-pzs" => Some(("satisfiesPZS", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "max-time-travel-hours" => Some(("maxTimeTravelHours", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "storage-billing-model" => Some(("storageBillingModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "dataset-id", "dataset-reference", "default-collation", "default-encryption-configuration", "default-partition-expiration-ms", "default-table-expiration-ms", "description", "etag", "friendly-name", "id", "is-case-insensitive", "kind", "kms-key-name", "labels", "last-modified-time", "location", "project-id", "satisfies-pzs", "self-link"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "dataset-id", "dataset-reference", "default-collation", "default-encryption-configuration", "default-partition-expiration-ms", "default-table-expiration-ms", "description", "etag", "friendly-name", "id", "is-case-insensitive", "kind", "kms-key-name", "labels", "last-modified-time", "location", "max-time-travel-hours", "project-id", "satisfies-pzs", "self-link", "storage-billing-model"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -689,16 +694,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "timeout-ms" => { - call = call.timeout_ms(arg_from_str(value.unwrap_or("-0"), err, "timeout-ms", "integer")); + call = call.timeout_ms( value.map(|v| arg_from_str(v, err, "timeout-ms", "uint32")).unwrap_or(0)); }, "start-index" => { - call = 
call.start_index(value.unwrap_or("")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint64")).unwrap_or(0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "location" => { call = call.location(value.unwrap_or("")); @@ -805,6 +810,7 @@ where "configuration.load.autodetect" => Some(("configuration.load.autodetect", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "configuration.load.clustering.fields" => Some(("configuration.load.clustering.fields", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "configuration.load.create-disposition" => Some(("configuration.load.createDisposition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "configuration.load.create-session" => Some(("configuration.load.createSession", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "configuration.load.decimal-target-types" => Some(("configuration.load.decimalTargetTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "configuration.load.destination-encryption-configuration.kms-key-name" => Some(("configuration.load.destinationEncryptionConfiguration.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "configuration.load.destination-table.dataset-id" => Some(("configuration.load.destinationTable.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -832,6 +838,7 @@ where "configuration.load.range-partitioning.range.end" => Some(("configuration.load.rangePartitioning.range.end", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "configuration.load.range-partitioning.range.interval" => 
Some(("configuration.load.rangePartitioning.range.interval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "configuration.load.range-partitioning.range.start" => Some(("configuration.load.rangePartitioning.range.start", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "configuration.load.reference-file-schema-uri" => Some(("configuration.load.referenceFileSchemaUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "configuration.load.schema-inline" => Some(("configuration.load.schemaInline", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "configuration.load.schema-inline-format" => Some(("configuration.load.schemaInlineFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "configuration.load.schema-update-options" => Some(("configuration.load.schemaUpdateOptions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -881,7 +888,10 @@ where "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.completion-ratio" => Some(("statistics.completionRatio", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "statistics.copy.copied-logical-bytes" => Some(("statistics.copy.copied_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.copy.copied-rows" => Some(("statistics.copy.copied_rows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.creation-time" => Some(("statistics.creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.data-masking-statistics.data-masking-applied" => Some(("statistics.dataMaskingStatistics.dataMaskingApplied", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "statistics.end-time" => Some(("statistics.endTime", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.extract.destination-uri-file-counts" => Some(("statistics.extract.destinationUriFileCounts", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "statistics.extract.input-bytes" => Some(("statistics.extract.inputBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -892,6 +902,7 @@ where "statistics.load.output-rows" => Some(("statistics.load.outputRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.num-child-jobs" => Some(("statistics.numChildJobs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.parent-job-id" => Some(("statistics.parentJobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.query.bi-engine-statistics.acceleration-mode" => Some(("statistics.query.biEngineStatistics.accelerationMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.bi-engine-statistics.bi-engine-mode" => Some(("statistics.query.biEngineStatistics.biEngineMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.billing-tier" => Some(("statistics.query.billingTier", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "statistics.query.cache-hit" => Some(("statistics.query.cacheHit", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -922,12 +933,19 @@ where "statistics.query.model-training-current-iteration" => Some(("statistics.query.modelTrainingCurrentIteration", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "statistics.query.model-training-expected-total-iteration" => Some(("statistics.query.modelTrainingExpectedTotalIteration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.num-dml-affected-rows" => Some(("statistics.query.numDmlAffectedRows", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), + "statistics.query.search-statistics.index-usage-mode" => Some(("statistics.query.searchStatistics.indexUsageMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.query.spark-statistics.endpoints" => Some(("statistics.query.sparkStatistics.endpoints", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "statistics.query.spark-statistics.logging-info.project-id" => Some(("statistics.query.sparkStatistics.logging_info.project_id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.query.spark-statistics.logging-info.resource-type" => Some(("statistics.query.sparkStatistics.logging_info.resource_type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.query.spark-statistics.spark-job-id" => Some(("statistics.query.sparkStatistics.spark_job_id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.query.spark-statistics.spark-job-location" => Some(("statistics.query.sparkStatistics.spark_job_location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.statement-type" => Some(("statistics.query.statementType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.total-bytes-billed" => Some(("statistics.query.totalBytesBilled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.total-bytes-processed" => Some(("statistics.query.totalBytesProcessed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.total-bytes-processed-accuracy" => Some(("statistics.query.totalBytesProcessedAccuracy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.total-partitions-processed" => Some(("statistics.query.totalPartitionsProcessed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.query.total-slot-ms" => 
Some(("statistics.query.totalSlotMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.query.transferred-bytes" => Some(("statistics.query.transferredBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.quota-deferments" => Some(("statistics.quotaDeferments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "statistics.reservation-id" => Some(("statistics.reservation_id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.row-level-security-statistics.row-level-security-applied" => Some(("statistics.rowLevelSecurityStatistics.rowLevelSecurityApplied", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -944,7 +962,7 @@ where "status.state" => Some(("status.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-email" => Some(("user_email", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-jagged-rows", "allow-large-results", "allow-quoted-newlines", "autodetect", "bad-records", "bi-engine-mode", "bi-engine-statistics", "billing-tier", "cache-hit", "clustering", "completion-ratio", "compression", "configuration", "copy", "create-disposition", "create-session", "creation-time", "current-iteration", "dataset-id", "ddl-affected-row-access-policy-count", "ddl-destination-table", "ddl-operation-performed", "ddl-target-dataset", "ddl-target-routine", "ddl-target-row-access-policy", "ddl-target-table", "debug-info", "decimal-target-types", "default-dataset", "deleted-row-count", "description", "destination-encryption-configuration", "destination-format", "destination-table", "destination-table-properties", "destination-uri", "destination-uri-file-counts", "destination-uris", "dml-stats", "dry-run", "enable-list-inference", "encoding", "end", "end-time", "enum-as-string", "error-result", "estimated-bytes-processed", 
"etag", "evaluation-kind", "expected-total-iterations", "expiration-ms", "expiration-time", "extract", "field", "field-delimiter", "fields", "flatten-results", "friendly-name", "hive-partitioning-options", "id", "ignore-unknown-values", "input-bytes", "input-file-bytes", "input-files", "inserted-row-count", "interval", "job-id", "job-reference", "job-timeout-ms", "job-type", "json-extension", "kind", "kms-key-name", "labels", "load", "location", "max-bad-records", "max-iterations", "maximum-billing-tier", "maximum-bytes-billed", "message", "ml-statistics", "mode", "model-id", "model-training", "model-training-current-iteration", "model-training-expected-total-iteration", "null-marker", "num-child-jobs", "num-dml-affected-rows", "operation-type", "output-bytes", "output-rows", "parameter-mode", "parent-job-id", "parquet-options", "policy-id", "preserve-ascii-control-characters", "preserve-nulls", "print-header", "priority", "project-id", "projection-fields", "query", "quota-deferments", "quote", "range", "range-partitioning", "reason", "require-partition-filter", "reservation-id", "routine-id", "row-level-security-applied", "row-level-security-statistics", "schema-inline", "schema-inline-format", "schema-update-options", "script-statistics", "self-link", "session-id", "session-info", "skip-leading-rows", "source-format", "source-model", "source-table", "source-uri-prefix", "source-uris", "start", "start-time", "state", "statement-type", "statistics", "status", "table-id", "time-partitioning", "total-bytes-billed", "total-bytes-processed", "total-bytes-processed-accuracy", "total-partitions-processed", "total-slot-ms", "transaction-id", "transaction-info", "type", "updated-row-count", "use-avro-logical-types", "use-legacy-sql", "use-query-cache", "user-email", "write-disposition"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["acceleration-mode", "allow-jagged-rows", "allow-large-results", "allow-quoted-newlines", "autodetect", "bad-records", 
"bi-engine-mode", "bi-engine-statistics", "billing-tier", "cache-hit", "clustering", "completion-ratio", "compression", "configuration", "copied-logical-bytes", "copied-rows", "copy", "create-disposition", "create-session", "creation-time", "current-iteration", "data-masking-applied", "data-masking-statistics", "dataset-id", "ddl-affected-row-access-policy-count", "ddl-destination-table", "ddl-operation-performed", "ddl-target-dataset", "ddl-target-routine", "ddl-target-row-access-policy", "ddl-target-table", "debug-info", "decimal-target-types", "default-dataset", "deleted-row-count", "description", "destination-encryption-configuration", "destination-format", "destination-table", "destination-table-properties", "destination-uri", "destination-uri-file-counts", "destination-uris", "dml-stats", "dry-run", "enable-list-inference", "encoding", "end", "end-time", "endpoints", "enum-as-string", "error-result", "estimated-bytes-processed", "etag", "evaluation-kind", "expected-total-iterations", "expiration-ms", "expiration-time", "extract", "field", "field-delimiter", "fields", "flatten-results", "friendly-name", "hive-partitioning-options", "id", "ignore-unknown-values", "index-usage-mode", "input-bytes", "input-file-bytes", "input-files", "inserted-row-count", "interval", "job-id", "job-reference", "job-timeout-ms", "job-type", "json-extension", "kind", "kms-key-name", "labels", "load", "location", "logging-info", "max-bad-records", "max-iterations", "maximum-billing-tier", "maximum-bytes-billed", "message", "ml-statistics", "mode", "model-id", "model-training", "model-training-current-iteration", "model-training-expected-total-iteration", "null-marker", "num-child-jobs", "num-dml-affected-rows", "operation-type", "output-bytes", "output-rows", "parameter-mode", "parent-job-id", "parquet-options", "policy-id", "preserve-ascii-control-characters", "preserve-nulls", "print-header", "priority", "project-id", "project-id", "projection-fields", "query", "quota-deferments", 
"quote", "range", "range-partitioning", "reason", "reference-file-schema-uri", "require-partition-filter", "reservation-id", "resource-type", "routine-id", "row-level-security-applied", "row-level-security-statistics", "schema-inline", "schema-inline-format", "schema-update-options", "script-statistics", "search-statistics", "self-link", "session-id", "session-info", "skip-leading-rows", "source-format", "source-model", "source-table", "source-uri-prefix", "source-uris", "spark-statistics", "spark-job-id", "spark-job-location", "start", "start-time", "state", "statement-type", "statistics", "status", "table-id", "time-partitioning", "total-bytes-billed", "total-bytes-processed", "total-bytes-processed-accuracy", "total-partitions-processed", "total-slot-ms", "transaction-id", "transaction-info", "transferred-bytes", "type", "updated-row-count", "use-avro-logical-types", "use-legacy-sql", "use-query-cache", "user-email", "write-disposition"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1026,16 +1044,16 @@ where call = call.page_token(value.unwrap_or("")); }, "min-creation-time" => { - call = call.min_creation_time(value.unwrap_or("")); + call = call.min_creation_time( value.map(|v| arg_from_str(v, err, "min-creation-time", "uint64")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "max-creation-time" => { - call = call.max_creation_time(value.unwrap_or("")); + call = call.max_creation_time( value.map(|v| arg_from_str(v, err, "max-creation-time", "uint64")).unwrap_or(0)); }, "all-users" => { - call = call.all_users(arg_from_str(value.unwrap_or("false"), err, "all-users", "boolean")); + call = call.all_users( value.map(|v| arg_from_str(v, err, "all-users", "boolean")).unwrap_or(false)); }, _ 
=> { let mut found = false; @@ -1290,7 +1308,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1364,11 +1382,65 @@ where match &temp_cursor.to_string()[..] { "best-trial-id" => Some(("bestTrialId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-time" => Some(("creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "default-trial-id" => Some(("defaultTrialId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "encryption-configuration.kms-key-name" => Some(("encryptionConfiguration.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "expiration-time" => Some(("expirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "friendly-name" => Some(("friendlyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.activation-fn.candidates" => Some(("hparamSearchSpaces.activationFn.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.batch-size.candidates.candidates" => Some(("hparamSearchSpaces.batchSize.candidates.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.batch-size.range.max" => Some(("hparamSearchSpaces.batchSize.range.max", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.batch-size.range.min" => Some(("hparamSearchSpaces.batchSize.range.min", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.booster-type.candidates" => Some(("hparamSearchSpaces.boosterType.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.colsample-bylevel.candidates.candidates" => Some(("hparamSearchSpaces.colsampleBylevel.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.colsample-bylevel.range.max" => Some(("hparamSearchSpaces.colsampleBylevel.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.colsample-bylevel.range.min" => Some(("hparamSearchSpaces.colsampleBylevel.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.colsample-bynode.candidates.candidates" => Some(("hparamSearchSpaces.colsampleBynode.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.colsample-bynode.range.max" => Some(("hparamSearchSpaces.colsampleBynode.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.colsample-bynode.range.min" => Some(("hparamSearchSpaces.colsampleBynode.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.colsample-bytree.candidates.candidates" => Some(("hparamSearchSpaces.colsampleBytree.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.colsample-bytree.range.max" => Some(("hparamSearchSpaces.colsampleBytree.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.colsample-bytree.range.min" => Some(("hparamSearchSpaces.colsampleBytree.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.dart-normalize-type.candidates" => 
Some(("hparamSearchSpaces.dartNormalizeType.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.dropout.candidates.candidates" => Some(("hparamSearchSpaces.dropout.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.dropout.range.max" => Some(("hparamSearchSpaces.dropout.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.dropout.range.min" => Some(("hparamSearchSpaces.dropout.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.l1-reg.candidates.candidates" => Some(("hparamSearchSpaces.l1Reg.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.l1-reg.range.max" => Some(("hparamSearchSpaces.l1Reg.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.l1-reg.range.min" => Some(("hparamSearchSpaces.l1Reg.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.l2-reg.candidates.candidates" => Some(("hparamSearchSpaces.l2Reg.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.l2-reg.range.max" => Some(("hparamSearchSpaces.l2Reg.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.l2-reg.range.min" => Some(("hparamSearchSpaces.l2Reg.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.learn-rate.candidates.candidates" => Some(("hparamSearchSpaces.learnRate.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.learn-rate.range.max" => Some(("hparamSearchSpaces.learnRate.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + 
"hparam-search-spaces.learn-rate.range.min" => Some(("hparamSearchSpaces.learnRate.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.max-tree-depth.candidates.candidates" => Some(("hparamSearchSpaces.maxTreeDepth.candidates.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.max-tree-depth.range.max" => Some(("hparamSearchSpaces.maxTreeDepth.range.max", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.max-tree-depth.range.min" => Some(("hparamSearchSpaces.maxTreeDepth.range.min", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.min-split-loss.candidates.candidates" => Some(("hparamSearchSpaces.minSplitLoss.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.min-split-loss.range.max" => Some(("hparamSearchSpaces.minSplitLoss.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.min-split-loss.range.min" => Some(("hparamSearchSpaces.minSplitLoss.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.min-tree-child-weight.candidates.candidates" => Some(("hparamSearchSpaces.minTreeChildWeight.candidates.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.min-tree-child-weight.range.max" => Some(("hparamSearchSpaces.minTreeChildWeight.range.max", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.min-tree-child-weight.range.min" => Some(("hparamSearchSpaces.minTreeChildWeight.range.min", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.num-clusters.candidates.candidates" => Some(("hparamSearchSpaces.numClusters.candidates.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), + "hparam-search-spaces.num-clusters.range.max" => Some(("hparamSearchSpaces.numClusters.range.max", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.num-clusters.range.min" => Some(("hparamSearchSpaces.numClusters.range.min", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.num-factors.candidates.candidates" => Some(("hparamSearchSpaces.numFactors.candidates.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.num-factors.range.max" => Some(("hparamSearchSpaces.numFactors.range.max", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.num-factors.range.min" => Some(("hparamSearchSpaces.numFactors.range.min", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.num-parallel-tree.candidates.candidates" => Some(("hparamSearchSpaces.numParallelTree.candidates.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.num-parallel-tree.range.max" => Some(("hparamSearchSpaces.numParallelTree.range.max", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.num-parallel-tree.range.min" => Some(("hparamSearchSpaces.numParallelTree.range.min", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "hparam-search-spaces.optimizer.candidates" => Some(("hparamSearchSpaces.optimizer.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.subsample.candidates.candidates" => Some(("hparamSearchSpaces.subsample.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.subsample.range.max" => Some(("hparamSearchSpaces.subsample.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + 
"hparam-search-spaces.subsample.range.min" => Some(("hparamSearchSpaces.subsample.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.tree-method.candidates" => Some(("hparamSearchSpaces.treeMethod.candidates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hparam-search-spaces.wals-alpha.candidates.candidates" => Some(("hparamSearchSpaces.walsAlpha.candidates.candidates", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), + "hparam-search-spaces.wals-alpha.range.max" => Some(("hparamSearchSpaces.walsAlpha.range.max", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "hparam-search-spaces.wals-alpha.range.min" => Some(("hparamSearchSpaces.walsAlpha.range.min", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1376,8 +1448,9 @@ where "model-reference.model-id" => Some(("modelReference.modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "model-reference.project-id" => Some(("modelReference.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "model-type" => Some(("modelType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "optimal-trial-ids" => Some(("optimalTrialIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["best-trial-id", "creation-time", "dataset-id", "description", "encryption-configuration", "etag", "expiration-time", "friendly-name", "kms-key-name", "labels", "last-modified-time", "location", "model-id", "model-reference", "model-type", 
"project-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-fn", "batch-size", "best-trial-id", "booster-type", "candidates", "colsample-bylevel", "colsample-bynode", "colsample-bytree", "creation-time", "dart-normalize-type", "dataset-id", "default-trial-id", "description", "dropout", "encryption-configuration", "etag", "expiration-time", "friendly-name", "hparam-search-spaces", "kms-key-name", "l1-reg", "l2-reg", "labels", "last-modified-time", "learn-rate", "location", "max", "max-tree-depth", "min", "min-split-loss", "min-tree-child-weight", "model-id", "model-reference", "model-type", "num-clusters", "num-factors", "num-parallel-tree", "optimal-trial-ids", "optimizer", "project-id", "range", "subsample", "tree-method", "wals-alpha"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1499,7 +1572,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1599,7 +1672,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1679,14 +1752,27 @@ where "imported-libraries" => Some(("importedLibraries", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "language" => Some(("language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "remote-function-options.connection" => Some(("remoteFunctionOptions.connection", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "remote-function-options.endpoint" => Some(("remoteFunctionOptions.endpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "remote-function-options.max-batching-rows" => Some(("remoteFunctionOptions.maxBatchingRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "remote-function-options.user-defined-context" => Some(("remoteFunctionOptions.userDefinedContext", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "return-type.type-kind" => Some(("returnType.typeKind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routine-reference.dataset-id" => Some(("routineReference.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routine-reference.project-id" => Some(("routineReference.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routine-reference.routine-id" => Some(("routineReference.routineId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routine-type" => Some(("routineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spark-options.archive-uris" => Some(("sparkOptions.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "spark-options.connection" => Some(("sparkOptions.connection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spark-options.container-image" => Some(("sparkOptions.containerImage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spark-options.file-uris" => Some(("sparkOptions.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "spark-options.jar-uris" => Some(("sparkOptions.jarUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "spark-options.main-file-uri" => Some(("sparkOptions.mainFileUri", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "spark-options.properties" => Some(("sparkOptions.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "spark-options.py-file-uris" => Some(("sparkOptions.pyFileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "spark-options.runtime-version" => Some(("sparkOptions.runtimeVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "strict-mode" => Some(("strictMode", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "dataset-id", "definition-body", "description", "determinism-level", "etag", "imported-libraries", "language", "last-modified-time", "project-id", "return-type", "routine-id", "routine-reference", "routine-type", "strict-mode", "type-kind"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "connection", "container-image", "creation-time", "dataset-id", "definition-body", "description", "determinism-level", "endpoint", "etag", "file-uris", "imported-libraries", "jar-uris", "language", "last-modified-time", "main-file-uri", "max-batching-rows", "project-id", "properties", "py-file-uris", "remote-function-options", "return-type", "routine-id", "routine-reference", "routine-type", "runtime-version", "spark-options", "strict-mode", "type-kind", "user-defined-context"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1753,13 +1839,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, 
"max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1842,14 +1928,27 @@ where "imported-libraries" => Some(("importedLibraries", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "language" => Some(("language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "remote-function-options.connection" => Some(("remoteFunctionOptions.connection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "remote-function-options.endpoint" => Some(("remoteFunctionOptions.endpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "remote-function-options.max-batching-rows" => Some(("remoteFunctionOptions.maxBatchingRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "remote-function-options.user-defined-context" => Some(("remoteFunctionOptions.userDefinedContext", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "return-type.type-kind" => Some(("returnType.typeKind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routine-reference.dataset-id" => Some(("routineReference.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routine-reference.project-id" => Some(("routineReference.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routine-reference.routine-id" => Some(("routineReference.routineId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routine-type" => Some(("routineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spark-options.archive-uris" => Some(("sparkOptions.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + 
"spark-options.connection" => Some(("sparkOptions.connection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spark-options.container-image" => Some(("sparkOptions.containerImage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spark-options.file-uris" => Some(("sparkOptions.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "spark-options.jar-uris" => Some(("sparkOptions.jarUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "spark-options.main-file-uri" => Some(("sparkOptions.mainFileUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spark-options.properties" => Some(("sparkOptions.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "spark-options.py-file-uris" => Some(("sparkOptions.pyFileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "spark-options.runtime-version" => Some(("sparkOptions.runtimeVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "strict-mode" => Some(("strictMode", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "dataset-id", "definition-body", "description", "determinism-level", "etag", "imported-libraries", "language", "last-modified-time", "project-id", "return-type", "routine-id", "routine-reference", "routine-type", "strict-mode", "type-kind"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "connection", "container-image", "creation-time", "dataset-id", "definition-body", "description", "determinism-level", "endpoint", "etag", "file-uris", "imported-libraries", "jar-uris", "language", "last-modified-time", "main-file-uri", "max-batching-rows", "project-id", "properties", "py-file-uris", "remote-function-options", "return-type", "routine-id", "routine-reference", "routine-type", 
"runtime-version", "spark-options", "strict-mode", "type-kind", "user-defined-context"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2004,7 +2103,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2320,7 +2419,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(value.unwrap_or("")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint64")).unwrap_or(0)); }, "selected-fields" => { call = call.selected_fields(value.unwrap_or("")); @@ -2329,7 +2428,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2428,6 +2527,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "view" => { + call = call.view(value.unwrap_or("")); + }, "selected-fields" => { call = call.selected_fields(value.unwrap_or("")); }, @@ -2444,7 +2546,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["selected-fields"].iter().map(|v|*v)); + v.extend(["selected-fields", "view"].iter().map(|v|*v)); v } )); } } @@ -2586,6 +2688,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "clone-definition.base-table-reference.dataset-id" => Some(("cloneDefinition.baseTableReference.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.base-table-reference.project-id" => Some(("cloneDefinition.baseTableReference.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.base-table-reference.table-id" => Some(("cloneDefinition.baseTableReference.tableId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.clone-time" => Some(("cloneDefinition.cloneTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clustering.fields" => Some(("clustering.fields", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "creation-time" => Some(("creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "default-collation" => Some(("defaultCollation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2604,6 +2710,7 @@ where "external-data-configuration.csv-options.encoding" => Some(("externalDataConfiguration.csvOptions.encoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.field-delimiter" => Some(("externalDataConfiguration.csvOptions.fieldDelimiter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.null-marker" => Some(("externalDataConfiguration.csvOptions.null_marker", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "external-data-configuration.csv-options.preserve-ascii-control-characters" => Some(("externalDataConfiguration.csvOptions.preserveAsciiControlCharacters", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.quote" => Some(("externalDataConfiguration.csvOptions.quote", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "external-data-configuration.csv-options.skip-leading-rows" => Some(("externalDataConfiguration.csvOptions.skipLeadingRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.decimal-target-types" => Some(("externalDataConfiguration.decimalTargetTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2614,8 +2721,11 @@ where "external-data-configuration.hive-partitioning-options.source-uri-prefix" => Some(("externalDataConfiguration.hivePartitioningOptions.sourceUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.ignore-unknown-values" => Some(("externalDataConfiguration.ignoreUnknownValues", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.max-bad-records" => Some(("externalDataConfiguration.maxBadRecords", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "external-data-configuration.metadata-cache-mode" => Some(("externalDataConfiguration.metadataCacheMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "external-data-configuration.object-metadata" => Some(("externalDataConfiguration.objectMetadata", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.parquet-options.enable-list-inference" => Some(("externalDataConfiguration.parquetOptions.enableListInference", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.parquet-options.enum-as-string" => Some(("externalDataConfiguration.parquetOptions.enumAsString", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "external-data-configuration.reference-file-schema-uri" => Some(("externalDataConfiguration.referenceFileSchemaUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.source-format" => 
Some(("externalDataConfiguration.sourceFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.source-uris" => Some(("externalDataConfiguration.sourceUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "friendly-name" => Some(("friendlyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2624,10 +2734,13 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "materialized-view.allow-non-incremental-definition" => Some(("materializedView.allow_non_incremental_definition", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "materialized-view.enable-refresh" => Some(("materializedView.enableRefresh", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "materialized-view.last-refresh-time" => Some(("materializedView.lastRefreshTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "materialized-view.max-staleness" => Some(("materializedView.maxStaleness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "materialized-view.query" => Some(("materializedView.query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "materialized-view.refresh-interval-ms" => Some(("materializedView.refreshIntervalMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "max-staleness" => Some(("maxStaleness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "model.model-options.labels" => Some(("model.modelOptions.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "model.model-options.loss-type" => Some(("model.modelOptions.lossType", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "model.model-options.model-type" => Some(("model.modelOptions.modelType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2635,6 +2748,14 @@ where "num-long-term-bytes" => Some(("numLongTermBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "num-physical-bytes" => Some(("numPhysicalBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "num-rows" => Some(("numRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-active-logical-bytes" => Some(("num_active_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-active-physical-bytes" => Some(("num_active_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-long-term-logical-bytes" => Some(("num_long_term_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-long-term-physical-bytes" => Some(("num_long_term_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-partitions" => Some(("num_partitions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-time-travel-physical-bytes" => Some(("num_time_travel_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-total-logical-bytes" => Some(("num_total_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-total-physical-bytes" => Some(("num_total_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "range-partitioning.field" => Some(("rangePartitioning.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "range-partitioning.range.end" => Some(("rangePartitioning.range.end", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "range-partitioning.range.interval" => Some(("rangePartitioning.range.interval", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2660,7 +2781,7 @@ where "view.use-explicit-column-names" => Some(("view.useExplicitColumnNames", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "view.use-legacy-sql" => Some(("view.useLegacySql", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-jagged-rows", "allow-quoted-newlines", "autodetect", "avro-options", "base-table-reference", "bigtable-options", "clustering", "compression", "connection-id", "creation-time", "csv-options", "dataset-id", "decimal-target-types", "default-collation", "description", "enable-list-inference", "enable-refresh", "encoding", "encryption-configuration", "end", "enum-as-string", "estimated-bytes", "estimated-rows", "etag", "expiration-ms", "expiration-time", "external-data-configuration", "field", "field-delimiter", "fields", "friendly-name", "google-sheets-options", "hive-partitioning-options", "id", "ignore-unknown-values", "ignore-unspecified-column-families", "interval", "kind", "kms-key-name", "labels", "last-modified-time", "last-refresh-time", "location", "loss-type", "materialized-view", "max-bad-records", "mode", "model", "model-options", "model-type", "null-marker", "num-bytes", "num-long-term-bytes", "num-physical-bytes", "num-rows", "oldest-entry-time", "parquet-options", "project-id", "query", "quote", "range", "range-partitioning", "read-rowkey-as-string", "refresh-interval-ms", "require-partition-filter", "self-link", "skip-leading-rows", "snapshot-definition", "snapshot-time", "source-format", "source-uri-prefix", "source-uris", "start", "streaming-buffer", "table-id", "table-reference", "time-partitioning", "type", "use-avro-logical-types", "use-explicit-column-names", "use-legacy-sql", "view"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-jagged-rows", "allow-quoted-newlines", "allow-non-incremental-definition", 
"autodetect", "avro-options", "base-table-reference", "bigtable-options", "clone-definition", "clone-time", "clustering", "compression", "connection-id", "creation-time", "csv-options", "dataset-id", "decimal-target-types", "default-collation", "description", "enable-list-inference", "enable-refresh", "encoding", "encryption-configuration", "end", "enum-as-string", "estimated-bytes", "estimated-rows", "etag", "expiration-ms", "expiration-time", "external-data-configuration", "field", "field-delimiter", "fields", "friendly-name", "google-sheets-options", "hive-partitioning-options", "id", "ignore-unknown-values", "ignore-unspecified-column-families", "interval", "kind", "kms-key-name", "labels", "last-modified-time", "last-refresh-time", "location", "loss-type", "materialized-view", "max-bad-records", "max-staleness", "metadata-cache-mode", "mode", "model", "model-options", "model-type", "null-marker", "num-bytes", "num-long-term-bytes", "num-physical-bytes", "num-rows", "num-active-logical-bytes", "num-active-physical-bytes", "num-long-term-logical-bytes", "num-long-term-physical-bytes", "num-partitions", "num-time-travel-physical-bytes", "num-total-logical-bytes", "num-total-physical-bytes", "object-metadata", "oldest-entry-time", "parquet-options", "preserve-ascii-control-characters", "project-id", "query", "quote", "range", "range-partitioning", "read-rowkey-as-string", "reference-file-schema-uri", "refresh-interval-ms", "require-partition-filter", "self-link", "skip-leading-rows", "snapshot-definition", "snapshot-time", "source-format", "source-uri-prefix", "source-uris", "start", "streaming-buffer", "table-id", "table-reference", "time-partitioning", "type", "use-avro-logical-types", "use-explicit-column-names", "use-legacy-sql", "view"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2730,7 +2851,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { 
- call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2802,6 +2923,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "clone-definition.base-table-reference.dataset-id" => Some(("cloneDefinition.baseTableReference.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.base-table-reference.project-id" => Some(("cloneDefinition.baseTableReference.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.base-table-reference.table-id" => Some(("cloneDefinition.baseTableReference.tableId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.clone-time" => Some(("cloneDefinition.cloneTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clustering.fields" => Some(("clustering.fields", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "creation-time" => Some(("creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "default-collation" => Some(("defaultCollation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2820,6 +2945,7 @@ where "external-data-configuration.csv-options.encoding" => Some(("externalDataConfiguration.csvOptions.encoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.field-delimiter" => Some(("externalDataConfiguration.csvOptions.fieldDelimiter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.null-marker" => Some(("externalDataConfiguration.csvOptions.null_marker", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"external-data-configuration.csv-options.preserve-ascii-control-characters" => Some(("externalDataConfiguration.csvOptions.preserveAsciiControlCharacters", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.quote" => Some(("externalDataConfiguration.csvOptions.quote", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.skip-leading-rows" => Some(("externalDataConfiguration.csvOptions.skipLeadingRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.decimal-target-types" => Some(("externalDataConfiguration.decimalTargetTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2830,8 +2956,11 @@ where "external-data-configuration.hive-partitioning-options.source-uri-prefix" => Some(("externalDataConfiguration.hivePartitioningOptions.sourceUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.ignore-unknown-values" => Some(("externalDataConfiguration.ignoreUnknownValues", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.max-bad-records" => Some(("externalDataConfiguration.maxBadRecords", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "external-data-configuration.metadata-cache-mode" => Some(("externalDataConfiguration.metadataCacheMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "external-data-configuration.object-metadata" => Some(("externalDataConfiguration.objectMetadata", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.parquet-options.enable-list-inference" => Some(("externalDataConfiguration.parquetOptions.enableListInference", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.parquet-options.enum-as-string" => 
Some(("externalDataConfiguration.parquetOptions.enumAsString", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "external-data-configuration.reference-file-schema-uri" => Some(("externalDataConfiguration.referenceFileSchemaUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.source-format" => Some(("externalDataConfiguration.sourceFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.source-uris" => Some(("externalDataConfiguration.sourceUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "friendly-name" => Some(("friendlyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2840,10 +2969,13 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "materialized-view.allow-non-incremental-definition" => Some(("materializedView.allow_non_incremental_definition", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "materialized-view.enable-refresh" => Some(("materializedView.enableRefresh", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "materialized-view.last-refresh-time" => Some(("materializedView.lastRefreshTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "materialized-view.max-staleness" => Some(("materializedView.maxStaleness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "materialized-view.query" => Some(("materializedView.query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "materialized-view.refresh-interval-ms" => Some(("materializedView.refreshIntervalMs", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "max-staleness" => Some(("maxStaleness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "model.model-options.labels" => Some(("model.modelOptions.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "model.model-options.loss-type" => Some(("model.modelOptions.lossType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "model.model-options.model-type" => Some(("model.modelOptions.modelType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2851,6 +2983,14 @@ where "num-long-term-bytes" => Some(("numLongTermBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "num-physical-bytes" => Some(("numPhysicalBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "num-rows" => Some(("numRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-active-logical-bytes" => Some(("num_active_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-active-physical-bytes" => Some(("num_active_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-long-term-logical-bytes" => Some(("num_long_term_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-long-term-physical-bytes" => Some(("num_long_term_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-partitions" => Some(("num_partitions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-time-travel-physical-bytes" => Some(("num_time_travel_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-total-logical-bytes" => Some(("num_total_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-total-physical-bytes" => Some(("num_total_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "range-partitioning.field" => Some(("rangePartitioning.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "range-partitioning.range.end" => Some(("rangePartitioning.range.end", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "range-partitioning.range.interval" => Some(("rangePartitioning.range.interval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2876,7 +3016,7 @@ where "view.use-explicit-column-names" => Some(("view.useExplicitColumnNames", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "view.use-legacy-sql" => Some(("view.useLegacySql", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-jagged-rows", "allow-quoted-newlines", "autodetect", "avro-options", "base-table-reference", "bigtable-options", "clustering", "compression", "connection-id", "creation-time", "csv-options", "dataset-id", "decimal-target-types", "default-collation", "description", "enable-list-inference", "enable-refresh", "encoding", "encryption-configuration", "end", "enum-as-string", "estimated-bytes", "estimated-rows", "etag", "expiration-ms", "expiration-time", "external-data-configuration", "field", "field-delimiter", "fields", "friendly-name", "google-sheets-options", "hive-partitioning-options", "id", "ignore-unknown-values", "ignore-unspecified-column-families", "interval", "kind", "kms-key-name", "labels", "last-modified-time", "last-refresh-time", "location", "loss-type", "materialized-view", "max-bad-records", "mode", "model", "model-options", "model-type", "null-marker", "num-bytes", "num-long-term-bytes", "num-physical-bytes", "num-rows", "oldest-entry-time", "parquet-options", "project-id", "query", "quote", "range", "range-partitioning", "read-rowkey-as-string", "refresh-interval-ms", "require-partition-filter", "self-link", "skip-leading-rows", "snapshot-definition", 
"snapshot-time", "source-format", "source-uri-prefix", "source-uris", "start", "streaming-buffer", "table-id", "table-reference", "time-partitioning", "type", "use-avro-logical-types", "use-explicit-column-names", "use-legacy-sql", "view"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-jagged-rows", "allow-quoted-newlines", "allow-non-incremental-definition", "autodetect", "avro-options", "base-table-reference", "bigtable-options", "clone-definition", "clone-time", "clustering", "compression", "connection-id", "creation-time", "csv-options", "dataset-id", "decimal-target-types", "default-collation", "description", "enable-list-inference", "enable-refresh", "encoding", "encryption-configuration", "end", "enum-as-string", "estimated-bytes", "estimated-rows", "etag", "expiration-ms", "expiration-time", "external-data-configuration", "field", "field-delimiter", "fields", "friendly-name", "google-sheets-options", "hive-partitioning-options", "id", "ignore-unknown-values", "ignore-unspecified-column-families", "interval", "kind", "kms-key-name", "labels", "last-modified-time", "last-refresh-time", "location", "loss-type", "materialized-view", "max-bad-records", "max-staleness", "metadata-cache-mode", "mode", "model", "model-options", "model-type", "null-marker", "num-bytes", "num-long-term-bytes", "num-physical-bytes", "num-rows", "num-active-logical-bytes", "num-active-physical-bytes", "num-long-term-logical-bytes", "num-long-term-physical-bytes", "num-partitions", "num-time-travel-physical-bytes", "num-total-logical-bytes", "num-total-physical-bytes", "object-metadata", "oldest-entry-time", "parquet-options", "preserve-ascii-control-characters", "project-id", "query", "quote", "range", "range-partitioning", "read-rowkey-as-string", "reference-file-schema-uri", "refresh-interval-ms", "require-partition-filter", "self-link", "skip-leading-rows", "snapshot-definition", "snapshot-time", "source-format", "source-uri-prefix", "source-uris", "start", 
"streaming-buffer", "table-id", "table-reference", "time-partitioning", "type", "use-avro-logical-types", "use-explicit-column-names", "use-legacy-sql", "view"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2891,7 +3031,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "autodetect-schema" => { - call = call.autodetect_schema(arg_from_str(value.unwrap_or("false"), err, "autodetect-schema", "boolean")); + call = call.autodetect_schema( value.map(|v| arg_from_str(v, err, "autodetect-schema", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3135,6 +3275,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "clone-definition.base-table-reference.dataset-id" => Some(("cloneDefinition.baseTableReference.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.base-table-reference.project-id" => Some(("cloneDefinition.baseTableReference.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.base-table-reference.table-id" => Some(("cloneDefinition.baseTableReference.tableId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-definition.clone-time" => Some(("cloneDefinition.cloneTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clustering.fields" => Some(("clustering.fields", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "creation-time" => Some(("creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "default-collation" => Some(("defaultCollation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3153,6 +3297,7 @@ where "external-data-configuration.csv-options.encoding" => Some(("externalDataConfiguration.csvOptions.encoding", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "external-data-configuration.csv-options.field-delimiter" => Some(("externalDataConfiguration.csvOptions.fieldDelimiter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.null-marker" => Some(("externalDataConfiguration.csvOptions.null_marker", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "external-data-configuration.csv-options.preserve-ascii-control-characters" => Some(("externalDataConfiguration.csvOptions.preserveAsciiControlCharacters", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.quote" => Some(("externalDataConfiguration.csvOptions.quote", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.csv-options.skip-leading-rows" => Some(("externalDataConfiguration.csvOptions.skipLeadingRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.decimal-target-types" => Some(("externalDataConfiguration.decimalTargetTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -3163,8 +3308,11 @@ where "external-data-configuration.hive-partitioning-options.source-uri-prefix" => Some(("externalDataConfiguration.hivePartitioningOptions.sourceUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.ignore-unknown-values" => Some(("externalDataConfiguration.ignoreUnknownValues", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.max-bad-records" => Some(("externalDataConfiguration.maxBadRecords", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "external-data-configuration.metadata-cache-mode" => Some(("externalDataConfiguration.metadataCacheMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "external-data-configuration.object-metadata" => 
Some(("externalDataConfiguration.objectMetadata", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.parquet-options.enable-list-inference" => Some(("externalDataConfiguration.parquetOptions.enableListInference", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "external-data-configuration.parquet-options.enum-as-string" => Some(("externalDataConfiguration.parquetOptions.enumAsString", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "external-data-configuration.reference-file-schema-uri" => Some(("externalDataConfiguration.referenceFileSchemaUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.source-format" => Some(("externalDataConfiguration.sourceFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-data-configuration.source-uris" => Some(("externalDataConfiguration.sourceUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "friendly-name" => Some(("friendlyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3173,10 +3321,13 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "materialized-view.allow-non-incremental-definition" => Some(("materializedView.allow_non_incremental_definition", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "materialized-view.enable-refresh" => Some(("materializedView.enableRefresh", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "materialized-view.last-refresh-time" => Some(("materializedView.lastRefreshTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"materialized-view.max-staleness" => Some(("materializedView.maxStaleness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "materialized-view.query" => Some(("materializedView.query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "materialized-view.refresh-interval-ms" => Some(("materializedView.refreshIntervalMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "max-staleness" => Some(("maxStaleness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "model.model-options.labels" => Some(("model.modelOptions.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "model.model-options.loss-type" => Some(("model.modelOptions.lossType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "model.model-options.model-type" => Some(("model.modelOptions.modelType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3184,6 +3335,14 @@ where "num-long-term-bytes" => Some(("numLongTermBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "num-physical-bytes" => Some(("numPhysicalBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "num-rows" => Some(("numRows", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-active-logical-bytes" => Some(("num_active_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-active-physical-bytes" => Some(("num_active_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-long-term-logical-bytes" => Some(("num_long_term_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-long-term-physical-bytes" => Some(("num_long_term_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-partitions" => Some(("num_partitions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "num-time-travel-physical-bytes" => Some(("num_time_travel_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-total-logical-bytes" => Some(("num_total_logical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "num-total-physical-bytes" => Some(("num_total_physical_bytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "range-partitioning.field" => Some(("rangePartitioning.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "range-partitioning.range.end" => Some(("rangePartitioning.range.end", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "range-partitioning.range.interval" => Some(("rangePartitioning.range.interval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3209,7 +3368,7 @@ where "view.use-explicit-column-names" => Some(("view.useExplicitColumnNames", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "view.use-legacy-sql" => Some(("view.useLegacySql", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-jagged-rows", "allow-quoted-newlines", "autodetect", "avro-options", "base-table-reference", "bigtable-options", "clustering", "compression", "connection-id", "creation-time", "csv-options", "dataset-id", "decimal-target-types", "default-collation", "description", "enable-list-inference", "enable-refresh", "encoding", "encryption-configuration", "end", "enum-as-string", "estimated-bytes", "estimated-rows", "etag", "expiration-ms", "expiration-time", "external-data-configuration", "field", "field-delimiter", "fields", "friendly-name", "google-sheets-options", "hive-partitioning-options", "id", "ignore-unknown-values", "ignore-unspecified-column-families", "interval", "kind", "kms-key-name", "labels", "last-modified-time", "last-refresh-time", "location", "loss-type", 
"materialized-view", "max-bad-records", "mode", "model", "model-options", "model-type", "null-marker", "num-bytes", "num-long-term-bytes", "num-physical-bytes", "num-rows", "oldest-entry-time", "parquet-options", "project-id", "query", "quote", "range", "range-partitioning", "read-rowkey-as-string", "refresh-interval-ms", "require-partition-filter", "self-link", "skip-leading-rows", "snapshot-definition", "snapshot-time", "source-format", "source-uri-prefix", "source-uris", "start", "streaming-buffer", "table-id", "table-reference", "time-partitioning", "type", "use-avro-logical-types", "use-explicit-column-names", "use-legacy-sql", "view"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-jagged-rows", "allow-quoted-newlines", "allow-non-incremental-definition", "autodetect", "avro-options", "base-table-reference", "bigtable-options", "clone-definition", "clone-time", "clustering", "compression", "connection-id", "creation-time", "csv-options", "dataset-id", "decimal-target-types", "default-collation", "description", "enable-list-inference", "enable-refresh", "encoding", "encryption-configuration", "end", "enum-as-string", "estimated-bytes", "estimated-rows", "etag", "expiration-ms", "expiration-time", "external-data-configuration", "field", "field-delimiter", "fields", "friendly-name", "google-sheets-options", "hive-partitioning-options", "id", "ignore-unknown-values", "ignore-unspecified-column-families", "interval", "kind", "kms-key-name", "labels", "last-modified-time", "last-refresh-time", "location", "loss-type", "materialized-view", "max-bad-records", "max-staleness", "metadata-cache-mode", "mode", "model", "model-options", "model-type", "null-marker", "num-bytes", "num-long-term-bytes", "num-physical-bytes", "num-rows", "num-active-logical-bytes", "num-active-physical-bytes", "num-long-term-logical-bytes", "num-long-term-physical-bytes", "num-partitions", "num-time-travel-physical-bytes", "num-total-logical-bytes", 
"num-total-physical-bytes", "object-metadata", "oldest-entry-time", "parquet-options", "preserve-ascii-control-characters", "project-id", "query", "quote", "range", "range-partitioning", "read-rowkey-as-string", "reference-file-schema-uri", "refresh-interval-ms", "require-partition-filter", "self-link", "skip-leading-rows", "snapshot-definition", "snapshot-time", "source-format", "source-uri-prefix", "source-uris", "start", "streaming-buffer", "table-id", "table-reference", "time-partitioning", "type", "use-avro-logical-types", "use-explicit-column-names", "use-legacy-sql", "view"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3224,7 +3383,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "autodetect-schema" => { - call = call.autodetect_schema(arg_from_str(value.unwrap_or("false"), err, "autodetect-schema", "boolean")); + call = call.autodetect_schema( value.map(|v| arg_from_str(v, err, "autodetect-schema", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4242,7 +4401,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4304,7 +4463,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4332,7 +4491,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4502,7 +4661,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4632,7 +4791,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4660,7 +4819,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4728,7 +4887,7 @@ async fn main() { let mut app = App::new("bigquery2") .author("Sebastian Thiel ") - .version("4.0.1+20220222") + .version("5.0.2+20230114") .about("A data platform for customers to create, manage, share and query data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_bigquery2_cli") .arg(Arg::with_name("url") diff --git a/gen/bigquery2/Cargo.toml b/gen/bigquery2/Cargo.toml index 4153e12e40..9203be8719 100644 --- a/gen/bigquery2/Cargo.toml +++ b/gen/bigquery2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-bigquery2" -version = "5.0.2-beta-1+20230114" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with bigquery (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigquery2" homepage = "https://cloud.google.com/bigquery/" -documentation = "https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114" +documentation = "https://docs.rs/google-bigquery2/5.0.2+20230114" license = "MIT" keywords = ["bigquery", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/bigquery2/README.md b/gen/bigquery2/README.md index 2a29558fb5..8e3540c648 100644 --- a/gen/bigquery2/README.md +++ b/gen/bigquery2/README.md @@ -5,35 +5,35 @@ DO NOT EDIT ! --> The `google-bigquery2` library allows access to all features of the *Google bigquery* service. -This documentation was generated from *bigquery* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *bigquery:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *bigquery* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *bigquery:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *bigquery* *v2* API can be found at the [official documentation site](https://cloud.google.com/bigquery/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/Bigquery) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/Bigquery) ... -* [datasets](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::Dataset) - * [*delete*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::DatasetDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::DatasetGetCall), [*insert*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::DatasetInsertCall), [*list*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::DatasetListCall), [*patch*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::DatasetPatchCall) and [*update*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::DatasetUpdateCall) -* [jobs](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::Job) - * [*cancel*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::JobCancelCall), [*delete*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::JobDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::JobGetCall), [*get query results*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::JobGetQueryResultCall), 
[*insert*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::JobInsertCall), [*list*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::JobListCall) and [*query*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::JobQueryCall) -* [models](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::Model) - * [*delete*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::ModelDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::ModelGetCall), [*list*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::ModelListCall) and [*patch*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::ModelPatchCall) +* [datasets](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::Dataset) + * [*delete*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::DatasetDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::DatasetGetCall), [*insert*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::DatasetInsertCall), [*list*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::DatasetListCall), [*patch*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::DatasetPatchCall) and [*update*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::DatasetUpdateCall) +* [jobs](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::Job) + * [*cancel*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::JobCancelCall), [*delete*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::JobDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::JobGetCall), [*get query 
results*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::JobGetQueryResultCall), [*insert*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::JobInsertCall), [*list*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::JobListCall) and [*query*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::JobQueryCall) +* [models](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::Model) + * [*delete*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::ModelDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::ModelGetCall), [*list*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::ModelListCall) and [*patch*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::ModelPatchCall) * projects - * [*get service account*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::ProjectGetServiceAccountCall) and [*list*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::ProjectListCall) -* [routines](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::Routine) - * [*delete*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RoutineDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RoutineGetCall), [*insert*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RoutineInsertCall), [*list*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RoutineListCall) and [*update*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RoutineUpdateCall) -* [row access policies](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RowAccessPolicy) - * [*get iam 
policy*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RowAccessPolicyGetIamPolicyCall), [*list*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RowAccessPolicyListCall), [*set iam policy*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RowAccessPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::RowAccessPolicyTestIamPermissionCall) + * [*get service account*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::ProjectGetServiceAccountCall) and [*list*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::ProjectListCall) +* [routines](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::Routine) + * [*delete*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RoutineDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RoutineGetCall), [*insert*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RoutineInsertCall), [*list*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RoutineListCall) and [*update*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RoutineUpdateCall) +* [row access policies](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RowAccessPolicy) + * [*get iam policy*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RowAccessPolicyGetIamPolicyCall), [*list*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RowAccessPolicyListCall), [*set iam policy*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RowAccessPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::RowAccessPolicyTestIamPermissionCall) * tabledata - * [*insert 
all*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TabledataInsertAllCall) and [*list*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TabledataListCall) -* [tables](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::Table) - * [*delete*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TableDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TableGetCall), [*get iam policy*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TableGetIamPolicyCall), [*insert*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TableInsertCall), [*list*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TableListCall), [*patch*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TablePatchCall), [*set iam policy*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TableSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TableTestIamPermissionCall) and [*update*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::TableUpdateCall) + * [*insert all*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TabledataInsertAllCall) and [*list*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TabledataListCall) +* [tables](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::Table) + * [*delete*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TableDeleteCall), [*get*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TableGetCall), [*get iam policy*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TableGetIamPolicyCall), 
[*insert*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TableInsertCall), [*list*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TableListCall), [*patch*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TablePatchCall), [*set iam policy*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TableSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TableTestIamPermissionCall) and [*update*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::TableUpdateCall) Upload supported by ... -* [*insert jobs*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/api::JobInsertCall) +* [*insert jobs*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/api::JobInsertCall) @@ -41,17 +41,17 @@ Upload supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/Bigquery)** +* **[Hub](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/Bigquery)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::CallBuilder) -* **[Resources](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::CallBuilder) +* **[Resources](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::Resource)** * primary 
types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::Part)** + * **[Parts](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -146,17 +146,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -166,29 +166,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::Delegate) to the -[Method Builder](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::Delegate) to the +[Method Builder](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::RequestValue) and -[decodable](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::RequestValue) and +[decodable](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-bigquery2/5.0.2-beta-1+20230114/google_bigquery2/client::RequestValue) are moved +* [request values](https://docs.rs/google-bigquery2/5.0.2+20230114/google_bigquery2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/bigquery2/src/api.rs b/gen/bigquery2/src/api.rs index c3951485bb..6b05be11c1 100644 --- a/gen/bigquery2/src/api.rs +++ b/gen/bigquery2/src/api.rs @@ -145,7 +145,7 @@ impl<'a, S> Bigquery { Bigquery { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://bigquery.googleapis.com/bigquery/v2/".to_string(), _root_url: "https://bigquery.googleapis.com/".to_string(), } @@ -177,7 +177,7 @@ impl<'a, S> Bigquery { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/bigquery2/src/client.rs b/gen/bigquery2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/bigquery2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/bigquery2/src/lib.rs b/gen/bigquery2/src/lib.rs index 249d33899f..f5c389105b 100644 --- a/gen/bigquery2/src/lib.rs +++ b/gen/bigquery2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *bigquery* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *bigquery:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *bigquery* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *bigquery:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *bigquery* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/bigquery/). diff --git a/gen/bigqueryconnection1_beta1-cli/Cargo.toml b/gen/bigqueryconnection1_beta1-cli/Cargo.toml index 82ddd5af60..13940e934c 100644 --- a/gen/bigqueryconnection1_beta1-cli/Cargo.toml +++ b/gen/bigqueryconnection1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-bigqueryconnection1_beta1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with BigQuery Connection Service (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigqueryconnection1_beta1-cli" @@ -20,13 +20,13 @@ name = "bigqueryconnection1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-bigqueryconnection1_beta1] path = "../bigqueryconnection1_beta1" -version = "4.0.1+20220226" +version = "5.0.2+20230115" + diff --git a/gen/bigqueryconnection1_beta1-cli/README.md b/gen/bigqueryconnection1_beta1-cli/README.md index 91d953e054..8f66ee9595 100644 --- 
a/gen/bigqueryconnection1_beta1-cli/README.md +++ b/gen/bigqueryconnection1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *BigQuery Connection Service* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *BigQuery Connection Service* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash bigqueryconnection1-beta1 [options] diff --git a/gen/bigqueryconnection1_beta1-cli/mkdocs.yml b/gen/bigqueryconnection1_beta1-cli/mkdocs.yml index 076aaf8bc1..7eb110e594 100644 --- a/gen/bigqueryconnection1_beta1-cli/mkdocs.yml +++ b/gen/bigqueryconnection1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: BigQuery Connection Service v4.0.1+20220226 +site_name: BigQuery Connection Service v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-bigqueryconnection1_beta1-cli site_description: A complete library to interact with BigQuery Connection Service (protocol v1beta1) @@ -7,17 +7,18 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/bigqueryconnecti docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-connections-create.md', 'Projects', 'Locations Connections Create'] -- ['projects_locations-connections-delete.md', 'Projects', 'Locations Connections Delete'] -- ['projects_locations-connections-get.md', 'Projects', 'Locations Connections Get'] -- ['projects_locations-connections-get-iam-policy.md', 'Projects', 'Locations Connections Get Iam Policy'] -- ['projects_locations-connections-list.md', 'Projects', 'Locations Connections List'] -- ['projects_locations-connections-patch.md', 'Projects', 'Locations Connections Patch'] -- ['projects_locations-connections-set-iam-policy.md', 'Projects', 'Locations Connections Set Iam Policy'] -- ['projects_locations-connections-test-iam-permissions.md', 'Projects', 'Locations Connections 
Test Iam Permissions'] -- ['projects_locations-connections-update-credential.md', 'Projects', 'Locations Connections Update Credential'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Connections Create': 'projects_locations-connections-create.md' + - 'Locations Connections Delete': 'projects_locations-connections-delete.md' + - 'Locations Connections Get': 'projects_locations-connections-get.md' + - 'Locations Connections Get Iam Policy': 'projects_locations-connections-get-iam-policy.md' + - 'Locations Connections List': 'projects_locations-connections-list.md' + - 'Locations Connections Patch': 'projects_locations-connections-patch.md' + - 'Locations Connections Set Iam Policy': 'projects_locations-connections-set-iam-policy.md' + - 'Locations Connections Test Iam Permissions': 'projects_locations-connections-test-iam-permissions.md' + - 'Locations Connections Update Credential': 'projects_locations-connections-update-credential.md' theme: readthedocs diff --git a/gen/bigqueryconnection1_beta1-cli/src/client.rs b/gen/bigqueryconnection1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/bigqueryconnection1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - 
pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/bigqueryconnection1_beta1-cli/src/main.rs b/gen/bigqueryconnection1_beta1-cli/src/main.rs index 39e44a5646..31742aee74 100644 --- a/gen/bigqueryconnection1_beta1-cli/src/main.rs +++ b/gen/bigqueryconnection1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_bigqueryconnection1_beta1::{api, Error, oauth2}; +use google_bigqueryconnection1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -78,6 +77,7 @@ where "cloud-sql.credential.username" => Some(("cloudSql.credential.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloud-sql.database" => Some(("cloudSql.database", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloud-sql.instance-id" => Some(("cloudSql.instanceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cloud-sql.service-account-id" => Some(("cloudSql.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloud-sql.type" => Some(("cloudSql.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-time" => Some(("creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -86,7 +86,7 @@ where "last-modified-time" => Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-sql", "creation-time", "credential", "database", "description", "friendly-name", "has-credential", "instance-id", "last-modified-time", "name", "password", "type", "username"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-sql", "creation-time", "credential", "database", "description", "friendly-name", "has-credential", "instance-id", "last-modified-time", "name", "password", "service-account-id", "type", "username"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -349,7 +349,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -425,6 +425,7 @@ where "cloud-sql.credential.username" => Some(("cloudSql.credential.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloud-sql.database" => Some(("cloudSql.database", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloud-sql.instance-id" => Some(("cloudSql.instanceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cloud-sql.service-account-id" => Some(("cloudSql.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloud-sql.type" => Some(("cloudSql.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-time" => Some(("creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -433,7 +434,7 @@ where "last-modified-time" => 
Some(("lastModifiedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-sql", "creation-time", "credential", "database", "description", "friendly-name", "has-credential", "instance-id", "last-modified-time", "name", "password", "type", "username"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-sql", "creation-time", "credential", "database", "description", "friendly-name", "has-credential", "instance-id", "last-modified-time", "name", "password", "service-account-id", "type", "username"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -448,7 +449,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -947,7 +948,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1025,7 +1026,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1053,7 +1054,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1109,7 +1110,7 @@ async fn main() { let mut app = App::new("bigqueryconnection1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230115") .about("Allows users to manage BigQuery connections to external data sources.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_bigqueryconnection1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/bigqueryconnection1_beta1/Cargo.toml b/gen/bigqueryconnection1_beta1/Cargo.toml index b8bc5da459..7442d7092f 100644 --- a/gen/bigqueryconnection1_beta1/Cargo.toml +++ b/gen/bigqueryconnection1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-bigqueryconnection1_beta1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with BigQuery Connection Service (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigqueryconnection1_beta1" homepage = "https://cloud.google.com/bigquery/" -documentation = "https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115" license = "MIT" keywords = ["bigqueryconnection", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/bigqueryconnection1_beta1/README.md 
b/gen/bigqueryconnection1_beta1/README.md index 8beed2a189..18e0166705 100644 --- a/gen/bigqueryconnection1_beta1/README.md +++ b/gen/bigqueryconnection1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-bigqueryconnection1_beta1` library allows access to all features of the *Google BigQuery Connection Service* service. -This documentation was generated from *BigQuery Connection Service* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *bigqueryconnection:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *BigQuery Connection Service* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *bigqueryconnection:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *BigQuery Connection Service* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/bigquery/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/BigQueryConnectionService) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/BigQueryConnectionService) ... 
* projects - * [*locations connections create*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionCreateCall), [*locations connections delete*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionDeleteCall), [*locations connections get*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionGetCall), [*locations connections get iam policy*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionGetIamPolicyCall), [*locations connections list*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionListCall), [*locations connections patch*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionPatchCall), [*locations connections set iam policy*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionSetIamPolicyCall), [*locations connections test iam permissions*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionTestIamPermissionCall) and [*locations connections update credential*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionUpdateCredentialCall) + * [*locations connections create*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionCreateCall), [*locations connections 
delete*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionDeleteCall), [*locations connections get*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionGetCall), [*locations connections get iam policy*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionGetIamPolicyCall), [*locations connections list*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionListCall), [*locations connections patch*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionPatchCall), [*locations connections set iam policy*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionSetIamPolicyCall), [*locations connections test iam permissions*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionTestIamPermissionCall) and [*locations connections update credential*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/api::ProjectLocationConnectionUpdateCredentialCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/BigQueryConnectionService)** +* **[Hub](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/BigQueryConnectionService)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2-beta-1+20230115/google_bigqueryconnection1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-bigqueryconnection1_beta1/5.0.2+20230115/google_bigqueryconnection1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/bigqueryconnection1_beta1/src/api.rs b/gen/bigqueryconnection1_beta1/src/api.rs index 19cb93df56..305ed18793 100644 --- a/gen/bigqueryconnection1_beta1/src/api.rs +++ b/gen/bigqueryconnection1_beta1/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> BigQueryConnectionService { BigQueryConnectionService { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://bigqueryconnection.googleapis.com/".to_string(), _root_url: "https://bigqueryconnection.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> BigQueryConnectionService { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/bigqueryconnection1_beta1/src/client.rs b/gen/bigqueryconnection1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/bigqueryconnection1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/bigqueryconnection1_beta1/src/lib.rs b/gen/bigqueryconnection1_beta1/src/lib.rs index b149423b0a..c4e7df8eb9 100644 --- a/gen/bigqueryconnection1_beta1/src/lib.rs +++ b/gen/bigqueryconnection1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *BigQuery Connection Service* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *bigqueryconnection:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *BigQuery Connection Service* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *bigqueryconnection:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *BigQuery Connection Service* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/bigquery/). diff --git a/gen/bigquerydatatransfer1-cli/Cargo.toml b/gen/bigquerydatatransfer1-cli/Cargo.toml index 3c62008a44..6ef77a1daa 100644 --- a/gen/bigquerydatatransfer1-cli/Cargo.toml +++ b/gen/bigquerydatatransfer1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-bigquerydatatransfer1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with BigQuery Data Transfer (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigquerydatatransfer1-cli" @@ -20,13 +20,13 @@ name = "bigquerydatatransfer1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-bigquerydatatransfer1] path = "../bigquerydatatransfer1" -version = "4.0.1+20220225" +version = "5.0.2+20230115" + diff --git a/gen/bigquerydatatransfer1-cli/README.md b/gen/bigquerydatatransfer1-cli/README.md index 
ae8de0b519..82fc7e7f9d 100644 --- a/gen/bigquerydatatransfer1-cli/README.md +++ b/gen/bigquerydatatransfer1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *BigQuery Data Transfer* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *BigQuery Data Transfer* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash bigquerydatatransfer1 [options] diff --git a/gen/bigquerydatatransfer1-cli/mkdocs.yml b/gen/bigquerydatatransfer1-cli/mkdocs.yml index 115b9ff01f..ef7c4ddd7f 100644 --- a/gen/bigquerydatatransfer1-cli/mkdocs.yml +++ b/gen/bigquerydatatransfer1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: BigQuery Data Transfer v4.0.1+20220225 +site_name: BigQuery Data Transfer v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-bigquerydatatransfer1-cli site_description: A complete library to interact with BigQuery Data Transfer (protocol v1) @@ -7,40 +7,41 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/bigquerydatatran docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_data-sources-check-valid-creds.md', 'Projects', 'Data Sources Check Valid Creds'] -- ['projects_data-sources-get.md', 'Projects', 'Data Sources Get'] -- ['projects_data-sources-list.md', 'Projects', 'Data Sources List'] -- ['projects_enroll-data-sources.md', 'Projects', 'Enroll Data Sources'] -- ['projects_locations-data-sources-check-valid-creds.md', 'Projects', 'Locations Data Sources Check Valid Creds'] -- ['projects_locations-data-sources-get.md', 'Projects', 'Locations Data Sources Get'] -- ['projects_locations-data-sources-list.md', 'Projects', 'Locations Data Sources List'] -- ['projects_locations-enroll-data-sources.md', 'Projects', 'Locations Enroll Data Sources'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 
'Projects', 'Locations List'] -- ['projects_locations-transfer-configs-create.md', 'Projects', 'Locations Transfer Configs Create'] -- ['projects_locations-transfer-configs-delete.md', 'Projects', 'Locations Transfer Configs Delete'] -- ['projects_locations-transfer-configs-get.md', 'Projects', 'Locations Transfer Configs Get'] -- ['projects_locations-transfer-configs-list.md', 'Projects', 'Locations Transfer Configs List'] -- ['projects_locations-transfer-configs-patch.md', 'Projects', 'Locations Transfer Configs Patch'] -- ['projects_locations-transfer-configs-runs-delete.md', 'Projects', 'Locations Transfer Configs Runs Delete'] -- ['projects_locations-transfer-configs-runs-get.md', 'Projects', 'Locations Transfer Configs Runs Get'] -- ['projects_locations-transfer-configs-runs-list.md', 'Projects', 'Locations Transfer Configs Runs List'] -- ['projects_locations-transfer-configs-runs-transfer-logs-list.md', 'Projects', 'Locations Transfer Configs Runs Transfer Logs List'] -- ['projects_locations-transfer-configs-schedule-runs.md', 'Projects', 'Locations Transfer Configs Schedule Runs'] -- ['projects_locations-transfer-configs-start-manual-runs.md', 'Projects', 'Locations Transfer Configs Start Manual Runs'] -- ['projects_transfer-configs-create.md', 'Projects', 'Transfer Configs Create'] -- ['projects_transfer-configs-delete.md', 'Projects', 'Transfer Configs Delete'] -- ['projects_transfer-configs-get.md', 'Projects', 'Transfer Configs Get'] -- ['projects_transfer-configs-list.md', 'Projects', 'Transfer Configs List'] -- ['projects_transfer-configs-patch.md', 'Projects', 'Transfer Configs Patch'] -- ['projects_transfer-configs-runs-delete.md', 'Projects', 'Transfer Configs Runs Delete'] -- ['projects_transfer-configs-runs-get.md', 'Projects', 'Transfer Configs Runs Get'] -- ['projects_transfer-configs-runs-list.md', 'Projects', 'Transfer Configs Runs List'] -- ['projects_transfer-configs-runs-transfer-logs-list.md', 'Projects', 'Transfer Configs Runs Transfer 
Logs List'] -- ['projects_transfer-configs-schedule-runs.md', 'Projects', 'Transfer Configs Schedule Runs'] -- ['projects_transfer-configs-start-manual-runs.md', 'Projects', 'Transfer Configs Start Manual Runs'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Data Sources Check Valid Creds': 'projects_data-sources-check-valid-creds.md' + - 'Data Sources Get': 'projects_data-sources-get.md' + - 'Data Sources List': 'projects_data-sources-list.md' + - 'Enroll Data Sources': 'projects_enroll-data-sources.md' + - 'Locations Data Sources Check Valid Creds': 'projects_locations-data-sources-check-valid-creds.md' + - 'Locations Data Sources Get': 'projects_locations-data-sources-get.md' + - 'Locations Data Sources List': 'projects_locations-data-sources-list.md' + - 'Locations Enroll Data Sources': 'projects_locations-enroll-data-sources.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Transfer Configs Create': 'projects_locations-transfer-configs-create.md' + - 'Locations Transfer Configs Delete': 'projects_locations-transfer-configs-delete.md' + - 'Locations Transfer Configs Get': 'projects_locations-transfer-configs-get.md' + - 'Locations Transfer Configs List': 'projects_locations-transfer-configs-list.md' + - 'Locations Transfer Configs Patch': 'projects_locations-transfer-configs-patch.md' + - 'Locations Transfer Configs Runs Delete': 'projects_locations-transfer-configs-runs-delete.md' + - 'Locations Transfer Configs Runs Get': 'projects_locations-transfer-configs-runs-get.md' + - 'Locations Transfer Configs Runs List': 'projects_locations-transfer-configs-runs-list.md' + - 'Locations Transfer Configs Runs Transfer Logs List': 'projects_locations-transfer-configs-runs-transfer-logs-list.md' + - 'Locations Transfer Configs Schedule Runs': 'projects_locations-transfer-configs-schedule-runs.md' + - 'Locations Transfer Configs Start Manual Runs': 
'projects_locations-transfer-configs-start-manual-runs.md' + - 'Transfer Configs Create': 'projects_transfer-configs-create.md' + - 'Transfer Configs Delete': 'projects_transfer-configs-delete.md' + - 'Transfer Configs Get': 'projects_transfer-configs-get.md' + - 'Transfer Configs List': 'projects_transfer-configs-list.md' + - 'Transfer Configs Patch': 'projects_transfer-configs-patch.md' + - 'Transfer Configs Runs Delete': 'projects_transfer-configs-runs-delete.md' + - 'Transfer Configs Runs Get': 'projects_transfer-configs-runs-get.md' + - 'Transfer Configs Runs List': 'projects_transfer-configs-runs-list.md' + - 'Transfer Configs Runs Transfer Logs List': 'projects_transfer-configs-runs-transfer-logs-list.md' + - 'Transfer Configs Schedule Runs': 'projects_transfer-configs-schedule-runs.md' + - 'Transfer Configs Start Manual Runs': 'projects_transfer-configs-start-manual-runs.md' theme: readthedocs diff --git a/gen/bigquerydatatransfer1-cli/src/client.rs b/gen/bigquerydatatransfer1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/bigquerydatatransfer1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. 
Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/bigquerydatatransfer1-cli/src/main.rs b/gen/bigquerydatatransfer1-cli/src/main.rs index 87423aa20c..8744cfb4a9 100644 --- a/gen/bigquerydatatransfer1-cli/src/main.rs +++ b/gen/bigquerydatatransfer1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_bigquerydatatransfer1::{api, Error, oauth2}; +use google_bigquerydatatransfer1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -197,7 +196,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -477,7 +476,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -673,7 +672,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -951,7 +950,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "data-source-ids" => { call = call.add_data_source_ids(value.unwrap_or("")); @@ -1063,7 +1062,7 @@ where call = call.version_info(value.unwrap_or("")); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "service-account-name" => { call = call.service_account_name(value.unwrap_or("")); @@ -1238,7 +1237,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1297,7 +1296,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "message-types" => { call = call.add_message_types(value.unwrap_or("")); @@ -1748,7 +1747,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "data-source-ids" => { call = call.add_data_source_ids(value.unwrap_or("")); @@ -1860,7 +1859,7 @@ where call = call.version_info(value.unwrap_or("")); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "service-account-name" => { 
call = call.service_account_name(value.unwrap_or("")); @@ -2035,7 +2034,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2094,7 +2093,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "message-types" => { call = call.add_message_types(value.unwrap_or("")); @@ -2575,7 +2574,7 @@ async fn main() { Some(false)), ]), ("enroll-data-sources", - Some(r##"Enroll data sources in a user project. This allows users to create transfer configurations for these data sources. They will also appear in the ListDataSources RPC and as such, will appear in the BigQuery UI 'https://bigquery.cloud.google.com' (and the documents can be found at https://cloud.google.com/bigquery/bigquery-web-ui and https://cloud.google.com/bigquery/docs/working-with-transfers)."##), + Some(r##"Enroll data sources in a user project. This allows users to create transfer configurations for these data sources. They will also appear in the ListDataSources RPC and as such, will appear in the [BigQuery UI](https://console.cloud.google.com/bigquery), and the documents can be found in the public guide for [BigQuery Web UI](https://cloud.google.com/bigquery/bigquery-web-ui) and [Data Transfer Service](https://cloud.google.com/bigquery/docs/working-with-transfers)."##), "Details at http://byron.github.io/google-apis-rs/google_bigquerydatatransfer1_cli/projects_enroll-data-sources", vec![ (Some(r##"name"##), @@ -2675,7 +2674,7 @@ async fn main() { Some(false)), ]), ("locations-enroll-data-sources", - Some(r##"Enroll data sources in a user project. 
This allows users to create transfer configurations for these data sources. They will also appear in the ListDataSources RPC and as such, will appear in the BigQuery UI 'https://bigquery.cloud.google.com' (and the documents can be found at https://cloud.google.com/bigquery/bigquery-web-ui and https://cloud.google.com/bigquery/docs/working-with-transfers)."##), + Some(r##"Enroll data sources in a user project. This allows users to create transfer configurations for these data sources. They will also appear in the ListDataSources RPC and as such, will appear in the [BigQuery UI](https://console.cloud.google.com/bigquery), and the documents can be found in the public guide for [BigQuery Web UI](https://cloud.google.com/bigquery/bigquery-web-ui) and [Data Transfer Service](https://cloud.google.com/bigquery/docs/working-with-transfers)."##), "Details at http://byron.github.io/google-apis-rs/google_bigquerydatatransfer1_cli/projects_locations-enroll-data-sources", vec![ (Some(r##"name"##), @@ -3284,7 +3283,7 @@ async fn main() { let mut app = App::new("bigquerydatatransfer1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230115") .about("Schedule queries or transfer external data from SaaS applications to Google BigQuery on a regular basis.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_bigquerydatatransfer1_cli") .arg(Arg::with_name("url") diff --git a/gen/bigquerydatatransfer1/Cargo.toml b/gen/bigquerydatatransfer1/Cargo.toml index 1893b78926..9dd68230a7 100644 --- a/gen/bigquerydatatransfer1/Cargo.toml +++ b/gen/bigquerydatatransfer1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-bigquerydatatransfer1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with BigQuery Data Transfer (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigquerydatatransfer1" 
homepage = "https://cloud.google.com/bigquery-transfer/" -documentation = "https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115" license = "MIT" keywords = ["bigquerydatatransfer", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/bigquerydatatransfer1/README.md b/gen/bigquerydatatransfer1/README.md index 9f2b697399..101c2858a9 100644 --- a/gen/bigquerydatatransfer1/README.md +++ b/gen/bigquerydatatransfer1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-bigquerydatatransfer1` library allows access to all features of the *Google BigQuery Data Transfer* service. -This documentation was generated from *BigQuery Data Transfer* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *bigquerydatatransfer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *BigQuery Data Transfer* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *bigquerydatatransfer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *BigQuery Data Transfer* *v1* API can be found at the [official documentation site](https://cloud.google.com/bigquery-transfer/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/BigQueryDataTransfer) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/BigQueryDataTransfer) ... 
* projects - * [*data sources check valid creds*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectDataSourceCheckValidCredCall), [*data sources get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectDataSourceGetCall), [*data sources list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectDataSourceListCall), [*enroll data sources*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectEnrollDataSourceCall), [*locations data sources check valid creds*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationDataSourceCheckValidCredCall), [*locations data sources get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationDataSourceGetCall), [*locations data sources list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationDataSourceListCall), [*locations enroll data sources*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationEnrollDataSourceCall), [*locations get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationListCall), [*locations transfer configs create*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigCreateCall), [*locations transfer configs delete*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigDeleteCall), 
[*locations transfer configs get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigGetCall), [*locations transfer configs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigListCall), [*locations transfer configs patch*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigPatchCall), [*locations transfer configs runs delete*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigRunDeleteCall), [*locations transfer configs runs get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigRunGetCall), [*locations transfer configs runs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigRunListCall), [*locations transfer configs runs transfer logs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigRunTransferLogListCall), [*locations transfer configs schedule runs*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigScheduleRunCall), [*locations transfer configs start manual runs*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigStartManualRunCall), [*transfer configs create*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigCreateCall), [*transfer configs 
delete*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigDeleteCall), [*transfer configs get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigGetCall), [*transfer configs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigListCall), [*transfer configs patch*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigPatchCall), [*transfer configs runs delete*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigRunDeleteCall), [*transfer configs runs get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigRunGetCall), [*transfer configs runs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigRunListCall), [*transfer configs runs transfer logs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigRunTransferLogListCall), [*transfer configs schedule runs*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigScheduleRunCall) and [*transfer configs start manual runs*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigStartManualRunCall) + * [*data sources check valid creds*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectDataSourceCheckValidCredCall), [*data sources get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectDataSourceGetCall), [*data 
sources list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectDataSourceListCall), [*enroll data sources*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectEnrollDataSourceCall), [*locations data sources check valid creds*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationDataSourceCheckValidCredCall), [*locations data sources get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationDataSourceGetCall), [*locations data sources list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationDataSourceListCall), [*locations enroll data sources*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationEnrollDataSourceCall), [*locations get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationListCall), [*locations transfer configs create*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigCreateCall), [*locations transfer configs delete*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigDeleteCall), [*locations transfer configs get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigGetCall), [*locations transfer configs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigListCall), [*locations transfer configs 
patch*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigPatchCall), [*locations transfer configs runs delete*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigRunDeleteCall), [*locations transfer configs runs get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigRunGetCall), [*locations transfer configs runs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigRunListCall), [*locations transfer configs runs transfer logs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigRunTransferLogListCall), [*locations transfer configs schedule runs*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigScheduleRunCall), [*locations transfer configs start manual runs*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectLocationTransferConfigStartManualRunCall), [*transfer configs create*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigCreateCall), [*transfer configs delete*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigDeleteCall), [*transfer configs get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigGetCall), [*transfer configs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigListCall), [*transfer configs 
patch*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigPatchCall), [*transfer configs runs delete*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigRunDeleteCall), [*transfer configs runs get*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigRunGetCall), [*transfer configs runs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigRunListCall), [*transfer configs runs transfer logs list*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigRunTransferLogListCall), [*transfer configs schedule runs*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigScheduleRunCall) and [*transfer configs start manual runs*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/api::ProjectTransferConfigStartManualRunCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/BigQueryDataTransfer)** +* **[Hub](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/BigQueryDataTransfer)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::CallBuilder) +* **[Resources](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::Part)** + * **[Parts](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::Delegate) to the -[Method Builder](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::Delegate) to the +[Method Builder](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::RequestValue) and -[decodable](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::RequestValue) and +[decodable](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-bigquerydatatransfer1/5.0.2-beta-1+20230115/google_bigquerydatatransfer1/client::RequestValue) are moved +* [request values](https://docs.rs/google-bigquerydatatransfer1/5.0.2+20230115/google_bigquerydatatransfer1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/bigquerydatatransfer1/src/api.rs b/gen/bigquerydatatransfer1/src/api.rs index dff6f3c4f8..ae847c9d46 100644 --- a/gen/bigquerydatatransfer1/src/api.rs +++ b/gen/bigquerydatatransfer1/src/api.rs @@ -137,7 +137,7 @@ impl<'a, S> BigQueryDataTransfer { BigQueryDataTransfer { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://bigquerydatatransfer.googleapis.com/".to_string(), _root_url: "https://bigquerydatatransfer.googleapis.com/".to_string(), } @@ -148,7 +148,7 @@ impl<'a, S> BigQueryDataTransfer { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/bigquerydatatransfer1/src/client.rs b/gen/bigquerydatatransfer1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/bigquerydatatransfer1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/bigquerydatatransfer1/src/lib.rs b/gen/bigquerydatatransfer1/src/lib.rs index fbb1bc8c69..a8d2325144 100644 --- a/gen/bigquerydatatransfer1/src/lib.rs +++ b/gen/bigquerydatatransfer1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *BigQuery Data Transfer* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *bigquerydatatransfer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *BigQuery Data Transfer* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *bigquerydatatransfer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *BigQuery Data Transfer* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/bigquery-transfer/). diff --git a/gen/bigqueryreservation1-cli/Cargo.toml b/gen/bigqueryreservation1-cli/Cargo.toml index ae61bc28b1..c7eb90d90a 100644 --- a/gen/bigqueryreservation1-cli/Cargo.toml +++ b/gen/bigqueryreservation1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-bigqueryreservation1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with BigQuery Reservation (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigqueryreservation1-cli" @@ -20,13 +20,13 @@ name = "bigqueryreservation1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-bigqueryreservation1] path = "../bigqueryreservation1" -version = "4.0.1+20220226" +version = "5.0.2+20230117" + diff --git a/gen/bigqueryreservation1-cli/README.md b/gen/bigqueryreservation1-cli/README.md index 10194f86fa..6b25c10866 100644 
--- a/gen/bigqueryreservation1-cli/README.md +++ b/gen/bigqueryreservation1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *BigQuery Reservation* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *BigQuery Reservation* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash bigqueryreservation1 [options] @@ -42,6 +42,7 @@ bigqueryreservation1 [options] locations-reservations-assignments-delete [-p ]... [-o ] locations-reservations-assignments-list [-p ]... [-o ] locations-reservations-assignments-move (-r )... [-p ]... [-o ] + locations-reservations-assignments-patch (-r )... [-p ]... [-o ] locations-reservations-create (-r )... [-p ]... [-o ] locations-reservations-delete [-p ]... [-o ] locations-reservations-get [-p ]... [-o ] diff --git a/gen/bigqueryreservation1-cli/mkdocs.yml b/gen/bigqueryreservation1-cli/mkdocs.yml index 492117c6f7..70603138ab 100644 --- a/gen/bigqueryreservation1-cli/mkdocs.yml +++ b/gen/bigqueryreservation1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: BigQuery Reservation v4.0.1+20220226 +site_name: BigQuery Reservation v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-bigqueryreservation1-cli site_description: A complete library to interact with BigQuery Reservation (protocol v1) @@ -7,28 +7,30 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/bigqueryreservat docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-capacity-commitments-create.md', 'Projects', 'Locations Capacity Commitments Create'] -- ['projects_locations-capacity-commitments-delete.md', 'Projects', 'Locations Capacity Commitments Delete'] -- ['projects_locations-capacity-commitments-get.md', 'Projects', 'Locations Capacity Commitments Get'] -- ['projects_locations-capacity-commitments-list.md', 'Projects', 'Locations 
Capacity Commitments List'] -- ['projects_locations-capacity-commitments-merge.md', 'Projects', 'Locations Capacity Commitments Merge'] -- ['projects_locations-capacity-commitments-patch.md', 'Projects', 'Locations Capacity Commitments Patch'] -- ['projects_locations-capacity-commitments-split.md', 'Projects', 'Locations Capacity Commitments Split'] -- ['projects_locations-get-bi-reservation.md', 'Projects', 'Locations Get Bi Reservation'] -- ['projects_locations-reservations-assignments-create.md', 'Projects', 'Locations Reservations Assignments Create'] -- ['projects_locations-reservations-assignments-delete.md', 'Projects', 'Locations Reservations Assignments Delete'] -- ['projects_locations-reservations-assignments-list.md', 'Projects', 'Locations Reservations Assignments List'] -- ['projects_locations-reservations-assignments-move.md', 'Projects', 'Locations Reservations Assignments Move'] -- ['projects_locations-reservations-create.md', 'Projects', 'Locations Reservations Create'] -- ['projects_locations-reservations-delete.md', 'Projects', 'Locations Reservations Delete'] -- ['projects_locations-reservations-get.md', 'Projects', 'Locations Reservations Get'] -- ['projects_locations-reservations-list.md', 'Projects', 'Locations Reservations List'] -- ['projects_locations-reservations-patch.md', 'Projects', 'Locations Reservations Patch'] -- ['projects_locations-search-all-assignments.md', 'Projects', 'Locations Search All Assignments'] -- ['projects_locations-search-assignments.md', 'Projects', 'Locations Search Assignments'] -- ['projects_locations-update-bi-reservation.md', 'Projects', 'Locations Update Bi Reservation'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Capacity Commitments Create': 'projects_locations-capacity-commitments-create.md' + - 'Locations Capacity Commitments Delete': 'projects_locations-capacity-commitments-delete.md' + - 'Locations Capacity Commitments Get': 'projects_locations-capacity-commitments-get.md' + - 'Locations 
Capacity Commitments List': 'projects_locations-capacity-commitments-list.md' + - 'Locations Capacity Commitments Merge': 'projects_locations-capacity-commitments-merge.md' + - 'Locations Capacity Commitments Patch': 'projects_locations-capacity-commitments-patch.md' + - 'Locations Capacity Commitments Split': 'projects_locations-capacity-commitments-split.md' + - 'Locations Get Bi Reservation': 'projects_locations-get-bi-reservation.md' + - 'Locations Reservations Assignments Create': 'projects_locations-reservations-assignments-create.md' + - 'Locations Reservations Assignments Delete': 'projects_locations-reservations-assignments-delete.md' + - 'Locations Reservations Assignments List': 'projects_locations-reservations-assignments-list.md' + - 'Locations Reservations Assignments Move': 'projects_locations-reservations-assignments-move.md' + - 'Locations Reservations Assignments Patch': 'projects_locations-reservations-assignments-patch.md' + - 'Locations Reservations Create': 'projects_locations-reservations-create.md' + - 'Locations Reservations Delete': 'projects_locations-reservations-delete.md' + - 'Locations Reservations Get': 'projects_locations-reservations-get.md' + - 'Locations Reservations List': 'projects_locations-reservations-list.md' + - 'Locations Reservations Patch': 'projects_locations-reservations-patch.md' + - 'Locations Search All Assignments': 'projects_locations-search-all-assignments.md' + - 'Locations Search Assignments': 'projects_locations-search-assignments.md' + - 'Locations Update Bi Reservation': 'projects_locations-update-bi-reservation.md' theme: readthedocs diff --git a/gen/bigqueryreservation1-cli/src/client.rs b/gen/bigqueryreservation1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/bigqueryreservation1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, 
ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/bigqueryreservation1-cli/src/main.rs b/gen/bigqueryreservation1-cli/src/main.rs index c37f6cc5b3..c56179fa99 100644 --- a/gen/bigqueryreservation1-cli/src/main.rs +++ b/gen/bigqueryreservation1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_bigqueryreservation1::{api, Error, oauth2}; +use google_bigqueryreservation1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -100,7 +99,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "enforce-single-admin-project-per-org" => { - call = call.enforce_single_admin_project_per_org(arg_from_str(value.unwrap_or("false"), err, "enforce-single-admin-project-per-org", "boolean")); + call = call.enforce_single_admin_project_per_org( value.map(|v| arg_from_str(v, err, "enforce-single-admin-project-per-org", "boolean")).unwrap_or(false)); }, "capacity-commitment-id" => { call = call.capacity_commitment_id(value.unwrap_or("")); @@ -159,7 +158,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -270,7 +269,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -453,7 +452,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -793,7 +792,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -927,6 +926,98 @@ where } } + async fn _projects_locations_reservations_assignments_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "assignee" => Some(("assignee", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job-type" => Some(("jobType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["assignee", "job-type", "name", "state"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Assignment = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_reservations_assignments_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = 
call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_reservations_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1136,7 +1227,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1231,7 +1322,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1293,7 +1384,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1355,7 +1446,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, 
"page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1446,7 +1537,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1538,6 +1629,9 @@ where ("locations-reservations-assignments-move", Some(opt)) => { call_result = self._projects_locations_reservations_assignments_move(opt, dry_run, &mut err).await; }, + ("locations-reservations-assignments-patch", Some(opt)) => { + call_result = self._projects_locations_reservations_assignments_patch(opt, dry_run, &mut err).await; + }, ("locations-reservations-create", Some(opt)) => { call_result = self._projects_locations_reservations_create(opt, dry_run, &mut err).await; }, @@ -1641,7 +1735,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-capacity-commitments-create', 'locations-capacity-commitments-delete', 'locations-capacity-commitments-get', 'locations-capacity-commitments-list', 'locations-capacity-commitments-merge', 'locations-capacity-commitments-patch', 'locations-capacity-commitments-split', 'locations-get-bi-reservation', 'locations-reservations-assignments-create', 'locations-reservations-assignments-delete', 'locations-reservations-assignments-list', 'locations-reservations-assignments-move', 'locations-reservations-create', 'locations-reservations-delete', 'locations-reservations-get', 'locations-reservations-list', 'locations-reservations-patch', 'locations-search-all-assignments', 'locations-search-assignments' and 'locations-update-bi-reservation'", vec![ + ("projects", "methods: 'locations-capacity-commitments-create', 'locations-capacity-commitments-delete', 'locations-capacity-commitments-get', 'locations-capacity-commitments-list', 
'locations-capacity-commitments-merge', 'locations-capacity-commitments-patch', 'locations-capacity-commitments-split', 'locations-get-bi-reservation', 'locations-reservations-assignments-create', 'locations-reservations-assignments-delete', 'locations-reservations-assignments-list', 'locations-reservations-assignments-move', 'locations-reservations-assignments-patch', 'locations-reservations-create', 'locations-reservations-delete', 'locations-reservations-get', 'locations-reservations-list', 'locations-reservations-patch', 'locations-search-all-assignments', 'locations-search-assignments' and 'locations-update-bi-reservation'", vec![ ("locations-capacity-commitments-create", Some(r##"Creates a new capacity commitment resource."##), "Details at http://byron.github.io/google-apis-rs/google_bigqueryreservation1_cli/projects_locations-capacity-commitments-create", @@ -1770,7 +1864,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Output only. The resource name of the capacity commitment, e.g., `projects/myproject/locations/US/capacityCommitments/123` For the commitment id, it must only contain lower case alphanumeric characters or dashes.It must start with a letter and must not end with a dash. Its maximum length is 64 characters."##), + Some(r##"Output only. The resource name of the capacity commitment, e.g., `projects/myproject/locations/US/capacityCommitments/123` The commitment_id must only contain lower case alphanumeric characters or dashes. It must start with a letter and must not end with a dash. Its maximum length is 64 characters."##), Some(true), Some(false)), @@ -1793,7 +1887,7 @@ async fn main() { Some(false)), ]), ("locations-capacity-commitments-split", - Some(r##"Splits capacity commitment to two commitments of the same plan and `commitment_end_time`. A common use case is to enable downgrading commitments. 
For example, in order to downgrade from 10000 slots to 8000, you might split a 10000 capacity commitment into commitments of 2000 and 8000. Then, you would change the plan of the first one to `FLEX` and then delete it."##), + Some(r##"Splits capacity commitment to two commitments of the same plan and `commitment_end_time`. A common use case is to enable downgrading commitments. For example, in order to downgrade from 10000 slots to 8000, you might split a 10000 capacity commitment into commitments of 2000 and 8000. Then, you delete the first one after the commitment end time passes."##), "Details at http://byron.github.io/google-apis-rs/google_bigqueryreservation1_cli/projects_locations-capacity-commitments-split", vec![ (Some(r##"name"##), @@ -1936,6 +2030,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-reservations-assignments-patch", + Some(r##"Updates an existing assignment. Only the `priority` field can be updated."##), + "Details at http://byron.github.io/google-apis-rs/google_bigqueryreservation1_cli/projects_locations-reservations-assignments-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. Name of the resource. E.g.: `projects/myproject/locations/US/reservations/team1-prod/assignments/123`. 
The assignment_id must only contain lower case alphanumeric characters or dashes and the max length is 64 characters."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2042,7 +2164,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The resource name of the reservation, e.g., `projects/*/locations/*/reservations/team1-prod`. For the reservation id, it must only contain lower case alphanumeric characters or dashes.It must start with a letter and must not end with a dash. Its maximum length is 64 characters."##), + Some(r##"The resource name of the reservation, e.g., `projects/*/locations/*/reservations/team1-prod`. The reservation_id must only contain lower case alphanumeric characters or dashes. It must start with a letter and must not end with a dash. 
Its maximum length is 64 characters."##), Some(true), Some(false)), @@ -2142,7 +2264,7 @@ async fn main() { let mut app = App::new("bigqueryreservation1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230117") .about("A service to modify your BigQuery flat-rate reservations.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_bigqueryreservation1_cli") .arg(Arg::with_name("url") diff --git a/gen/bigqueryreservation1/Cargo.toml b/gen/bigqueryreservation1/Cargo.toml index 761f697207..b5cc6426dc 100644 --- a/gen/bigqueryreservation1/Cargo.toml +++ b/gen/bigqueryreservation1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-bigqueryreservation1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with BigQuery Reservation (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigqueryreservation1" homepage = "https://cloud.google.com/bigquery/" -documentation = "https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-bigqueryreservation1/5.0.2+20230117" license = "MIT" keywords = ["bigqueryreservation", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/bigqueryreservation1/README.md b/gen/bigqueryreservation1/README.md index ea3748c04e..f41ba150ee 100644 --- a/gen/bigqueryreservation1/README.md +++ b/gen/bigqueryreservation1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-bigqueryreservation1` library allows access to all features of the *Google BigQuery Reservation* service. -This documentation was generated from *BigQuery Reservation* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *bigqueryreservation:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *BigQuery Reservation* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *bigqueryreservation:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *BigQuery Reservation* *v1* API can be found at the [official documentation site](https://cloud.google.com/bigquery/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/BigQueryReservation) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/BigQueryReservation) ... * projects - * [*locations capacity commitments create*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentCreateCall), [*locations capacity commitments delete*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentDeleteCall), [*locations capacity commitments get*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentGetCall), [*locations capacity commitments list*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentListCall), [*locations capacity commitments merge*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentMergeCall), [*locations capacity commitments patch*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentPatchCall), [*locations capacity commitments 
split*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentSplitCall), [*locations get bi reservation*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationGetBiReservationCall), [*locations reservations assignments create*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentCreateCall), [*locations reservations assignments delete*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentDeleteCall), [*locations reservations assignments list*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentListCall), [*locations reservations assignments move*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentMoveCall), [*locations reservations assignments patch*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentPatchCall), [*locations reservations create*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationCreateCall), [*locations reservations delete*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationDeleteCall), [*locations reservations get*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationGetCall), [*locations reservations list*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationListCall), [*locations reservations 
patch*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationReservationPatchCall), [*locations search all assignments*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationSearchAllAssignmentCall), [*locations search assignments*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationSearchAssignmentCall) and [*locations update bi reservation*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/api::ProjectLocationUpdateBiReservationCall) + * [*locations capacity commitments create*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentCreateCall), [*locations capacity commitments delete*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentDeleteCall), [*locations capacity commitments get*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentGetCall), [*locations capacity commitments list*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentListCall), [*locations capacity commitments merge*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentMergeCall), [*locations capacity commitments patch*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentPatchCall), [*locations capacity commitments split*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationCapacityCommitmentSplitCall), [*locations get bi 
reservation*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationGetBiReservationCall), [*locations reservations assignments create*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentCreateCall), [*locations reservations assignments delete*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentDeleteCall), [*locations reservations assignments list*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentListCall), [*locations reservations assignments move*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentMoveCall), [*locations reservations assignments patch*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationAssignmentPatchCall), [*locations reservations create*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationCreateCall), [*locations reservations delete*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationDeleteCall), [*locations reservations get*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationGetCall), [*locations reservations list*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationListCall), [*locations reservations patch*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationReservationPatchCall), [*locations search all 
assignments*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationSearchAllAssignmentCall), [*locations search assignments*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationSearchAssignmentCall) and [*locations update bi reservation*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/api::ProjectLocationUpdateBiReservationCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/BigQueryReservation)** +* **[Hub](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/BigQueryReservation)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::CallBuilder) -* **[Resources](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::CallBuilder) +* **[Resources](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::Part)** + * **[Parts](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::Delegate) to the -[Method Builder](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::Delegate) to the +[Method Builder](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::RequestValue) and -[decodable](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::RequestValue) and +[decodable](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-bigqueryreservation1/5.0.2-beta-1+20230117/google_bigqueryreservation1/client::RequestValue) are moved +* [request values](https://docs.rs/google-bigqueryreservation1/5.0.2+20230117/google_bigqueryreservation1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/bigqueryreservation1/src/api.rs b/gen/bigqueryreservation1/src/api.rs index f875bed407..38f8cedefe 100644 --- a/gen/bigqueryreservation1/src/api.rs +++ b/gen/bigqueryreservation1/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> BigQueryReservation { BigQueryReservation { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://bigqueryreservation.googleapis.com/".to_string(), _root_url: "https://bigqueryreservation.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> BigQueryReservation { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/bigqueryreservation1/src/client.rs b/gen/bigqueryreservation1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/bigqueryreservation1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/bigqueryreservation1/src/lib.rs b/gen/bigqueryreservation1/src/lib.rs index 19263586ad..dc96b5c9af 100644 --- a/gen/bigqueryreservation1/src/lib.rs +++ b/gen/bigqueryreservation1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *BigQuery Reservation* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *bigqueryreservation:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *BigQuery Reservation* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *bigqueryreservation:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *BigQuery Reservation* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/bigquery/). diff --git a/gen/bigtableadmin2-cli/Cargo.toml b/gen/bigtableadmin2-cli/Cargo.toml index 8eb2aa6419..650771ac5b 100644 --- a/gen/bigtableadmin2-cli/Cargo.toml +++ b/gen/bigtableadmin2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-bigtableadmin2-cli" -version = "4.0.1+20220222" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Bigtable Admin (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigtableadmin2-cli" @@ -20,13 +20,13 @@ name = "bigtableadmin2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-bigtableadmin2] path = "../bigtableadmin2" -version = "4.0.1+20220222" +version = "5.0.2+20230110" + diff --git a/gen/bigtableadmin2-cli/README.md b/gen/bigtableadmin2-cli/README.md index a4c82cc815..31b6783dab 100644 --- a/gen/bigtableadmin2-cli/README.md +++ b/gen/bigtableadmin2-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Bigtable Admin* API at revision *20220222*. The CLI is at version *4.0.1*. +This documentation was generated from the *Bigtable Admin* API at revision *20230110*. The CLI is at version *5.0.2*. ```bash bigtableadmin2 [options] @@ -40,6 +40,7 @@ bigtableadmin2 [options] instances-app-profiles-get [-p ]... [-o ] instances-app-profiles-list [-p ]... [-o ] instances-app-profiles-patch (-r )... [-p ]... [-o ] + instances-clusters-backups-copy (-r )... [-p ]... [-o ] instances-clusters-backups-create (-r )... [-p ]... [-o ] instances-clusters-backups-delete [-p ]... [-o ] instances-clusters-backups-get [-p ]... [-o ] @@ -51,6 +52,7 @@ bigtableadmin2 [options] instances-clusters-create (-r )... [-p ]... [-o ] instances-clusters-delete [-p ]... [-o ] instances-clusters-get [-p ]... [-o ] + instances-clusters-hot-tablets-list [-p ]... [-o ] instances-clusters-list [-p ]... [-o ] instances-clusters-partial-update-cluster (-r )... [-p ]... [-o ] instances-clusters-update (-r )... [-p ]... [-o ] @@ -70,9 +72,11 @@ bigtableadmin2 [options] instances-tables-get-iam-policy (-r )... [-p ]... [-o ] instances-tables-list [-p ]... [-o ] instances-tables-modify-column-families (-r )... [-p ]... [-o ] + instances-tables-patch (-r )... [-p ]... [-o ] instances-tables-restore (-r )... [-p ]... [-o ] instances-tables-set-iam-policy (-r )... [-p ]... [-o ] instances-tables-test-iam-permissions (-r )... [-p ]... [-o ] + instances-tables-undelete (-r )... [-p ]... [-o ] instances-test-iam-permissions (-r )... [-p ]... [-o ] instances-update (-r )... [-p ]... [-o ] locations-get [-p ]... 
[-o ] diff --git a/gen/bigtableadmin2-cli/mkdocs.yml b/gen/bigtableadmin2-cli/mkdocs.yml index 9d20eb5740..5be3a35d34 100644 --- a/gen/bigtableadmin2-cli/mkdocs.yml +++ b/gen/bigtableadmin2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Bigtable Admin v4.0.1+20220222 +site_name: Bigtable Admin v5.0.2+20230110 site_url: http://byron.github.io/google-apis-rs/google-bigtableadmin2-cli site_description: A complete library to interact with Bigtable Admin (protocol v2) @@ -7,54 +7,60 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/bigtableadmin2-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_projects-operations-list.md', 'Operations', 'Projects Operations List'] -- ['projects_instances-app-profiles-create.md', 'Projects', 'Instances App Profiles Create'] -- ['projects_instances-app-profiles-delete.md', 'Projects', 'Instances App Profiles Delete'] -- ['projects_instances-app-profiles-get.md', 'Projects', 'Instances App Profiles Get'] -- ['projects_instances-app-profiles-list.md', 'Projects', 'Instances App Profiles List'] -- ['projects_instances-app-profiles-patch.md', 'Projects', 'Instances App Profiles Patch'] -- ['projects_instances-clusters-backups-create.md', 'Projects', 'Instances Clusters Backups Create'] -- ['projects_instances-clusters-backups-delete.md', 'Projects', 'Instances Clusters Backups Delete'] -- ['projects_instances-clusters-backups-get.md', 'Projects', 'Instances Clusters Backups Get'] -- ['projects_instances-clusters-backups-get-iam-policy.md', 'Projects', 'Instances Clusters Backups Get Iam Policy'] -- ['projects_instances-clusters-backups-list.md', 'Projects', 'Instances Clusters Backups List'] -- ['projects_instances-clusters-backups-patch.md', 'Projects', 'Instances Clusters Backups Patch'] -- 
['projects_instances-clusters-backups-set-iam-policy.md', 'Projects', 'Instances Clusters Backups Set Iam Policy'] -- ['projects_instances-clusters-backups-test-iam-permissions.md', 'Projects', 'Instances Clusters Backups Test Iam Permissions'] -- ['projects_instances-clusters-create.md', 'Projects', 'Instances Clusters Create'] -- ['projects_instances-clusters-delete.md', 'Projects', 'Instances Clusters Delete'] -- ['projects_instances-clusters-get.md', 'Projects', 'Instances Clusters Get'] -- ['projects_instances-clusters-list.md', 'Projects', 'Instances Clusters List'] -- ['projects_instances-clusters-partial-update-cluster.md', 'Projects', 'Instances Clusters Partial Update Cluster'] -- ['projects_instances-clusters-update.md', 'Projects', 'Instances Clusters Update'] -- ['projects_instances-create.md', 'Projects', 'Instances Create'] -- ['projects_instances-delete.md', 'Projects', 'Instances Delete'] -- ['projects_instances-get.md', 'Projects', 'Instances Get'] -- ['projects_instances-get-iam-policy.md', 'Projects', 'Instances Get Iam Policy'] -- ['projects_instances-list.md', 'Projects', 'Instances List'] -- ['projects_instances-partial-update-instance.md', 'Projects', 'Instances Partial Update Instance'] -- ['projects_instances-set-iam-policy.md', 'Projects', 'Instances Set Iam Policy'] -- ['projects_instances-tables-check-consistency.md', 'Projects', 'Instances Tables Check Consistency'] -- ['projects_instances-tables-create.md', 'Projects', 'Instances Tables Create'] -- ['projects_instances-tables-delete.md', 'Projects', 'Instances Tables Delete'] -- ['projects_instances-tables-drop-row-range.md', 'Projects', 'Instances Tables Drop Row Range'] -- ['projects_instances-tables-generate-consistency-token.md', 'Projects', 'Instances Tables Generate Consistency Token'] -- ['projects_instances-tables-get.md', 'Projects', 'Instances Tables Get'] -- ['projects_instances-tables-get-iam-policy.md', 'Projects', 'Instances Tables Get Iam Policy'] -- 
['projects_instances-tables-list.md', 'Projects', 'Instances Tables List'] -- ['projects_instances-tables-modify-column-families.md', 'Projects', 'Instances Tables Modify Column Families'] -- ['projects_instances-tables-restore.md', 'Projects', 'Instances Tables Restore'] -- ['projects_instances-tables-set-iam-policy.md', 'Projects', 'Instances Tables Set Iam Policy'] -- ['projects_instances-tables-test-iam-permissions.md', 'Projects', 'Instances Tables Test Iam Permissions'] -- ['projects_instances-test-iam-permissions.md', 'Projects', 'Instances Test Iam Permissions'] -- ['projects_instances-update.md', 'Projects', 'Instances Update'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'Get': 'operations_get.md' + - 'Projects Operations List': 'operations_projects-operations-list.md' +- 'Projects': + - 'Instances App Profiles Create': 'projects_instances-app-profiles-create.md' + - 'Instances App Profiles Delete': 'projects_instances-app-profiles-delete.md' + - 'Instances App Profiles Get': 'projects_instances-app-profiles-get.md' + - 'Instances App Profiles List': 'projects_instances-app-profiles-list.md' + - 'Instances App Profiles Patch': 'projects_instances-app-profiles-patch.md' + - 'Instances Clusters Backups Copy': 'projects_instances-clusters-backups-copy.md' + - 'Instances Clusters Backups Create': 'projects_instances-clusters-backups-create.md' + - 'Instances Clusters Backups Delete': 'projects_instances-clusters-backups-delete.md' + - 'Instances Clusters Backups Get': 'projects_instances-clusters-backups-get.md' + - 'Instances Clusters Backups Get Iam Policy': 'projects_instances-clusters-backups-get-iam-policy.md' + - 'Instances Clusters Backups List': 'projects_instances-clusters-backups-list.md' + - 'Instances Clusters Backups Patch': 
'projects_instances-clusters-backups-patch.md' + - 'Instances Clusters Backups Set Iam Policy': 'projects_instances-clusters-backups-set-iam-policy.md' + - 'Instances Clusters Backups Test Iam Permissions': 'projects_instances-clusters-backups-test-iam-permissions.md' + - 'Instances Clusters Create': 'projects_instances-clusters-create.md' + - 'Instances Clusters Delete': 'projects_instances-clusters-delete.md' + - 'Instances Clusters Get': 'projects_instances-clusters-get.md' + - 'Instances Clusters Hot Tablets List': 'projects_instances-clusters-hot-tablets-list.md' + - 'Instances Clusters List': 'projects_instances-clusters-list.md' + - 'Instances Clusters Partial Update Cluster': 'projects_instances-clusters-partial-update-cluster.md' + - 'Instances Clusters Update': 'projects_instances-clusters-update.md' + - 'Instances Create': 'projects_instances-create.md' + - 'Instances Delete': 'projects_instances-delete.md' + - 'Instances Get': 'projects_instances-get.md' + - 'Instances Get Iam Policy': 'projects_instances-get-iam-policy.md' + - 'Instances List': 'projects_instances-list.md' + - 'Instances Partial Update Instance': 'projects_instances-partial-update-instance.md' + - 'Instances Set Iam Policy': 'projects_instances-set-iam-policy.md' + - 'Instances Tables Check Consistency': 'projects_instances-tables-check-consistency.md' + - 'Instances Tables Create': 'projects_instances-tables-create.md' + - 'Instances Tables Delete': 'projects_instances-tables-delete.md' + - 'Instances Tables Drop Row Range': 'projects_instances-tables-drop-row-range.md' + - 'Instances Tables Generate Consistency Token': 'projects_instances-tables-generate-consistency-token.md' + - 'Instances Tables Get': 'projects_instances-tables-get.md' + - 'Instances Tables Get Iam Policy': 'projects_instances-tables-get-iam-policy.md' + - 'Instances Tables List': 'projects_instances-tables-list.md' + - 'Instances Tables Modify Column Families': 'projects_instances-tables-modify-column-families.md' 
+ - 'Instances Tables Patch': 'projects_instances-tables-patch.md' + - 'Instances Tables Restore': 'projects_instances-tables-restore.md' + - 'Instances Tables Set Iam Policy': 'projects_instances-tables-set-iam-policy.md' + - 'Instances Tables Test Iam Permissions': 'projects_instances-tables-test-iam-permissions.md' + - 'Instances Tables Undelete': 'projects_instances-tables-undelete.md' + - 'Instances Test Iam Permissions': 'projects_instances-test-iam-permissions.md' + - 'Instances Update': 'projects_instances-update.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/bigtableadmin2-cli/src/client.rs b/gen/bigtableadmin2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/bigtableadmin2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/bigtableadmin2-cli/src/main.rs b/gen/bigtableadmin2-cli/src/main.rs index a982e0fff2..7ef680cafe 100644 --- a/gen/bigtableadmin2-cli/src/main.rs +++ b/gen/bigtableadmin2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_bigtableadmin2::{api, Error, oauth2}; +use google_bigtableadmin2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -217,7 +216,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -314,7 +313,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ignore-warnings" => { - call = call.ignore_warnings(arg_from_str(value.unwrap_or("false"), err, "ignore-warnings", "boolean")); + call = call.ignore_warnings( value.map(|v| arg_from_str(v, err, "ignore-warnings", "boolean")).unwrap_or(false)); }, "app-profile-id" => { call = call.app_profile_id(value.unwrap_or("")); @@ -373,7 +372,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ignore-warnings" => { - call = call.ignore_warnings(arg_from_str(value.unwrap_or("false"), err, "ignore-warnings", "boolean")); + call = call.ignore_warnings( value.map(|v| arg_from_str(v, err, "ignore-warnings", "boolean")).unwrap_or(false)); 
}, _ => { let mut found = false; @@ -484,7 +483,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -578,10 +577,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "ignore-warnings" => { - call = call.ignore_warnings(arg_from_str(value.unwrap_or("false"), err, "ignore-warnings", "boolean")); + call = call.ignore_warnings( value.map(|v| arg_from_str(v, err, "ignore-warnings", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -630,6 +629,93 @@ where } } + async fn _projects_instances_clusters_backups_copy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "backup-id" => Some(("backupId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "expire-time" => Some(("expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-backup" => Some(("sourceBackup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-id", "expire-time", "source-backup"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CopyBackupRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().instances_clusters_backups_copy(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_instances_clusters_backups_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -661,11 +747,12 @@ where "expire-time" => Some(("expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "size-bytes" => Some(("sizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-backup" => Some(("sourceBackup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-table" => Some(("sourceTable", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "start-time" => Some(("startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "encryption-info", "encryption-status", "encryption-type", "end-time", "expire-time", "kms-key-version", "message", "name", "size-bytes", "source-table", "start-time", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "encryption-info", "encryption-status", "encryption-type", "end-time", "expire-time", "kms-key-version", "message", "name", "size-bytes", "source-backup", "source-table", "start-time", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -928,7 +1015,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1014,11 +1101,12 @@ where "expire-time" => Some(("expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "size-bytes" => Some(("sizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-backup" => Some(("sourceBackup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-table" => Some(("sourceTable", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "start-time" => Some(("startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "encryption-info", "encryption-status", "encryption-type", "end-time", "expire-time", "kms-key-version", "message", "name", "size-bytes", "source-table", "start-time", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "encryption-info", "encryption-status", "encryption-type", "end-time", "expire-time", "kms-key-version", "message", "name", "size-bytes", "source-backup", "source-table", "start-time", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1033,7 +1121,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1280,6 
+1368,7 @@ where "cluster-config.cluster-autoscaling-config.autoscaling-limits.max-serve-nodes" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingLimits.maxServeNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster-config.cluster-autoscaling-config.autoscaling-limits.min-serve-nodes" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingLimits.minServeNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster-config.cluster-autoscaling-config.autoscaling-targets.cpu-utilization-percent" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingTargets.cpuUtilizationPercent", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster-config.cluster-autoscaling-config.autoscaling-targets.storage-utilization-gib-per-node" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingTargets.storageUtilizationGibPerNode", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "default-storage-type" => Some(("defaultStorageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "encryption-config.kms-key-name" => Some(("encryptionConfig.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1287,7 +1376,7 @@ where "serve-nodes" => Some(("serveNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-limits", "autoscaling-targets", "cluster-autoscaling-config", "cluster-config", "cpu-utilization-percent", "default-storage-type", "encryption-config", "kms-key-name", "location", "max-serve-nodes", "min-serve-nodes", "name", "serve-nodes", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-limits", 
"autoscaling-targets", "cluster-autoscaling-config", "cluster-config", "cpu-utilization-percent", "default-storage-type", "encryption-config", "kms-key-name", "location", "max-serve-nodes", "min-serve-nodes", "name", "serve-nodes", "state", "storage-utilization-gib-per-node"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1455,6 +1544,71 @@ where } } + async fn _projects_instances_clusters_hot_tablets_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().instances_clusters_hot_tablets_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "start-time" => { + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "end-time" => { + call = call.end_time( value.map(|v| arg_from_str(v, err, "end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["end-time", "page-size", "page-token", "start-time"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_instances_clusters_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().instances_clusters_list(opt.value_of("parent").unwrap_or("")); @@ -1537,6 +1691,7 @@ where "cluster-config.cluster-autoscaling-config.autoscaling-limits.max-serve-nodes" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingLimits.maxServeNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster-config.cluster-autoscaling-config.autoscaling-limits.min-serve-nodes" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingLimits.minServeNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster-config.cluster-autoscaling-config.autoscaling-targets.cpu-utilization-percent" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingTargets.cpuUtilizationPercent", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster-config.cluster-autoscaling-config.autoscaling-targets.storage-utilization-gib-per-node" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingTargets.storageUtilizationGibPerNode", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "default-storage-type" => 
Some(("defaultStorageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "encryption-config.kms-key-name" => Some(("encryptionConfig.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1544,7 +1699,7 @@ where "serve-nodes" => Some(("serveNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-limits", "autoscaling-targets", "cluster-autoscaling-config", "cluster-config", "cpu-utilization-percent", "default-storage-type", "encryption-config", "kms-key-name", "location", "max-serve-nodes", "min-serve-nodes", "name", "serve-nodes", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-limits", "autoscaling-targets", "cluster-autoscaling-config", "cluster-config", "cpu-utilization-percent", "default-storage-type", "encryption-config", "kms-key-name", "location", "max-serve-nodes", "min-serve-nodes", "name", "serve-nodes", "state", "storage-utilization-gib-per-node"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1559,7 +1714,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1634,6 +1789,7 @@ where "cluster-config.cluster-autoscaling-config.autoscaling-limits.max-serve-nodes" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingLimits.maxServeNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"cluster-config.cluster-autoscaling-config.autoscaling-limits.min-serve-nodes" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingLimits.minServeNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster-config.cluster-autoscaling-config.autoscaling-targets.cpu-utilization-percent" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingTargets.cpuUtilizationPercent", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster-config.cluster-autoscaling-config.autoscaling-targets.storage-utilization-gib-per-node" => Some(("clusterConfig.clusterAutoscalingConfig.autoscalingTargets.storageUtilizationGibPerNode", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "default-storage-type" => Some(("defaultStorageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "encryption-config.kms-key-name" => Some(("encryptionConfig.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1641,7 +1797,7 @@ where "serve-nodes" => Some(("serveNodes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-limits", "autoscaling-targets", "cluster-autoscaling-config", "cluster-config", "cpu-utilization-percent", "default-storage-type", "encryption-config", "kms-key-name", "location", "max-serve-nodes", "min-serve-nodes", "name", "serve-nodes", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-limits", "autoscaling-targets", "cluster-autoscaling-config", "cluster-config", "cpu-utilization-percent", "default-storage-type", "encryption-config", "kms-key-name", "location", "max-serve-nodes", "min-serve-nodes", "name", "serve-nodes", "state", 
"storage-utilization-gib-per-node"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1728,12 +1884,13 @@ where "instance.display-name" => Some(("instance.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance.labels" => Some(("instance.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "instance.name" => Some(("instance.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "instance.satisfies-pzs" => Some(("instance.satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance.state" => Some(("instance.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance.type" => Some(("instance.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-id" => Some(("instanceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "instance", "instance-id", "labels", "name", "parent", "state", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "instance", "instance-id", "labels", "name", "parent", "satisfies-pzs", "state", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2065,10 +2222,11 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, 
ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "labels", "name", "state", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "labels", "name", "satisfies-pzs", "state", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2083,7 +2241,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2327,16 +2485,22 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "table.deletion-protection" => Some(("table.deletionProtection", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "table.granularity" => Some(("table.granularity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "table.name" => Some(("table.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "table.restore-info.backup-info.backup" => Some(("table.restoreInfo.backupInfo.backup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "table.restore-info.backup-info.end-time" => Some(("table.restoreInfo.backupInfo.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "table.restore-info.backup-info.source-backup" => Some(("table.restoreInfo.backupInfo.sourceBackup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "table.restore-info.backup-info.source-table" => Some(("table.restoreInfo.backupInfo.sourceTable", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "table.restore-info.backup-info.start-time" => Some(("table.restoreInfo.backupInfo.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "table.restore-info.source-type" => Some(("table.restoreInfo.sourceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "table.stats.average-cells-per-column" => Some(("table.stats.averageCellsPerColumn", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "table.stats.average-columns-per-row" => Some(("table.stats.averageColumnsPerRow", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "table.stats.logical-data-bytes" => Some(("table.stats.logicalDataBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "table.stats.row-count" => Some(("table.stats.rowCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "table-id" => Some(("tableId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["backup", "backup-info", "end-time", "granularity", "name", "restore-info", "source-table", "source-type", "start-time", "table", "table-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["average-cells-per-column", "average-columns-per-row", "backup", "backup-info", "deletion-protection", "end-time", "granularity", "logical-data-bytes", "name", "restore-info", "row-count", "source-backup", "source-table", "source-type", "start-time", "stats", "table", "table-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2772,7 +2936,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2905,6 +3069,107 @@ where } } + async fn _projects_instances_tables_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "deletion-protection" => Some(("deletionProtection", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "granularity" => Some(("granularity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "restore-info.backup-info.backup" => Some(("restoreInfo.backupInfo.backup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "restore-info.backup-info.end-time" => Some(("restoreInfo.backupInfo.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "restore-info.backup-info.source-backup" => Some(("restoreInfo.backupInfo.sourceBackup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "restore-info.backup-info.source-table" => Some(("restoreInfo.backupInfo.sourceTable", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "restore-info.backup-info.start-time" => Some(("restoreInfo.backupInfo.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "restore-info.source-type" => Some(("restoreInfo.sourceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stats.average-cells-per-column" => Some(("stats.averageCellsPerColumn", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "stats.average-columns-per-row" => Some(("stats.averageColumnsPerRow", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "stats.logical-data-bytes" => Some(("stats.logicalDataBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stats.row-count" => Some(("stats.rowCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["average-cells-per-column", "average-columns-per-row", "backup", "backup-info", "deletion-protection", "end-time", "granularity", "logical-data-bytes", "name", 
"restore-info", "row-count", "source-backup", "source-table", "source-type", "start-time", "stats"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Table = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().instances_tables_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_instances_tables_restore(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3163,6 +3428,90 @@ where } } + async fn _projects_instances_tables_undelete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::UndeleteTableRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().instances_tables_undelete(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn 
_projects_instances_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3275,10 +3624,11 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "labels", "name", "state", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "labels", "name", "satisfies-pzs", "state", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3400,7 +3750,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3494,6 +3844,9 @@ where ("instances-app-profiles-patch", Some(opt)) => { call_result = self._projects_instances_app_profiles_patch(opt, dry_run, &mut err).await; }, + ("instances-clusters-backups-copy", Some(opt)) => { + call_result = self._projects_instances_clusters_backups_copy(opt, dry_run, &mut err).await; + }, ("instances-clusters-backups-create", Some(opt)) => { call_result = self._projects_instances_clusters_backups_create(opt, dry_run, &mut err).await; }, @@ -3527,6 
+3880,9 @@ where ("instances-clusters-get", Some(opt)) => { call_result = self._projects_instances_clusters_get(opt, dry_run, &mut err).await; }, + ("instances-clusters-hot-tablets-list", Some(opt)) => { + call_result = self._projects_instances_clusters_hot_tablets_list(opt, dry_run, &mut err).await; + }, ("instances-clusters-list", Some(opt)) => { call_result = self._projects_instances_clusters_list(opt, dry_run, &mut err).await; }, @@ -3584,6 +3940,9 @@ where ("instances-tables-modify-column-families", Some(opt)) => { call_result = self._projects_instances_tables_modify_column_families(opt, dry_run, &mut err).await; }, + ("instances-tables-patch", Some(opt)) => { + call_result = self._projects_instances_tables_patch(opt, dry_run, &mut err).await; + }, ("instances-tables-restore", Some(opt)) => { call_result = self._projects_instances_tables_restore(opt, dry_run, &mut err).await; }, @@ -3593,6 +3952,9 @@ where ("instances-tables-test-iam-permissions", Some(opt)) => { call_result = self._projects_instances_tables_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("instances-tables-undelete", Some(opt)) => { + call_result = self._projects_instances_tables_undelete(opt, dry_run, &mut err).await; + }, ("instances-test-iam-permissions", Some(opt)) => { call_result = self._projects_instances_test_iam_permissions(opt, dry_run, &mut err).await; }, @@ -3775,7 +4137,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'instances-app-profiles-create', 'instances-app-profiles-delete', 'instances-app-profiles-get', 'instances-app-profiles-list', 'instances-app-profiles-patch', 'instances-clusters-backups-create', 'instances-clusters-backups-delete', 'instances-clusters-backups-get', 'instances-clusters-backups-get-iam-policy', 'instances-clusters-backups-list', 'instances-clusters-backups-patch', 'instances-clusters-backups-set-iam-policy', 'instances-clusters-backups-test-iam-permissions', 'instances-clusters-create', 'instances-clusters-delete', 
'instances-clusters-get', 'instances-clusters-list', 'instances-clusters-partial-update-cluster', 'instances-clusters-update', 'instances-create', 'instances-delete', 'instances-get', 'instances-get-iam-policy', 'instances-list', 'instances-partial-update-instance', 'instances-set-iam-policy', 'instances-tables-check-consistency', 'instances-tables-create', 'instances-tables-delete', 'instances-tables-drop-row-range', 'instances-tables-generate-consistency-token', 'instances-tables-get', 'instances-tables-get-iam-policy', 'instances-tables-list', 'instances-tables-modify-column-families', 'instances-tables-restore', 'instances-tables-set-iam-policy', 'instances-tables-test-iam-permissions', 'instances-test-iam-permissions', 'instances-update', 'locations-get' and 'locations-list'", vec![ + ("projects", "methods: 'instances-app-profiles-create', 'instances-app-profiles-delete', 'instances-app-profiles-get', 'instances-app-profiles-list', 'instances-app-profiles-patch', 'instances-clusters-backups-copy', 'instances-clusters-backups-create', 'instances-clusters-backups-delete', 'instances-clusters-backups-get', 'instances-clusters-backups-get-iam-policy', 'instances-clusters-backups-list', 'instances-clusters-backups-patch', 'instances-clusters-backups-set-iam-policy', 'instances-clusters-backups-test-iam-permissions', 'instances-clusters-create', 'instances-clusters-delete', 'instances-clusters-get', 'instances-clusters-hot-tablets-list', 'instances-clusters-list', 'instances-clusters-partial-update-cluster', 'instances-clusters-update', 'instances-create', 'instances-delete', 'instances-get', 'instances-get-iam-policy', 'instances-list', 'instances-partial-update-instance', 'instances-set-iam-policy', 'instances-tables-check-consistency', 'instances-tables-create', 'instances-tables-delete', 'instances-tables-drop-row-range', 'instances-tables-generate-consistency-token', 'instances-tables-get', 'instances-tables-get-iam-policy', 'instances-tables-list', 
'instances-tables-modify-column-families', 'instances-tables-patch', 'instances-tables-restore', 'instances-tables-set-iam-policy', 'instances-tables-test-iam-permissions', 'instances-tables-undelete', 'instances-test-iam-permissions', 'instances-update', 'locations-get' and 'locations-list'", vec![ ("instances-app-profiles-create", Some(r##"Creates an app profile within an instance."##), "Details at http://byron.github.io/google-apis-rs/google_bigtableadmin2_cli/projects_instances-app-profiles-create", @@ -3892,6 +4254,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("instances-clusters-backups-copy", + Some(r##"Copy a Cloud Bigtable backup to a new backup in the destination cluster located in the destination instance and project."##), + "Details at http://byron.github.io/google-apis-rs/google_bigtableadmin2_cli/projects_instances-clusters-backups-copy", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The name of the destination cluster that will contain the backup copy. The cluster must already exists. Values are of the form: `projects/{project}/instances/{instance}/clusters/{cluster}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3976,7 +4366,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4054,7 +4444,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4082,7 +4472,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4170,6 +4560,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("instances-clusters-hot-tablets-list", + Some(r##"Lists hot tablets in a cluster, within the time range provided. Hot tablets are ordered based on CPU usage."##), + "Details at http://byron.github.io/google-apis-rs/google_bigtableadmin2_cli/projects_instances-clusters-hot-tablets-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The cluster name to list hot tablets. 
Value is in the following form: `projects/{project}/instances/{instance}/clusters/{cluster}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -4332,7 +4744,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4410,7 +4822,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4594,7 +5006,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4660,6 +5072,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("instances-tables-patch", + Some(r##"Updates a specified table."##), + "Details at http://byron.github.io/google-apis-rs/google_bigtableadmin2_cli/projects_instances-tables-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"The unique name of the table. Values are of the form `projects/{project}/instances/{instance}/tables/_a-zA-Z0-9*`. Views: `NAME_ONLY`, `SCHEMA_VIEW`, `REPLICATION_VIEW`, `STATS_VIEW`, `FULL`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -4667,12 +5107,12 @@ async fn main() { Some(false)), ]), ("instances-tables-restore", - Some(r##"Create a new table by restoring from a completed backup. The new table must be in the same project as the instance containing the backup. The returned table long-running operation can be used to track the progress of the operation, and to cancel it. The metadata field type is RestoreTableMetadata. The response type is Table, if successful."##), + Some(r##"Create a new table by restoring from a completed backup. The returned table long-running operation can be used to track the progress of the operation, and to cancel it. The metadata field type is RestoreTableMetadata. 
The response type is Table, if successful."##), "Details at http://byron.github.io/google-apis-rs/google_bigtableadmin2_cli/projects_instances-tables-restore", vec![ (Some(r##"parent"##), None, - Some(r##"Required. The name of the instance in which to create the restored table. This instance must be in the same project as the source backup. Values are of the form `projects//instances/`."##), + Some(r##"Required. The name of the instance in which to create the restored table. Values are of the form `projects//instances/`."##), Some(true), Some(false)), @@ -4700,7 +5140,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4728,7 +5168,35 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("instances-tables-undelete", + Some(r##"Restores a specified table which was accidentally deleted."##), + "Details at http://byron.github.io/google-apis-rs/google_bigtableadmin2_cli/projects_instances-tables-undelete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The unique name of the table to be restored. Values are of the form `projects/{project}/instances/{instance}/tables/{table}`."##), Some(true), Some(false)), @@ -4756,7 +5224,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4856,7 +5324,7 @@ async fn main() { let mut app = App::new("bigtableadmin2") .author("Sebastian Thiel ") - .version("4.0.1+20220222") + .version("5.0.2+20230110") .about("Administer your Cloud Bigtable tables and instances.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_bigtableadmin2_cli") .arg(Arg::with_name("url") diff --git a/gen/bigtableadmin2/Cargo.toml b/gen/bigtableadmin2/Cargo.toml index e428a5272b..de56f58d83 100644 --- a/gen/bigtableadmin2/Cargo.toml +++ b/gen/bigtableadmin2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-bigtableadmin2" -version = "5.0.2-beta-1+20230110" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Bigtable Admin (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/bigtableadmin2" homepage = "https://cloud.google.com/bigtable/" -documentation = "https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110" +documentation = "https://docs.rs/google-bigtableadmin2/5.0.2+20230110" license = "MIT" keywords = ["bigtableadmin", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/bigtableadmin2/README.md b/gen/bigtableadmin2/README.md index d5f3cdff9b..daf67f01e4 100644 --- a/gen/bigtableadmin2/README.md +++ b/gen/bigtableadmin2/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-bigtableadmin2` library allows access to all features of the *Google Bigtable Admin* service. -This documentation was generated from *Bigtable Admin* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *bigtableadmin:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Bigtable Admin* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *bigtableadmin:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Bigtable Admin* *v2* API can be found at the [official documentation site](https://cloud.google.com/bigtable/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/BigtableAdmin) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/BigtableAdmin) ... -* [operations](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::Operation) - * [*cancel*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::OperationCancelCall), [*delete*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::OperationDeleteCall), [*get*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::OperationGetCall) and [*projects operations list*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::OperationProjectOperationListCall) +* [operations](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::Operation) + * [*cancel*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::OperationCancelCall), [*delete*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::OperationDeleteCall), [*get*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::OperationGetCall) and [*projects operations list*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::OperationProjectOperationListCall) * projects - * [*instances app profiles 
create*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfileCreateCall), [*instances app profiles delete*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfileDeleteCall), [*instances app profiles get*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfileGetCall), [*instances app profiles list*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfileListCall), [*instances app profiles patch*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfilePatchCall), [*instances clusters backups copy*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupCopyCall), [*instances clusters backups create*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupCreateCall), [*instances clusters backups delete*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupDeleteCall), [*instances clusters backups get*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupGetCall), [*instances clusters backups get iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupGetIamPolicyCall), [*instances clusters backups list*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupListCall), [*instances clusters backups patch*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupPatchCall), [*instances clusters backups set iam 
policy*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupSetIamPolicyCall), [*instances clusters backups test iam permissions*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupTestIamPermissionCall), [*instances clusters create*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterCreateCall), [*instances clusters delete*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterDeleteCall), [*instances clusters get*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterGetCall), [*instances clusters hot tablets list*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterHotTabletListCall), [*instances clusters list*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterListCall), [*instances clusters partial update cluster*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterPartialUpdateClusterCall), [*instances clusters update*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceClusterUpdateCall), [*instances create*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceCreateCall), [*instances delete*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceDeleteCall), [*instances get*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceGetCall), [*instances get iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceGetIamPolicyCall), [*instances 
list*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceListCall), [*instances partial update instance*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstancePartialUpdateInstanceCall), [*instances set iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceSetIamPolicyCall), [*instances tables check consistency*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableCheckConsistencyCall), [*instances tables create*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableCreateCall), [*instances tables delete*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableDeleteCall), [*instances tables drop row range*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableDropRowRangeCall), [*instances tables generate consistency token*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableGenerateConsistencyTokenCall), [*instances tables get*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableGetCall), [*instances tables get iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableGetIamPolicyCall), [*instances tables list*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableListCall), [*instances tables modify column families*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableModifyColumnFamilyCall), [*instances tables 
patch*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTablePatchCall), [*instances tables restore*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableRestoreCall), [*instances tables set iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableSetIamPolicyCall), [*instances tables test iam permissions*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableTestIamPermissionCall), [*instances tables undelete*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTableUndeleteCall), [*instances test iam permissions*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceTestIamPermissionCall), [*instances update*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectInstanceUpdateCall), [*locations get*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectLocationGetCall) and [*locations list*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/api::ProjectLocationListCall) + * [*instances app profiles create*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfileCreateCall), [*instances app profiles delete*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfileDeleteCall), [*instances app profiles get*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfileGetCall), [*instances app profiles list*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfileListCall), [*instances app profiles 
patch*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceAppProfilePatchCall), [*instances clusters backups copy*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupCopyCall), [*instances clusters backups create*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupCreateCall), [*instances clusters backups delete*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupDeleteCall), [*instances clusters backups get*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupGetCall), [*instances clusters backups get iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupGetIamPolicyCall), [*instances clusters backups list*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupListCall), [*instances clusters backups patch*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupPatchCall), [*instances clusters backups set iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupSetIamPolicyCall), [*instances clusters backups test iam permissions*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterBackupTestIamPermissionCall), [*instances clusters create*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterCreateCall), [*instances clusters delete*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterDeleteCall), [*instances clusters 
get*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterGetCall), [*instances clusters hot tablets list*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterHotTabletListCall), [*instances clusters list*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterListCall), [*instances clusters partial update cluster*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterPartialUpdateClusterCall), [*instances clusters update*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceClusterUpdateCall), [*instances create*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceCreateCall), [*instances delete*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceDeleteCall), [*instances get*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceGetCall), [*instances get iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceGetIamPolicyCall), [*instances list*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceListCall), [*instances partial update instance*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstancePartialUpdateInstanceCall), [*instances set iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceSetIamPolicyCall), [*instances tables check consistency*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableCheckConsistencyCall), [*instances tables create*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableCreateCall), 
[*instances tables delete*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableDeleteCall), [*instances tables drop row range*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableDropRowRangeCall), [*instances tables generate consistency token*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableGenerateConsistencyTokenCall), [*instances tables get*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableGetCall), [*instances tables get iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableGetIamPolicyCall), [*instances tables list*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableListCall), [*instances tables modify column families*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableModifyColumnFamilyCall), [*instances tables patch*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTablePatchCall), [*instances tables restore*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableRestoreCall), [*instances tables set iam policy*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableSetIamPolicyCall), [*instances tables test iam permissions*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableTestIamPermissionCall), [*instances tables undelete*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTableUndeleteCall), [*instances test iam permissions*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceTestIamPermissionCall), [*instances 
update*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectInstanceUpdateCall), [*locations get*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectLocationGetCall) and [*locations list*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/api::ProjectLocationListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/BigtableAdmin)** +* **[Hub](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/BigtableAdmin)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::CallBuilder) -* **[Resources](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::CallBuilder) +* **[Resources](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::Part)** + * **[Parts](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::Part)** * a collection of properties * never directly used in 
*Activities* -* **[Activities](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -137,17 +137,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -157,29 +157,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::Delegate) to the -[Method Builder](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::Delegate) to the +[Method Builder](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::RequestValue) and -[decodable](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::RequestValue) and +[decodable](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-bigtableadmin2/5.0.2-beta-1+20230110/google_bigtableadmin2/client::RequestValue) are moved +* [request values](https://docs.rs/google-bigtableadmin2/5.0.2+20230110/google_bigtableadmin2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/bigtableadmin2/src/api.rs b/gen/bigtableadmin2/src/api.rs index a9a6e61cfe..e8864d861c 100644 --- a/gen/bigtableadmin2/src/api.rs +++ b/gen/bigtableadmin2/src/api.rs @@ -154,7 +154,7 @@ impl<'a, S> BigtableAdmin { BigtableAdmin { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://bigtableadmin.googleapis.com/".to_string(), _root_url: "https://bigtableadmin.googleapis.com/".to_string(), } @@ -168,7 +168,7 @@ impl<'a, S> BigtableAdmin { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/bigtableadmin2/src/client.rs b/gen/bigtableadmin2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/bigtableadmin2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/bigtableadmin2/src/lib.rs b/gen/bigtableadmin2/src/lib.rs index 99043bd03b..46d8041522 100644 --- a/gen/bigtableadmin2/src/lib.rs +++ b/gen/bigtableadmin2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Bigtable Admin* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *bigtableadmin:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Bigtable Admin* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *bigtableadmin:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Bigtable Admin* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/bigtable/). diff --git a/gen/billingbudgets1-cli/Cargo.toml b/gen/billingbudgets1-cli/Cargo.toml index 855fca0f22..e50b283f59 100644 --- a/gen/billingbudgets1-cli/Cargo.toml +++ b/gen/billingbudgets1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-billingbudgets1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with CloudBillingBudget (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/billingbudgets1-cli" @@ -20,13 +20,13 @@ name = "billingbudgets1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-billingbudgets1] path = "../billingbudgets1" -version = "4.0.1+20220227" +version = "5.0.2+20230117" + diff --git a/gen/billingbudgets1-cli/README.md b/gen/billingbudgets1-cli/README.md index faf3a79901..990d9363ee 100644 --- a/gen/billingbudgets1-cli/README.md +++ b/gen/billingbudgets1-cli/README.md @@ -25,7 +25,7 @@ Find the 
source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *CloudBillingBudget* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *CloudBillingBudget* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash billingbudgets1 [options] diff --git a/gen/billingbudgets1-cli/mkdocs.yml b/gen/billingbudgets1-cli/mkdocs.yml index d8737be78a..95114ac2d9 100644 --- a/gen/billingbudgets1-cli/mkdocs.yml +++ b/gen/billingbudgets1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: CloudBillingBudget v4.0.1+20220227 +site_name: CloudBillingBudget v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-billingbudgets1-cli site_description: A complete library to interact with CloudBillingBudget (protocol v1) @@ -7,13 +7,14 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/billingbudgets1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['billing-accounts_budgets-create.md', 'Billing Accounts', 'Budgets Create'] -- ['billing-accounts_budgets-delete.md', 'Billing Accounts', 'Budgets Delete'] -- ['billing-accounts_budgets-get.md', 'Billing Accounts', 'Budgets Get'] -- ['billing-accounts_budgets-list.md', 'Billing Accounts', 'Budgets List'] -- ['billing-accounts_budgets-patch.md', 'Billing Accounts', 'Budgets Patch'] +nav: +- Home: 'index.md' +- 'Billing Accounts': + - 'Budgets Create': 'billing-accounts_budgets-create.md' + - 'Budgets Delete': 'billing-accounts_budgets-delete.md' + - 'Budgets Get': 'billing-accounts_budgets-get.md' + - 'Budgets List': 'billing-accounts_budgets-list.md' + - 'Budgets Patch': 'billing-accounts_budgets-patch.md' theme: readthedocs diff --git a/gen/billingbudgets1-cli/src/client.rs b/gen/billingbudgets1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/billingbudgets1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' 
-// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/billingbudgets1-cli/src/main.rs b/gen/billingbudgets1-cli/src/main.rs index ee9ee73e74..f0866aea68 100644 --- a/gen/billingbudgets1-cli/src/main.rs +++ b/gen/billingbudgets1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_billingbudgets1::{api, Error, oauth2}; +use google_billingbudgets1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -271,7 +270,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -381,7 +380,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -660,7 +659,7 @@ async fn main() { let mut app = App::new("billingbudgets1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230117") .about("The Cloud Billing Budget API stores Cloud Billing budgets, which define a budget plan and the rules to execute as spend is tracked against that plan.") .after_help("All documentation details can be found at 
http://byron.github.io/google-apis-rs/google_billingbudgets1_cli") .arg(Arg::with_name("url") diff --git a/gen/billingbudgets1/Cargo.toml b/gen/billingbudgets1/Cargo.toml index c8ee58a831..68f3703382 100644 --- a/gen/billingbudgets1/Cargo.toml +++ b/gen/billingbudgets1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-billingbudgets1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with CloudBillingBudget (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/billingbudgets1" homepage = "https://cloud.google.com/billing/docs/how-to/budget-api-overview" -documentation = "https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-billingbudgets1/5.0.2+20230117" license = "MIT" keywords = ["billingbudgets", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/billingbudgets1/README.md b/gen/billingbudgets1/README.md index 1bcfc6d0db..6ec30ccde9 100644 --- a/gen/billingbudgets1/README.md +++ b/gen/billingbudgets1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-billingbudgets1` library allows access to all features of the *Google CloudBillingBudget* service. -This documentation was generated from *CloudBillingBudget* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *billingbudgets:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *CloudBillingBudget* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *billingbudgets:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *CloudBillingBudget* *v1* API can be found at the [official documentation site](https://cloud.google.com/billing/docs/how-to/budget-api-overview). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/CloudBillingBudget) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/CloudBillingBudget) ... * billing accounts - * [*budgets create*](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/api::BillingAccountBudgetCreateCall), [*budgets delete*](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/api::BillingAccountBudgetDeleteCall), [*budgets get*](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/api::BillingAccountBudgetGetCall), [*budgets list*](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/api::BillingAccountBudgetListCall) and [*budgets patch*](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/api::BillingAccountBudgetPatchCall) + * [*budgets create*](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/api::BillingAccountBudgetCreateCall), [*budgets delete*](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/api::BillingAccountBudgetDeleteCall), [*budgets get*](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/api::BillingAccountBudgetGetCall), [*budgets list*](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/api::BillingAccountBudgetListCall) and [*budgets patch*](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/api::BillingAccountBudgetPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/CloudBillingBudget)** +* **[Hub](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/CloudBillingBudget)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::CallBuilder) -* **[Resources](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::CallBuilder) +* **[Resources](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::Part)** + * **[Parts](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::Delegate) to the -[Method Builder](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::Delegate) to the +[Method Builder](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::RequestValue) and -[decodable](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::RequestValue) and +[decodable](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-billingbudgets1/5.0.2-beta-1+20230117/google_billingbudgets1/client::RequestValue) are moved +* [request values](https://docs.rs/google-billingbudgets1/5.0.2+20230117/google_billingbudgets1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/billingbudgets1/src/api.rs b/gen/billingbudgets1/src/api.rs index c3d875136e..5c06f28f80 100644 --- a/gen/billingbudgets1/src/api.rs +++ b/gen/billingbudgets1/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> CloudBillingBudget { CloudBillingBudget { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://billingbudgets.googleapis.com/".to_string(), _root_url: "https://billingbudgets.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> CloudBillingBudget { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/billingbudgets1/src/client.rs b/gen/billingbudgets1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/billingbudgets1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/billingbudgets1/src/lib.rs b/gen/billingbudgets1/src/lib.rs index f724b08d2b..3e1b010f61 100644 --- a/gen/billingbudgets1/src/lib.rs +++ b/gen/billingbudgets1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *CloudBillingBudget* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *billingbudgets:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *CloudBillingBudget* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *billingbudgets:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *CloudBillingBudget* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/billing/docs/how-to/budget-api-overview). diff --git a/gen/billingbudgets1_beta1-cli/Cargo.toml b/gen/billingbudgets1_beta1-cli/Cargo.toml index 82d0001c40..cc0959d3db 100644 --- a/gen/billingbudgets1_beta1-cli/Cargo.toml +++ b/gen/billingbudgets1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-billingbudgets1_beta1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with CloudBillingBudget (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/billingbudgets1_beta1-cli" @@ -20,13 +20,13 @@ name = "billingbudgets1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-billingbudgets1_beta1] path = "../billingbudgets1_beta1" -version = "4.0.1+20220227" +version = "5.0.2+20230117" + diff --git a/gen/billingbudgets1_beta1-cli/README.md b/gen/billingbudgets1_beta1-cli/README.md index 
ab09b465ff..abb4686969 100644 --- a/gen/billingbudgets1_beta1-cli/README.md +++ b/gen/billingbudgets1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *CloudBillingBudget* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *CloudBillingBudget* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash billingbudgets1-beta1 [options] diff --git a/gen/billingbudgets1_beta1-cli/mkdocs.yml b/gen/billingbudgets1_beta1-cli/mkdocs.yml index 9b6196442b..5ae8e2fb05 100644 --- a/gen/billingbudgets1_beta1-cli/mkdocs.yml +++ b/gen/billingbudgets1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: CloudBillingBudget v4.0.1+20220227 +site_name: CloudBillingBudget v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-billingbudgets1_beta1-cli site_description: A complete library to interact with CloudBillingBudget (protocol v1beta1) @@ -7,13 +7,14 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/billingbudgets1_ docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['billing-accounts_budgets-create.md', 'Billing Accounts', 'Budgets Create'] -- ['billing-accounts_budgets-delete.md', 'Billing Accounts', 'Budgets Delete'] -- ['billing-accounts_budgets-get.md', 'Billing Accounts', 'Budgets Get'] -- ['billing-accounts_budgets-list.md', 'Billing Accounts', 'Budgets List'] -- ['billing-accounts_budgets-patch.md', 'Billing Accounts', 'Budgets Patch'] +nav: +- Home: 'index.md' +- 'Billing Accounts': + - 'Budgets Create': 'billing-accounts_budgets-create.md' + - 'Budgets Delete': 'billing-accounts_budgets-delete.md' + - 'Budgets Get': 'billing-accounts_budgets-get.md' + - 'Budgets List': 'billing-accounts_budgets-list.md' + - 'Budgets Patch': 'billing-accounts_budgets-patch.md' theme: readthedocs diff --git a/gen/billingbudgets1_beta1-cli/src/client.rs 
b/gen/billingbudgets1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/billingbudgets1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/billingbudgets1_beta1-cli/src/main.rs b/gen/billingbudgets1_beta1-cli/src/main.rs index 2e8d1a2cc2..4f22abe2ec 100644 --- a/gen/billingbudgets1_beta1-cli/src/main.rs +++ b/gen/billingbudgets1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_billingbudgets1_beta1::{api, Error, oauth2}; +use google_billingbudgets1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -271,7 +270,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -657,7 +656,7 @@ async fn main() { let mut app = App::new("billingbudgets1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230117") .about("The Cloud Billing Budget API stores Cloud Billing budgets, which define a budget plan and the rules to execute as spend is tracked against that plan.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_billingbudgets1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/billingbudgets1_beta1/Cargo.toml b/gen/billingbudgets1_beta1/Cargo.toml index 24d9b67992..61acf474d8 100644 --- a/gen/billingbudgets1_beta1/Cargo.toml +++ b/gen/billingbudgets1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] 
name = "google-billingbudgets1_beta1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with CloudBillingBudget (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/billingbudgets1_beta1" homepage = "https://cloud.google.com/billing/docs/how-to/budget-api-overview" -documentation = "https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117" license = "MIT" keywords = ["billingbudgets", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/billingbudgets1_beta1/README.md b/gen/billingbudgets1_beta1/README.md index 238d5534c9..7838621d47 100644 --- a/gen/billingbudgets1_beta1/README.md +++ b/gen/billingbudgets1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-billingbudgets1_beta1` library allows access to all features of the *Google CloudBillingBudget* service. -This documentation was generated from *CloudBillingBudget* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *billingbudgets:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *CloudBillingBudget* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *billingbudgets:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *CloudBillingBudget* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/billing/docs/how-to/budget-api-overview). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/CloudBillingBudget) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/CloudBillingBudget) ... * billing accounts - * [*budgets create*](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetCreateCall), [*budgets delete*](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetDeleteCall), [*budgets get*](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetGetCall), [*budgets list*](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetListCall) and [*budgets patch*](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetPatchCall) + * [*budgets create*](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetCreateCall), [*budgets delete*](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetDeleteCall), [*budgets get*](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetGetCall), [*budgets list*](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetListCall) and [*budgets patch*](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/api::BillingAccountBudgetPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/CloudBillingBudget)** +* 
**[Hub](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/CloudBillingBudget)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-billingbudgets1_beta1/5.0.2-beta-1+20230117/google_billingbudgets1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-billingbudgets1_beta1/5.0.2+20230117/google_billingbudgets1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/billingbudgets1_beta1/src/api.rs b/gen/billingbudgets1_beta1/src/api.rs index 93f60a9d9e..fcc9d65487 100644 --- a/gen/billingbudgets1_beta1/src/api.rs +++ b/gen/billingbudgets1_beta1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> CloudBillingBudget { CloudBillingBudget { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://billingbudgets.googleapis.com/".to_string(), _root_url: "https://billingbudgets.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudBillingBudget { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/billingbudgets1_beta1/src/client.rs b/gen/billingbudgets1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/billingbudgets1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/billingbudgets1_beta1/src/lib.rs b/gen/billingbudgets1_beta1/src/lib.rs index 04402bcb6d..4bbde095e1 100644 --- a/gen/billingbudgets1_beta1/src/lib.rs +++ b/gen/billingbudgets1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *CloudBillingBudget* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *billingbudgets:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *CloudBillingBudget* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *billingbudgets:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *CloudBillingBudget* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/billing/docs/how-to/budget-api-overview). diff --git a/gen/binaryauthorization1-cli/Cargo.toml b/gen/binaryauthorization1-cli/Cargo.toml index e4e27e7c97..a7b88bd15e 100644 --- a/gen/binaryauthorization1-cli/Cargo.toml +++ b/gen/binaryauthorization1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-binaryauthorization1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Binary Authorization (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/binaryauthorization1-cli" @@ -20,13 +20,13 @@ name = "binaryauthorization1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-binaryauthorization1] path = "../binaryauthorization1" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/binaryauthorization1-cli/README.md b/gen/binaryauthorization1-cli/README.md index 
3b97ee68b8..62cf429e9d 100644 --- a/gen/binaryauthorization1-cli/README.md +++ b/gen/binaryauthorization1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Binary Authorization* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Binary Authorization* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash binaryauthorization1 [options] diff --git a/gen/binaryauthorization1-cli/mkdocs.yml b/gen/binaryauthorization1-cli/mkdocs.yml index 637256e8c7..eeb629edbb 100644 --- a/gen/binaryauthorization1-cli/mkdocs.yml +++ b/gen/binaryauthorization1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Binary Authorization v4.0.1+20220225 +site_name: Binary Authorization v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-binaryauthorization1-cli site_description: A complete library to interact with Binary Authorization (protocol v1) @@ -7,23 +7,25 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/binaryauthorizat docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_attestors-create.md', 'Projects', 'Attestors Create'] -- ['projects_attestors-delete.md', 'Projects', 'Attestors Delete'] -- ['projects_attestors-get.md', 'Projects', 'Attestors Get'] -- ['projects_attestors-get-iam-policy.md', 'Projects', 'Attestors Get Iam Policy'] -- ['projects_attestors-list.md', 'Projects', 'Attestors List'] -- ['projects_attestors-set-iam-policy.md', 'Projects', 'Attestors Set Iam Policy'] -- ['projects_attestors-test-iam-permissions.md', 'Projects', 'Attestors Test Iam Permissions'] -- ['projects_attestors-update.md', 'Projects', 'Attestors Update'] -- ['projects_attestors-validate-attestation-occurrence.md', 'Projects', 'Attestors Validate Attestation Occurrence'] -- ['projects_get-policy.md', 'Projects', 'Get Policy'] -- ['projects_policy-get-iam-policy.md', 
'Projects', 'Policy Get Iam Policy'] -- ['projects_policy-set-iam-policy.md', 'Projects', 'Policy Set Iam Policy'] -- ['projects_policy-test-iam-permissions.md', 'Projects', 'Policy Test Iam Permissions'] -- ['projects_update-policy.md', 'Projects', 'Update Policy'] -- ['systempolicy_get-policy.md', 'Systempolicy', 'Get Policy'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Attestors Create': 'projects_attestors-create.md' + - 'Attestors Delete': 'projects_attestors-delete.md' + - 'Attestors Get': 'projects_attestors-get.md' + - 'Attestors Get Iam Policy': 'projects_attestors-get-iam-policy.md' + - 'Attestors List': 'projects_attestors-list.md' + - 'Attestors Set Iam Policy': 'projects_attestors-set-iam-policy.md' + - 'Attestors Test Iam Permissions': 'projects_attestors-test-iam-permissions.md' + - 'Attestors Update': 'projects_attestors-update.md' + - 'Attestors Validate Attestation Occurrence': 'projects_attestors-validate-attestation-occurrence.md' + - 'Get Policy': 'projects_get-policy.md' + - 'Policy Get Iam Policy': 'projects_policy-get-iam-policy.md' + - 'Policy Set Iam Policy': 'projects_policy-set-iam-policy.md' + - 'Policy Test Iam Permissions': 'projects_policy-test-iam-permissions.md' + - 'Update Policy': 'projects_update-policy.md' +- 'Systempolicy': + - 'Get Policy': 'systempolicy_get-policy.md' theme: readthedocs diff --git a/gen/binaryauthorization1-cli/src/client.rs b/gen/binaryauthorization1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/binaryauthorization1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; 
-use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/binaryauthorization1-cli/src/main.rs b/gen/binaryauthorization1-cli/src/main.rs index 3f86c7587d..c61492a44d 100644 --- a/gen/binaryauthorization1-cli/src/main.rs +++ b/gen/binaryauthorization1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_binaryauthorization1::{api, Error, oauth2}; +use google_binaryauthorization1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -256,7 +255,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -315,7 +314,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -771,7 +770,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = 
call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1353,7 +1352,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1397,7 +1396,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1425,7 +1424,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1531,7 +1530,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1553,7 +1552,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1581,7 +1580,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1662,7 +1661,7 @@ async fn main() { let mut app = App::new("binaryauthorization1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("The management interface for Binary Authorization, a service that provides policy-based deployment validation and control for images deployed to Google Kubernetes Engine (GKE), Anthos Service Mesh, Anthos Clusters, and Cloud Run. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_binaryauthorization1_cli") .arg(Arg::with_name("url") diff --git a/gen/binaryauthorization1/Cargo.toml b/gen/binaryauthorization1/Cargo.toml index 7da6e6b695..13ac3a6f32 100644 --- a/gen/binaryauthorization1/Cargo.toml +++ b/gen/binaryauthorization1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-binaryauthorization1" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Binary Authorization (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/binaryauthorization1" homepage = "https://cloud.google.com/binary-authorization/" -documentation = "https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-binaryauthorization1/5.0.2+20230113" license = "MIT" keywords = ["binaryauthorization", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/binaryauthorization1/README.md b/gen/binaryauthorization1/README.md index 41930cb700..eda8b6d86c 100644 --- a/gen/binaryauthorization1/README.md +++ b/gen/binaryauthorization1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-binaryauthorization1` library allows access to all features of the *Google Binary Authorization* service. -This documentation was generated from *Binary Authorization* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *binaryauthorization:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Binary Authorization* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *binaryauthorization:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Binary Authorization* *v1* API can be found at the [official documentation site](https://cloud.google.com/binary-authorization/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/BinaryAuthorization) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/BinaryAuthorization) ... * projects - * [*attestors create*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorCreateCall), [*attestors delete*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorDeleteCall), [*attestors get*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorGetCall), [*attestors get iam policy*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorGetIamPolicyCall), [*attestors list*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorListCall), [*attestors set iam policy*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorSetIamPolicyCall), [*attestors test iam permissions*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorTestIamPermissionCall), [*attestors update*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorUpdateCall), [*attestors validate attestation occurrence*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectAttestorValidateAttestationOccurrenceCall), [*get 
policy*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectGetPolicyCall), [*policy get iam policy*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectPolicyGetIamPolicyCall), [*policy set iam policy*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectPolicySetIamPolicyCall), [*policy test iam permissions*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectPolicyTestIamPermissionCall) and [*update policy*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::ProjectUpdatePolicyCall) + * [*attestors create*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorCreateCall), [*attestors delete*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorDeleteCall), [*attestors get*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorGetCall), [*attestors get iam policy*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorGetIamPolicyCall), [*attestors list*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorListCall), [*attestors set iam policy*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorSetIamPolicyCall), [*attestors test iam permissions*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorTestIamPermissionCall), [*attestors update*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorUpdateCall), [*attestors validate attestation 
occurrence*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectAttestorValidateAttestationOccurrenceCall), [*get policy*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectGetPolicyCall), [*policy get iam policy*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectPolicyGetIamPolicyCall), [*policy set iam policy*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectPolicySetIamPolicyCall), [*policy test iam permissions*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectPolicyTestIamPermissionCall) and [*update policy*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::ProjectUpdatePolicyCall) * systempolicy - * [*get policy*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/api::SystempolicyGetPolicyCall) + * [*get policy*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/api::SystempolicyGetPolicyCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/BinaryAuthorization)** +* **[Hub](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/BinaryAuthorization)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::CallBuilder) +* **[Resources](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::Part)** + * **[Parts](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::Delegate) to the -[Method Builder](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::Delegate) to the +[Method Builder](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::RequestValue) and -[decodable](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::RequestValue) and +[decodable](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-binaryauthorization1/5.0.2-beta-1+20230113/google_binaryauthorization1/client::RequestValue) are moved +* [request values](https://docs.rs/google-binaryauthorization1/5.0.2+20230113/google_binaryauthorization1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/binaryauthorization1/src/api.rs b/gen/binaryauthorization1/src/api.rs index ecef7da1d8..e801eb5d6c 100644 --- a/gen/binaryauthorization1/src/api.rs +++ b/gen/binaryauthorization1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> BinaryAuthorization { BinaryAuthorization { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://binaryauthorization.googleapis.com/".to_string(), _root_url: "https://binaryauthorization.googleapis.com/".to_string(), } @@ -134,7 +134,7 @@ impl<'a, S> BinaryAuthorization { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/binaryauthorization1/src/client.rs b/gen/binaryauthorization1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/binaryauthorization1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/binaryauthorization1/src/lib.rs b/gen/binaryauthorization1/src/lib.rs index 16a89c3b5c..a0a1e8b5e3 100644 --- a/gen/binaryauthorization1/src/lib.rs +++ b/gen/binaryauthorization1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Binary Authorization* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *binaryauthorization:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Binary Authorization* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *binaryauthorization:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Binary Authorization* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/binary-authorization/). diff --git a/gen/binaryauthorization1_beta1-cli/Cargo.toml b/gen/binaryauthorization1_beta1-cli/Cargo.toml index c8a5e7d9b8..3f9971c76c 100644 --- a/gen/binaryauthorization1_beta1-cli/Cargo.toml +++ b/gen/binaryauthorization1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-binaryauthorization1_beta1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Binary Authorization (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/binaryauthorization1_beta1-cli" @@ -20,13 +20,13 @@ name = "binaryauthorization1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-binaryauthorization1_beta1] path = "../binaryauthorization1_beta1" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/binaryauthorization1_beta1-cli/README.md 
b/gen/binaryauthorization1_beta1-cli/README.md index 526d699a90..3fd42730aa 100644 --- a/gen/binaryauthorization1_beta1-cli/README.md +++ b/gen/binaryauthorization1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Binary Authorization* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Binary Authorization* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash binaryauthorization1-beta1 [options] diff --git a/gen/binaryauthorization1_beta1-cli/mkdocs.yml b/gen/binaryauthorization1_beta1-cli/mkdocs.yml index 1537c2574a..f3cf99cb31 100644 --- a/gen/binaryauthorization1_beta1-cli/mkdocs.yml +++ b/gen/binaryauthorization1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Binary Authorization v4.0.1+20220225 +site_name: Binary Authorization v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-binaryauthorization1_beta1-cli site_description: A complete library to interact with Binary Authorization (protocol v1beta1) @@ -7,23 +7,25 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/binaryauthorizat docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_attestors-create.md', 'Projects', 'Attestors Create'] -- ['projects_attestors-delete.md', 'Projects', 'Attestors Delete'] -- ['projects_attestors-get.md', 'Projects', 'Attestors Get'] -- ['projects_attestors-get-iam-policy.md', 'Projects', 'Attestors Get Iam Policy'] -- ['projects_attestors-list.md', 'Projects', 'Attestors List'] -- ['projects_attestors-set-iam-policy.md', 'Projects', 'Attestors Set Iam Policy'] -- ['projects_attestors-test-iam-permissions.md', 'Projects', 'Attestors Test Iam Permissions'] -- ['projects_attestors-update.md', 'Projects', 'Attestors Update'] -- ['projects_attestors-validate-attestation-occurrence.md', 'Projects', 'Attestors Validate Attestation 
Occurrence'] -- ['projects_get-policy.md', 'Projects', 'Get Policy'] -- ['projects_policy-get-iam-policy.md', 'Projects', 'Policy Get Iam Policy'] -- ['projects_policy-set-iam-policy.md', 'Projects', 'Policy Set Iam Policy'] -- ['projects_policy-test-iam-permissions.md', 'Projects', 'Policy Test Iam Permissions'] -- ['projects_update-policy.md', 'Projects', 'Update Policy'] -- ['systempolicy_get-policy.md', 'Systempolicy', 'Get Policy'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Attestors Create': 'projects_attestors-create.md' + - 'Attestors Delete': 'projects_attestors-delete.md' + - 'Attestors Get': 'projects_attestors-get.md' + - 'Attestors Get Iam Policy': 'projects_attestors-get-iam-policy.md' + - 'Attestors List': 'projects_attestors-list.md' + - 'Attestors Set Iam Policy': 'projects_attestors-set-iam-policy.md' + - 'Attestors Test Iam Permissions': 'projects_attestors-test-iam-permissions.md' + - 'Attestors Update': 'projects_attestors-update.md' + - 'Attestors Validate Attestation Occurrence': 'projects_attestors-validate-attestation-occurrence.md' + - 'Get Policy': 'projects_get-policy.md' + - 'Policy Get Iam Policy': 'projects_policy-get-iam-policy.md' + - 'Policy Set Iam Policy': 'projects_policy-set-iam-policy.md' + - 'Policy Test Iam Permissions': 'projects_policy-test-iam-permissions.md' + - 'Update Policy': 'projects_update-policy.md' +- 'Systempolicy': + - 'Get Policy': 'systempolicy_get-policy.md' theme: readthedocs diff --git a/gen/binaryauthorization1_beta1-cli/src/client.rs b/gen/binaryauthorization1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/binaryauthorization1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use 
std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/binaryauthorization1_beta1-cli/src/main.rs b/gen/binaryauthorization1_beta1-cli/src/main.rs index cee7a63d0f..3147bb1e54 100644 --- a/gen/binaryauthorization1_beta1-cli/src/main.rs +++ b/gen/binaryauthorization1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_binaryauthorization1_beta1::{api, Error, oauth2}; +use google_binaryauthorization1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -256,7 +255,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -315,7 +314,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -771,7 +770,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, 
"options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1353,7 +1352,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1397,7 +1396,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1425,7 +1424,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1531,7 +1530,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1553,7 +1552,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1581,7 +1580,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1662,7 +1661,7 @@ async fn main() { let mut app = App::new("binaryauthorization1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("The management interface for Binary Authorization, a service that provides policy-based deployment validation and control for images deployed to Google Kubernetes Engine (GKE), Anthos Service Mesh, Anthos Clusters, and Cloud Run. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_binaryauthorization1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/binaryauthorization1_beta1/Cargo.toml b/gen/binaryauthorization1_beta1/Cargo.toml index c36317ea02..db267035a2 100644 --- a/gen/binaryauthorization1_beta1/Cargo.toml +++ b/gen/binaryauthorization1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-binaryauthorization1_beta1" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Binary Authorization (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/binaryauthorization1_beta1" homepage = "https://cloud.google.com/binary-authorization/" -documentation = "https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113" license = "MIT" keywords = ["binaryauthorization", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/binaryauthorization1_beta1/README.md b/gen/binaryauthorization1_beta1/README.md index 1c0dea8451..8e8bdf1e67 100644 --- a/gen/binaryauthorization1_beta1/README.md +++ b/gen/binaryauthorization1_beta1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-binaryauthorization1_beta1` library allows access to all features of the *Google Binary Authorization* service. -This documentation was generated from *Binary Authorization* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *binaryauthorization:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Binary Authorization* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *binaryauthorization:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Binary Authorization* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/binary-authorization/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/BinaryAuthorization) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/BinaryAuthorization) ... * projects - * [*attestors create*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorCreateCall), [*attestors delete*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorDeleteCall), [*attestors get*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorGetCall), [*attestors get iam policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorGetIamPolicyCall), [*attestors list*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorListCall), [*attestors set iam policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorSetIamPolicyCall), [*attestors test iam permissions*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorTestIamPermissionCall), [*attestors update*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorUpdateCall), [*attestors validate attestation 
occurrence*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorValidateAttestationOccurrenceCall), [*get policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectGetPolicyCall), [*policy get iam policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectPolicyGetIamPolicyCall), [*policy set iam policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectPolicySetIamPolicyCall), [*policy test iam permissions*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectPolicyTestIamPermissionCall) and [*update policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::ProjectUpdatePolicyCall) + * [*attestors create*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorCreateCall), [*attestors delete*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorDeleteCall), [*attestors get*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorGetCall), [*attestors get iam policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorGetIamPolicyCall), [*attestors list*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorListCall), [*attestors set iam policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorSetIamPolicyCall), [*attestors test iam 
permissions*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorTestIamPermissionCall), [*attestors update*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorUpdateCall), [*attestors validate attestation occurrence*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectAttestorValidateAttestationOccurrenceCall), [*get policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectGetPolicyCall), [*policy get iam policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectPolicyGetIamPolicyCall), [*policy set iam policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectPolicySetIamPolicyCall), [*policy test iam permissions*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectPolicyTestIamPermissionCall) and [*update policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::ProjectUpdatePolicyCall) * systempolicy - * [*get policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/api::SystempolicyGetPolicyCall) + * [*get policy*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/api::SystempolicyGetPolicyCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/BinaryAuthorization)** +* 
**[Hub](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/BinaryAuthorization)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and 
ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-binaryauthorization1_beta1/5.0.2-beta-1+20230113/google_binaryauthorization1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-binaryauthorization1_beta1/5.0.2+20230113/google_binaryauthorization1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/binaryauthorization1_beta1/src/api.rs b/gen/binaryauthorization1_beta1/src/api.rs index 36b72d2584..170133b42a 100644 --- a/gen/binaryauthorization1_beta1/src/api.rs +++ b/gen/binaryauthorization1_beta1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> BinaryAuthorization { BinaryAuthorization { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://binaryauthorization.googleapis.com/".to_string(), _root_url: "https://binaryauthorization.googleapis.com/".to_string(), } @@ -134,7 +134,7 @@ impl<'a, S> BinaryAuthorization { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/binaryauthorization1_beta1/src/client.rs b/gen/binaryauthorization1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/binaryauthorization1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/binaryauthorization1_beta1/src/lib.rs b/gen/binaryauthorization1_beta1/src/lib.rs index 79eba12457..54258419db 100644 --- a/gen/binaryauthorization1_beta1/src/lib.rs +++ b/gen/binaryauthorization1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Binary Authorization* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *binaryauthorization:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Binary Authorization* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *binaryauthorization:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Binary Authorization* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/binary-authorization/). diff --git a/gen/blogger3-cli/Cargo.toml b/gen/blogger3-cli/Cargo.toml index 83506846e6..fa0b973454 100644 --- a/gen/blogger3-cli/Cargo.toml +++ b/gen/blogger3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-blogger3-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Blogger (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/blogger3-cli" @@ -20,13 +20,13 @@ name = "blogger3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-blogger3] path = "../blogger3" -version = "4.0.1+20220305" +version = "5.0.2+20230124" + diff --git a/gen/blogger3-cli/README.md b/gen/blogger3-cli/README.md index 92708de161..c787d9f5a4 100644 --- a/gen/blogger3-cli/README.md +++ b/gen/blogger3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Blogger* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Blogger* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash blogger3 [options] diff --git a/gen/blogger3-cli/mkdocs.yml b/gen/blogger3-cli/mkdocs.yml index 9464b8a82c..99f9346b6c 100644 --- a/gen/blogger3-cli/mkdocs.yml +++ b/gen/blogger3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Blogger v4.0.1+20220305 +site_name: Blogger v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-blogger3-cli site_description: A complete library to interact with Blogger (protocol v3) @@ -7,41 +7,49 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/blogger3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['blog-user-infos_get.md', 'Blog User Infos', 'Get'] -- ['blogs_get.md', 'Blogs', 'Get'] -- ['blogs_get-by-url.md', 'Blogs', 'Get By Url'] -- ['blogs_list-by-user.md', 'Blogs', 'List By User'] -- ['comments_approve.md', 'Comments', 'Approve'] -- ['comments_delete.md', 'Comments', 'Delete'] -- ['comments_get.md', 'Comments', 'Get'] -- ['comments_list.md', 'Comments', 'List'] -- ['comments_list-by-blog.md', 'Comments', 'List By Blog'] -- ['comments_mark-as-spam.md', 'Comments', 'Mark As Spam'] -- ['comments_remove-content.md', 'Comments', 'Remove Content'] -- ['page-views_get.md', 'Page Views', 'Get'] -- ['pages_delete.md', 'Pages', 'Delete'] -- ['pages_get.md', 'Pages', 'Get'] -- ['pages_insert.md', 'Pages', 'Insert'] -- ['pages_list.md', 'Pages', 'List'] -- ['pages_patch.md', 'Pages', 'Patch'] -- ['pages_publish.md', 'Pages', 'Publish'] -- ['pages_revert.md', 'Pages', 'Revert'] -- ['pages_update.md', 'Pages', 'Update'] -- ['post-user-infos_get.md', 'Post User Infos', 'Get'] -- ['post-user-infos_list.md', 'Post User Infos', 'List'] -- ['posts_delete.md', 'Posts', 'Delete'] -- ['posts_get.md', 
'Posts', 'Get'] -- ['posts_get-by-path.md', 'Posts', 'Get By Path'] -- ['posts_insert.md', 'Posts', 'Insert'] -- ['posts_list.md', 'Posts', 'List'] -- ['posts_patch.md', 'Posts', 'Patch'] -- ['posts_publish.md', 'Posts', 'Publish'] -- ['posts_revert.md', 'Posts', 'Revert'] -- ['posts_search.md', 'Posts', 'Search'] -- ['posts_update.md', 'Posts', 'Update'] -- ['users_get.md', 'Users', 'Get'] +nav: +- Home: 'index.md' +- 'Blog User Infos': + - 'Get': 'blog-user-infos_get.md' +- 'Blogs': + - 'Get': 'blogs_get.md' + - 'Get By Url': 'blogs_get-by-url.md' + - 'List By User': 'blogs_list-by-user.md' +- 'Comments': + - 'Approve': 'comments_approve.md' + - 'Delete': 'comments_delete.md' + - 'Get': 'comments_get.md' + - 'List': 'comments_list.md' + - 'List By Blog': 'comments_list-by-blog.md' + - 'Mark As Spam': 'comments_mark-as-spam.md' + - 'Remove Content': 'comments_remove-content.md' +- 'Page Views': + - 'Get': 'page-views_get.md' +- 'Pages': + - 'Delete': 'pages_delete.md' + - 'Get': 'pages_get.md' + - 'Insert': 'pages_insert.md' + - 'List': 'pages_list.md' + - 'Patch': 'pages_patch.md' + - 'Publish': 'pages_publish.md' + - 'Revert': 'pages_revert.md' + - 'Update': 'pages_update.md' +- 'Post User Infos': + - 'Get': 'post-user-infos_get.md' + - 'List': 'post-user-infos_list.md' +- 'Posts': + - 'Delete': 'posts_delete.md' + - 'Get': 'posts_get.md' + - 'Get By Path': 'posts_get-by-path.md' + - 'Insert': 'posts_insert.md' + - 'List': 'posts_list.md' + - 'Patch': 'posts_patch.md' + - 'Publish': 'posts_publish.md' + - 'Revert': 'posts_revert.md' + - 'Search': 'posts_search.md' + - 'Update': 'posts_update.md' +- 'Users': + - 'Get': 'users_get.md' theme: readthedocs diff --git a/gen/blogger3-cli/src/client.rs b/gen/blogger3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/blogger3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use 
crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/blogger3-cli/src/main.rs b/gen/blogger3-cli/src/main.rs index b73a392847..a109bb0e7c 100644 --- a/gen/blogger3-cli/src/main.rs +++ b/gen/blogger3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_blogger3::{api, Error, oauth2}; +use google_blogger3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,7 +57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "max-posts" => { - call = call.max_posts(arg_from_str(value.unwrap_or("-0"), err, "max-posts", "integer")); + call = call.max_posts( value.map(|v| arg_from_str(v, err, "max-posts", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -117,7 +116,7 @@ where call = call.view(value.unwrap_or("")); }, "max-posts" => { - call = call.max_posts(arg_from_str(value.unwrap_or("-0"), err, "max-posts", "integer")); + call = call.max_posts( value.map(|v| arg_from_str(v, err, "max-posts", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -238,7 +237,7 @@ where call = call.add_role(value.unwrap_or("")); }, "fetch-user-info" => { - call = call.fetch_user_info(arg_from_str(value.unwrap_or("false"), err, "fetch-user-info", "boolean")); + call = call.fetch_user_info( value.map(|v| arg_from_str(v, err, "fetch-user-info", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -458,10 +457,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "fetch-bodies" => { - call = call.fetch_bodies(arg_from_str(value.unwrap_or("false"), err, "fetch-bodies", "boolean")); + call = call.fetch_bodies( value.map(|v| arg_from_str(v, err, "fetch-bodies", "boolean")).unwrap_or(false)); }, "end-date" => { call = call.end_date(value.unwrap_or("")); @@ -529,10 +528,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "fetch-bodies" => { - call = call.fetch_bodies(arg_from_str(value.unwrap_or("false"), err, "fetch-bodies", "boolean")); + call = call.fetch_bodies( value.map(|v| arg_from_str(v, err, "fetch-bodies", "boolean")).unwrap_or(false)); }, "end-date" => { call = call.end_date(value.unwrap_or("")); @@ -750,6 +749,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "use-trash" => { + call = call.use_trash( value.map(|v| arg_from_str(v, err, "use-trash", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -763,6 +765,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); + v.extend(["use-trash"].iter().map(|v|*v)); v } )); } } @@ -880,10 +883,11 @@ where "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title" => Some(("title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "trashed" => Some(("trashed", JsonTypeInfo 
{ jtype: JsonType::String, ctype: ComplexType::Pod })), "updated" => Some(("updated", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url" => Some(("url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "display-name", "etag", "id", "image", "kind", "published", "self-link", "status", "title", "updated", "url"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "display-name", "etag", "id", "image", "kind", "published", "self-link", "status", "title", "trashed", "updated", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -898,7 +902,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "is-draft" => { - call = call.is_draft(arg_from_str(value.unwrap_or("false"), err, "is-draft", "boolean")); + call = call.is_draft( value.map(|v| arg_from_str(v, err, "is-draft", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -963,10 +967,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "fetch-bodies" => { - call = call.fetch_bodies(arg_from_str(value.unwrap_or("false"), err, "fetch-bodies", "boolean")); + call = call.fetch_bodies( value.map(|v| arg_from_str(v, err, "fetch-bodies", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1051,10 +1055,11 @@ where "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title" => Some(("title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "trashed" => Some(("trashed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "updated" => Some(("updated", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url" => Some(("url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "display-name", "etag", "id", "image", "kind", "published", "self-link", "status", "title", "updated", "url"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "display-name", "etag", "id", "image", "kind", "published", "self-link", "status", "title", "trashed", "updated", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1069,10 +1074,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "revert" => { - call = call.revert(arg_from_str(value.unwrap_or("false"), err, "revert", "boolean")); + call = call.revert( value.map(|v| arg_from_str(v, err, "revert", "boolean")).unwrap_or(false)); }, "publish" => { - call = call.publish(arg_from_str(value.unwrap_or("false"), err, "publish", "boolean")); + call = call.publish( value.map(|v| arg_from_str(v, err, "publish", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1261,10 +1266,11 @@ where "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title" => Some(("title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "trashed" => Some(("trashed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "updated" => Some(("updated", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url" => Some(("url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - 
let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "display-name", "etag", "id", "image", "kind", "published", "self-link", "status", "title", "updated", "url"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "display-name", "etag", "id", "image", "kind", "published", "self-link", "status", "title", "trashed", "updated", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1279,10 +1285,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "revert" => { - call = call.revert(arg_from_str(value.unwrap_or("false"), err, "revert", "boolean")); + call = call.revert( value.map(|v| arg_from_str(v, err, "revert", "boolean")).unwrap_or(false)); }, "publish" => { - call = call.publish(arg_from_str(value.unwrap_or("false"), err, "publish", "boolean")); + call = call.publish( value.map(|v| arg_from_str(v, err, "publish", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1338,7 +1344,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "max-comments" => { - call = call.max_comments(arg_from_str(value.unwrap_or("-0"), err, "max-comments", "integer")); + call = call.max_comments( value.map(|v| arg_from_str(v, err, "max-comments", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1409,13 +1415,13 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "labels" => { call = call.labels(value.unwrap_or("")); }, "fetch-bodies" => { - call = call.fetch_bodies(arg_from_str(value.unwrap_or("false"), err, "fetch-bodies", "boolean")); + call = call.fetch_bodies( value.map(|v| arg_from_str(v, err, "fetch-bodies", "boolean")).unwrap_or(false)); 
}, "end-date" => { call = call.end_date(value.unwrap_or("")); @@ -1473,6 +1479,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "use-trash" => { + call = call.use_trash( value.map(|v| arg_from_str(v, err, "use-trash", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -1486,6 +1495,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); + v.extend(["use-trash"].iter().map(|v|*v)); v } )); } } @@ -1521,13 +1531,13 @@ where call = call.view(value.unwrap_or("")); }, "max-comments" => { - call = call.max_comments(arg_from_str(value.unwrap_or("-0"), err, "max-comments", "integer")); + call = call.max_comments( value.map(|v| arg_from_str(v, err, "max-comments", "uint32")).unwrap_or(0)); }, "fetch-images" => { - call = call.fetch_images(arg_from_str(value.unwrap_or("false"), err, "fetch-images", "boolean")); + call = call.fetch_images( value.map(|v| arg_from_str(v, err, "fetch-images", "boolean")).unwrap_or(false)); }, "fetch-body" => { - call = call.fetch_body(arg_from_str(value.unwrap_or("false"), err, "fetch-body", "boolean")); + call = call.fetch_body( value.map(|v| arg_from_str(v, err, "fetch-body", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1586,7 +1596,7 @@ where call = call.view(value.unwrap_or("")); }, "max-comments" => { - call = call.max_comments(arg_from_str(value.unwrap_or("-0"), err, "max-comments", "integer")); + call = call.max_comments( value.map(|v| arg_from_str(v, err, "max-comments", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1681,10 +1691,11 @@ where "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title" => Some(("title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title-link" => Some(("titleLink", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "trashed" => Some(("trashed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "updated" => Some(("updated", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url" => Some(("url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "custom-meta-data", "display-name", "etag", "id", "image", "kind", "labels", "lat", "lng", "location", "name", "published", "reader-comments", "replies", "self-link", "span", "status", "title", "title-link", "total-items", "updated", "url"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "custom-meta-data", "display-name", "etag", "id", "image", "kind", "labels", "lat", "lng", "location", "name", "published", "reader-comments", "replies", "self-link", "span", "status", "title", "title-link", "total-items", "trashed", "updated", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1699,13 +1710,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "is-draft" => { - call = call.is_draft(arg_from_str(value.unwrap_or("false"), err, "is-draft", "boolean")); + call = call.is_draft( value.map(|v| arg_from_str(v, err, "is-draft", "boolean")).unwrap_or(false)); }, "fetch-images" => { - call = call.fetch_images(arg_from_str(value.unwrap_or("false"), err, "fetch-images", "boolean")); + call = call.fetch_images( value.map(|v| arg_from_str(v, err, "fetch-images", "boolean")).unwrap_or(false)); }, "fetch-body" => { - call = call.fetch_body(arg_from_str(value.unwrap_or("false"), err, "fetch-body", "boolean")); + call = call.fetch_body( value.map(|v| arg_from_str(v, err, "fetch-body", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1769,6 +1780,9 @@ where "start-date" 
=> { call = call.start_date(value.unwrap_or("")); }, + "sort-option" => { + call = call.sort_option(value.unwrap_or("")); + }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, @@ -1776,16 +1790,16 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "labels" => { call = call.labels(value.unwrap_or("")); }, "fetch-images" => { - call = call.fetch_images(arg_from_str(value.unwrap_or("false"), err, "fetch-images", "boolean")); + call = call.fetch_images( value.map(|v| arg_from_str(v, err, "fetch-images", "boolean")).unwrap_or(false)); }, "fetch-bodies" => { - call = call.fetch_bodies(arg_from_str(value.unwrap_or("false"), err, "fetch-bodies", "boolean")); + call = call.fetch_bodies( value.map(|v| arg_from_str(v, err, "fetch-bodies", "boolean")).unwrap_or(false)); }, "end-date" => { call = call.end_date(value.unwrap_or("")); @@ -1803,7 +1817,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["end-date", "fetch-bodies", "fetch-images", "labels", "max-results", "order-by", "page-token", "start-date", "status", "view"].iter().map(|v|*v)); + v.extend(["end-date", "fetch-bodies", "fetch-images", "labels", "max-results", "order-by", "page-token", "sort-option", "start-date", "status", "view"].iter().map(|v|*v)); v } )); } } @@ -1883,10 +1897,11 @@ where "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title" => Some(("title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title-link" => Some(("titleLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "trashed" => Some(("trashed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"updated" => Some(("updated", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url" => Some(("url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "custom-meta-data", "display-name", "etag", "id", "image", "kind", "labels", "lat", "lng", "location", "name", "published", "reader-comments", "replies", "self-link", "span", "status", "title", "title-link", "total-items", "updated", "url"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "custom-meta-data", "display-name", "etag", "id", "image", "kind", "labels", "lat", "lng", "location", "name", "published", "reader-comments", "replies", "self-link", "span", "status", "title", "title-link", "total-items", "trashed", "updated", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1901,19 +1916,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "revert" => { - call = call.revert(arg_from_str(value.unwrap_or("false"), err, "revert", "boolean")); + call = call.revert( value.map(|v| arg_from_str(v, err, "revert", "boolean")).unwrap_or(false)); }, "publish" => { - call = call.publish(arg_from_str(value.unwrap_or("false"), err, "publish", "boolean")); + call = call.publish( value.map(|v| arg_from_str(v, err, "publish", "boolean")).unwrap_or(false)); }, "max-comments" => { - call = call.max_comments(arg_from_str(value.unwrap_or("-0"), err, "max-comments", "integer")); + call = call.max_comments( value.map(|v| arg_from_str(v, err, "max-comments", "uint32")).unwrap_or(0)); }, "fetch-images" => { - call = call.fetch_images(arg_from_str(value.unwrap_or("false"), err, "fetch-images", "boolean")); + call = call.fetch_images( value.map(|v| arg_from_str(v, err, "fetch-images", "boolean")).unwrap_or(false)); }, "fetch-body" => { - call = 
call.fetch_body(arg_from_str(value.unwrap_or("false"), err, "fetch-body", "boolean")); + call = call.fetch_body( value.map(|v| arg_from_str(v, err, "fetch-body", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2080,7 +2095,7 @@ where call = call.order_by(value.unwrap_or("")); }, "fetch-bodies" => { - call = call.fetch_bodies(arg_from_str(value.unwrap_or("false"), err, "fetch-bodies", "boolean")); + call = call.fetch_bodies( value.map(|v| arg_from_str(v, err, "fetch-bodies", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2175,10 +2190,11 @@ where "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title" => Some(("title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "title-link" => Some(("titleLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "trashed" => Some(("trashed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "updated" => Some(("updated", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url" => Some(("url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "custom-meta-data", "display-name", "etag", "id", "image", "kind", "labels", "lat", "lng", "location", "name", "published", "reader-comments", "replies", "self-link", "span", "status", "title", "title-link", "total-items", "updated", "url"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["author", "blog", "content", "custom-meta-data", "display-name", "etag", "id", "image", "kind", "labels", "lat", "lng", "location", "name", "published", "reader-comments", "replies", "self-link", "span", "status", "title", "title-link", "total-items", "trashed", "updated", "url"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2193,19 
+2209,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "revert" => { - call = call.revert(arg_from_str(value.unwrap_or("false"), err, "revert", "boolean")); + call = call.revert( value.map(|v| arg_from_str(v, err, "revert", "boolean")).unwrap_or(false)); }, "publish" => { - call = call.publish(arg_from_str(value.unwrap_or("false"), err, "publish", "boolean")); + call = call.publish( value.map(|v| arg_from_str(v, err, "publish", "boolean")).unwrap_or(false)); }, "max-comments" => { - call = call.max_comments(arg_from_str(value.unwrap_or("-0"), err, "max-comments", "integer")); + call = call.max_comments( value.map(|v| arg_from_str(v, err, "max-comments", "uint32")).unwrap_or(0)); }, "fetch-images" => { - call = call.fetch_images(arg_from_str(value.unwrap_or("false"), err, "fetch-images", "boolean")); + call = call.fetch_images( value.map(|v| arg_from_str(v, err, "fetch-images", "boolean")).unwrap_or(false)); }, "fetch-body" => { - call = call.fetch_body(arg_from_str(value.unwrap_or("false"), err, "fetch-body", "boolean")); + call = call.fetch_body( value.map(|v| arg_from_str(v, err, "fetch-body", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3493,7 +3509,7 @@ async fn main() { let mut app = App::new("blogger3") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230124") .about("The Blogger API provides access to posts, comments and pages of a Blogger blog.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_blogger3_cli") .arg(Arg::with_name("url") diff --git a/gen/blogger3/Cargo.toml b/gen/blogger3/Cargo.toml index fc1d88827e..db2f4b959f 100644 --- a/gen/blogger3/Cargo.toml +++ b/gen/blogger3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-blogger3" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Blogger (protocol v3)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/blogger3" homepage = "https://developers.google.com/blogger/docs/3.0/getting_started" -documentation = "https://docs.rs/google-blogger3/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-blogger3/5.0.2+20230124" license = "MIT" keywords = ["blogger", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/blogger3/README.md b/gen/blogger3/README.md index a91dfe5288..056c612251 100644 --- a/gen/blogger3/README.md +++ b/gen/blogger3/README.md @@ -5,30 +5,30 @@ DO NOT EDIT ! --> The `google-blogger3` library allows access to all features of the *Google Blogger* service. -This documentation was generated from *Blogger* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *blogger:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Blogger* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *blogger:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Blogger* *v3* API can be found at the [official documentation site](https://developers.google.com/blogger/docs/3.0/getting_started). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/Blogger) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/Blogger) ... 
-* [blog user infos](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::BlogUserInfo) - * [*get*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::BlogUserInfoGetCall) -* [blogs](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::Blog) - * [*get*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::BlogGetCall), [*get by url*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::BlogGetByUrlCall) and [*list by user*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::BlogListByUserCall) -* [comments](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::Comment) - * [*approve*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::CommentApproveCall), [*delete*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::CommentDeleteCall), [*get*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::CommentGetCall), [*list*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::CommentListCall), [*list by blog*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::CommentListByBlogCall), [*mark as spam*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::CommentMarkAsSpamCall) and [*remove content*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::CommentRemoveContentCall) +* [blog user infos](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::BlogUserInfo) + * [*get*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::BlogUserInfoGetCall) +* [blogs](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::Blog) + * [*get*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::BlogGetCall), [*get by url*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::BlogGetByUrlCall) and 
[*list by user*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::BlogListByUserCall) +* [comments](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::Comment) + * [*approve*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::CommentApproveCall), [*delete*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::CommentDeleteCall), [*get*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::CommentGetCall), [*list*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::CommentListCall), [*list by blog*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::CommentListByBlogCall), [*mark as spam*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::CommentMarkAsSpamCall) and [*remove content*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::CommentRemoveContentCall) * page views - * [*get*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PageViewGetCall) -* [pages](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::Page) - * [*delete*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PageDeleteCall), [*get*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PageGetCall), [*insert*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PageInsertCall), [*list*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PageListCall), [*patch*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PagePatchCall), [*publish*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PagePublishCall), [*revert*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PageRevertCall) and [*update*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PageUpdateCall) -* [post user 
infos](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostUserInfo) - * [*get*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostUserInfoGetCall) and [*list*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostUserInfoListCall) -* [posts](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::Post) - * [*delete*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostDeleteCall), [*get*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostGetCall), [*get by path*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostGetByPathCall), [*insert*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostInsertCall), [*list*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostListCall), [*patch*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostPatchCall), [*publish*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostPublishCall), [*revert*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostRevertCall), [*search*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostSearchCall) and [*update*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::PostUpdateCall) -* [users](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::User) - * [*get*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/api::UserGetCall) + * [*get*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PageViewGetCall) +* [pages](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::Page) + * [*delete*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PageDeleteCall), 
[*get*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PageGetCall), [*insert*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PageInsertCall), [*list*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PageListCall), [*patch*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PagePatchCall), [*publish*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PagePublishCall), [*revert*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PageRevertCall) and [*update*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PageUpdateCall) +* [post user infos](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostUserInfo) + * [*get*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostUserInfoGetCall) and [*list*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostUserInfoListCall) +* [posts](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::Post) + * [*delete*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostDeleteCall), [*get*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostGetCall), [*get by path*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostGetByPathCall), [*insert*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostInsertCall), [*list*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostListCall), [*patch*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostPatchCall), [*publish*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostPublishCall), [*revert*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostRevertCall), [*search*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostSearchCall) and 
[*update*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::PostUpdateCall) +* [users](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::User) + * [*get*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/api::UserGetCall) @@ -37,17 +37,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/Blogger)** +* **[Hub](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/Blogger)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::CallBuilder) -* **[Resources](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::CallBuilder) +* **[Resources](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::Part)** + * **[Parts](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::CallBuilder)** * 
operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -152,17 +152,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -172,29 +172,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::Delegate) to the -[Method Builder](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::Delegate) to the +[Method Builder](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::RequestValue) and -[decodable](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::RequestValue) and +[decodable](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-blogger3/5.0.2-beta-1+20230124/google_blogger3/client::RequestValue) are moved +* [request values](https://docs.rs/google-blogger3/5.0.2+20230124/google_blogger3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/blogger3/src/api.rs b/gen/blogger3/src/api.rs index 597116ce82..702ec9e10e 100644 --- a/gen/blogger3/src/api.rs +++ b/gen/blogger3/src/api.rs @@ -134,7 +134,7 @@ impl<'a, S> Blogger { Blogger { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://blogger.googleapis.com/".to_string(), _root_url: "https://blogger.googleapis.com/".to_string(), } @@ -166,7 +166,7 @@ impl<'a, S> Blogger { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/blogger3/src/client.rs b/gen/blogger3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/blogger3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/blogger3/src/lib.rs b/gen/blogger3/src/lib.rs index cdeea013a8..5e21ff1c38 100644 --- a/gen/blogger3/src/lib.rs +++ b/gen/blogger3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Blogger* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *blogger:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Blogger* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *blogger:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Blogger* *v3* API can be found at the //! [official documentation site](https://developers.google.com/blogger/docs/3.0/getting_started). diff --git a/gen/books1-cli/Cargo.toml b/gen/books1-cli/Cargo.toml index 94de96a481..98138093ec 100644 --- a/gen/books1-cli/Cargo.toml +++ b/gen/books1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-books1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with books (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/books1-cli" @@ -20,13 +20,13 @@ name = "books1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-books1] path = "../books1" -version = "4.0.1+20220301" +version = "5.0.2+20230117" + diff --git a/gen/books1-cli/README.md b/gen/books1-cli/README.md index feec99a49d..b51fd4b5d9 100644 --- a/gen/books1-cli/README.md +++ b/gen/books1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *books* API 
at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *books* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash books1 [options] diff --git a/gen/books1-cli/mkdocs.yml b/gen/books1-cli/mkdocs.yml index 5f3ce0b7fd..2012a4f547 100644 --- a/gen/books1-cli/mkdocs.yml +++ b/gen/books1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: books v4.0.1+20220301 +site_name: books v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-books1-cli site_description: A complete library to interact with books (protocol v1) @@ -7,59 +7,72 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/books1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['bookshelves_get.md', 'Bookshelves', 'Get'] -- ['bookshelves_list.md', 'Bookshelves', 'List'] -- ['bookshelves_volumes-list.md', 'Bookshelves', 'Volumes List'] -- ['cloudloading_add-book.md', 'Cloudloading', 'Add Book'] -- ['cloudloading_delete-book.md', 'Cloudloading', 'Delete Book'] -- ['cloudloading_update-book.md', 'Cloudloading', 'Update Book'] -- ['dictionary_list-offline-metadata.md', 'Dictionary', 'List Offline Metadata'] -- ['familysharing_get-family-info.md', 'Familysharing', 'Get Family Info'] -- ['familysharing_share.md', 'Familysharing', 'Share'] -- ['familysharing_unshare.md', 'Familysharing', 'Unshare'] -- ['layers_annotation-data-get.md', 'Layers', 'Annotation Data Get'] -- ['layers_annotation-data-list.md', 'Layers', 'Annotation Data List'] -- ['layers_get.md', 'Layers', 'Get'] -- ['layers_list.md', 'Layers', 'List'] -- ['layers_volume-annotations-get.md', 'Layers', 'Volume Annotations Get'] -- ['layers_volume-annotations-list.md', 'Layers', 'Volume Annotations List'] -- ['myconfig_get-user-settings.md', 'Myconfig', 'Get User Settings'] -- ['myconfig_release-download-access.md', 'Myconfig', 'Release Download Access'] -- ['myconfig_request-access.md', 'Myconfig', 'Request Access'] -- 
['myconfig_sync-volume-licenses.md', 'Myconfig', 'Sync Volume Licenses'] -- ['myconfig_update-user-settings.md', 'Myconfig', 'Update User Settings'] -- ['mylibrary_annotations-delete.md', 'Mylibrary', 'Annotations Delete'] -- ['mylibrary_annotations-insert.md', 'Mylibrary', 'Annotations Insert'] -- ['mylibrary_annotations-list.md', 'Mylibrary', 'Annotations List'] -- ['mylibrary_annotations-summary.md', 'Mylibrary', 'Annotations Summary'] -- ['mylibrary_annotations-update.md', 'Mylibrary', 'Annotations Update'] -- ['mylibrary_bookshelves-add-volume.md', 'Mylibrary', 'Bookshelves Add Volume'] -- ['mylibrary_bookshelves-clear-volumes.md', 'Mylibrary', 'Bookshelves Clear Volumes'] -- ['mylibrary_bookshelves-get.md', 'Mylibrary', 'Bookshelves Get'] -- ['mylibrary_bookshelves-list.md', 'Mylibrary', 'Bookshelves List'] -- ['mylibrary_bookshelves-move-volume.md', 'Mylibrary', 'Bookshelves Move Volume'] -- ['mylibrary_bookshelves-remove-volume.md', 'Mylibrary', 'Bookshelves Remove Volume'] -- ['mylibrary_bookshelves-volumes-list.md', 'Mylibrary', 'Bookshelves Volumes List'] -- ['mylibrary_readingpositions-get.md', 'Mylibrary', 'Readingpositions Get'] -- ['mylibrary_readingpositions-set-position.md', 'Mylibrary', 'Readingpositions Set Position'] -- ['notification_get.md', 'Notification', 'Get'] -- ['onboarding_list-categories.md', 'Onboarding', 'List Categories'] -- ['onboarding_list-category-volumes.md', 'Onboarding', 'List Category Volumes'] -- ['personalizedstream_get.md', 'Personalizedstream', 'Get'] -- ['promooffer_accept.md', 'Promooffer', 'Accept'] -- ['promooffer_dismiss.md', 'Promooffer', 'Dismiss'] -- ['promooffer_get.md', 'Promooffer', 'Get'] -- ['series_get.md', 'Series', 'Get'] -- ['series_membership-get.md', 'Series', 'Membership Get'] -- ['volumes_associated-list.md', 'Volumes', 'Associated List'] -- ['volumes_get.md', 'Volumes', 'Get'] -- ['volumes_list.md', 'Volumes', 'List'] -- ['volumes_mybooks-list.md', 'Volumes', 'Mybooks List'] -- 
['volumes_recommended-list.md', 'Volumes', 'Recommended List'] -- ['volumes_recommended-rate.md', 'Volumes', 'Recommended Rate'] -- ['volumes_useruploaded-list.md', 'Volumes', 'Useruploaded List'] +nav: +- Home: 'index.md' +- 'Bookshelves': + - 'Get': 'bookshelves_get.md' + - 'List': 'bookshelves_list.md' + - 'Volumes List': 'bookshelves_volumes-list.md' +- 'Cloudloading': + - 'Add Book': 'cloudloading_add-book.md' + - 'Delete Book': 'cloudloading_delete-book.md' + - 'Update Book': 'cloudloading_update-book.md' +- 'Dictionary': + - 'List Offline Metadata': 'dictionary_list-offline-metadata.md' +- 'Familysharing': + - 'Get Family Info': 'familysharing_get-family-info.md' + - 'Share': 'familysharing_share.md' + - 'Unshare': 'familysharing_unshare.md' +- 'Layers': + - 'Annotation Data Get': 'layers_annotation-data-get.md' + - 'Annotation Data List': 'layers_annotation-data-list.md' + - 'Get': 'layers_get.md' + - 'List': 'layers_list.md' + - 'Volume Annotations Get': 'layers_volume-annotations-get.md' + - 'Volume Annotations List': 'layers_volume-annotations-list.md' +- 'Myconfig': + - 'Get User Settings': 'myconfig_get-user-settings.md' + - 'Release Download Access': 'myconfig_release-download-access.md' + - 'Request Access': 'myconfig_request-access.md' + - 'Sync Volume Licenses': 'myconfig_sync-volume-licenses.md' + - 'Update User Settings': 'myconfig_update-user-settings.md' +- 'Mylibrary': + - 'Annotations Delete': 'mylibrary_annotations-delete.md' + - 'Annotations Insert': 'mylibrary_annotations-insert.md' + - 'Annotations List': 'mylibrary_annotations-list.md' + - 'Annotations Summary': 'mylibrary_annotations-summary.md' + - 'Annotations Update': 'mylibrary_annotations-update.md' + - 'Bookshelves Add Volume': 'mylibrary_bookshelves-add-volume.md' + - 'Bookshelves Clear Volumes': 'mylibrary_bookshelves-clear-volumes.md' + - 'Bookshelves Get': 'mylibrary_bookshelves-get.md' + - 'Bookshelves List': 'mylibrary_bookshelves-list.md' + - 'Bookshelves Move Volume': 
'mylibrary_bookshelves-move-volume.md' + - 'Bookshelves Remove Volume': 'mylibrary_bookshelves-remove-volume.md' + - 'Bookshelves Volumes List': 'mylibrary_bookshelves-volumes-list.md' + - 'Readingpositions Get': 'mylibrary_readingpositions-get.md' + - 'Readingpositions Set Position': 'mylibrary_readingpositions-set-position.md' +- 'Notification': + - 'Get': 'notification_get.md' +- 'Onboarding': + - 'List Categories': 'onboarding_list-categories.md' + - 'List Category Volumes': 'onboarding_list-category-volumes.md' +- 'Personalizedstream': + - 'Get': 'personalizedstream_get.md' +- 'Promooffer': + - 'Accept': 'promooffer_accept.md' + - 'Dismiss': 'promooffer_dismiss.md' + - 'Get': 'promooffer_get.md' +- 'Series': + - 'Get': 'series_get.md' + - 'Membership Get': 'series_membership-get.md' +- 'Volumes': + - 'Associated List': 'volumes_associated-list.md' + - 'Get': 'volumes_get.md' + - 'List': 'volumes_list.md' + - 'Mybooks List': 'volumes_mybooks-list.md' + - 'Recommended List': 'volumes_recommended-list.md' + - 'Recommended Rate': 'volumes_recommended-rate.md' + - 'Useruploaded List': 'volumes_useruploaded-list.md' theme: readthedocs diff --git a/gen/books1-cli/src/client.rs b/gen/books1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/books1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub 
enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/books1-cli/src/main.rs b/gen/books1-cli/src/main.rs index da32504010..d0d1d544fd 100644 --- a/gen/books1-cli/src/main.rs +++ b/gen/books1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_books1::{api, Error, oauth2}; +use google_books1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -170,16 +169,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "source" => { call = call.source(value.unwrap_or("")); }, "show-preorders" => { - call = call.show_preorders(arg_from_str(value.unwrap_or("false"), err, "show-preorders", "boolean")); + call = call.show_preorders( value.map(|v| arg_from_str(v, err, "show-preorders", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -672,22 +671,22 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "w" => { - call = call.w(arg_from_str(value.unwrap_or("-0"), err, "w", "integer")); + call = call.w( value.map(|v| arg_from_str(v, err, "w", "int32")).unwrap_or(-0)); }, 
"source" => { call = call.source(value.unwrap_or("")); }, "scale" => { - call = call.scale(arg_from_str(value.unwrap_or("-0"), err, "scale", "integer")); + call = call.scale( value.map(|v| arg_from_str(v, err, "scale", "int32")).unwrap_or(-0)); }, "locale" => { call = call.locale(value.unwrap_or("")); }, "h" => { - call = call.h(arg_from_str(value.unwrap_or("-0"), err, "h", "integer")); + call = call.h( value.map(|v| arg_from_str(v, err, "h", "int32")).unwrap_or(-0)); }, "allow-web-definitions" => { - call = call.allow_web_definitions(arg_from_str(value.unwrap_or("false"), err, "allow-web-definitions", "boolean")); + call = call.allow_web_definitions( value.map(|v| arg_from_str(v, err, "allow-web-definitions", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -743,7 +742,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "w" => { - call = call.w(arg_from_str(value.unwrap_or("-0"), err, "w", "integer")); + call = call.w( value.map(|v| arg_from_str(v, err, "w", "int32")).unwrap_or(-0)); }, "updated-min" => { call = call.updated_min(value.unwrap_or("")); @@ -755,19 +754,19 @@ where call = call.source(value.unwrap_or("")); }, "scale" => { - call = call.scale(arg_from_str(value.unwrap_or("-0"), err, "scale", "integer")); + call = call.scale( value.map(|v| arg_from_str(v, err, "scale", "int32")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "locale" => { call = call.locale(value.unwrap_or("")); }, "h" => { - call = call.h(arg_from_str(value.unwrap_or("-0"), err, "h", "integer")); + call = call.h( value.map(|v| arg_from_str(v, err, "h", "int32")).unwrap_or(-0)); }, "annotation-data-id" => { call = call.add_annotation_data_id(value.unwrap_or("")); @@ -891,7 +890,7 @@ where call 
= call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "content-version" => { call = call.content_version(value.unwrap_or("")); @@ -1027,13 +1026,13 @@ where call = call.source(value.unwrap_or("")); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -1275,13 +1274,13 @@ where call = call.add_volume_ids(value.unwrap_or("")); }, "show-preorders" => { - call = call.show_preorders(arg_from_str(value.unwrap_or("false"), err, "show-preorders", "boolean")); + call = call.show_preorders( value.map(|v| arg_from_str(v, err, "show-preorders", "boolean")).unwrap_or(false)); }, "locale" => { call = call.locale(value.unwrap_or("")); }, "include-non-comics-series" => { - call = call.include_non_comics_series(arg_from_str(value.unwrap_or("false"), err, "include-non-comics-series", "boolean")); + call = call.include_non_comics_series( value.map(|v| arg_from_str(v, err, "include-non-comics-series", "boolean")).unwrap_or(false)); }, "features" => { call = call.add_features(value.unwrap_or("")); @@ -1574,7 +1573,7 @@ where call = call.source(value.unwrap_or("")); }, "show-only-summary-in-response" => { - call = call.show_only_summary_in_response(arg_from_str(value.unwrap_or("false"), err, "show-only-summary-in-response", "boolean")); + call = 
call.show_only_summary_in_response( value.map(|v| arg_from_str(v, err, "show-only-summary-in-response", "boolean")).unwrap_or(false)); }, "country" => { call = call.country(value.unwrap_or("")); @@ -1648,13 +1647,13 @@ where call = call.source(value.unwrap_or("")); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "layer-ids" => { call = call.add_layer_ids(value.unwrap_or("")); @@ -2253,13 +2252,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "source" => { call = call.source(value.unwrap_or("")); }, "show-preorders" => { - call = call.show_preorders(arg_from_str(value.unwrap_or("false"), err, "show-preorders", "boolean")); + call = call.show_preorders( value.map(|v| arg_from_str(v, err, "show-preorders", "boolean")).unwrap_or(false)); }, "q" => { call = call.q(value.unwrap_or("")); @@ -2268,7 +2267,7 @@ where call = call.projection(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "country" => { call = call.country(value.unwrap_or("")); @@ -2569,7 +2568,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "uint32")).unwrap_or(0)); }, "max-allowed-maturity-rating" => { call = call.max_allowed_maturity_rating(value.unwrap_or("")); @@ -2973,7 +2972,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -3094,7 +3093,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-library-consistent-read" => { - call = call.user_library_consistent_read(arg_from_str(value.unwrap_or("false"), err, "user-library-consistent-read", "boolean")); + call = call.user_library_consistent_read( value.map(|v| arg_from_str(v, err, "user-library-consistent-read", "boolean")).unwrap_or(false)); }, "source" => { call = call.source(value.unwrap_or("")); @@ -3106,7 +3105,7 @@ where call = call.partner(value.unwrap_or("")); }, "include-non-comics-series" => { - call = call.include_non_comics_series(arg_from_str(value.unwrap_or("false"), err, "include-non-comics-series", "boolean")); + call = call.include_non_comics_series( value.map(|v| arg_from_str(v, err, "include-non-comics-series", "boolean")).unwrap_or(false)); }, "country" => { call = call.country(value.unwrap_or("")); @@ -3165,13 +3164,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "source" => { call = call.source(value.unwrap_or("")); }, "show-preorders" => { - call = call.show_preorders(arg_from_str(value.unwrap_or("false"), err, "show-preorders", "boolean")); + call = 
call.show_preorders( value.map(|v| arg_from_str(v, err, "show-preorders", "boolean")).unwrap_or(false)); }, "projection" => { call = call.projection(value.unwrap_or("")); @@ -3186,7 +3185,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "max-allowed-maturity-rating" => { call = call.max_allowed_maturity_rating(value.unwrap_or("")); @@ -3257,7 +3256,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "source" => { call = call.source(value.unwrap_or("")); @@ -3266,7 +3265,7 @@ where call = call.add_processing_state(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "locale" => { call = call.locale(value.unwrap_or("")); @@ -3455,7 +3454,7 @@ where call = call.add_volume_id(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "source" => { call = call.source(value.unwrap_or("")); @@ -3464,7 +3463,7 @@ where call = call.add_processing_state(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "locale" => { call = 
call.locale(value.unwrap_or("")); @@ -5082,7 +5081,7 @@ async fn main() { let mut app = App::new("books1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230117") .about("The Google Books API allows clients to access the Google Books repository.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_books1_cli") .arg(Arg::with_name("url") diff --git a/gen/books1/Cargo.toml b/gen/books1/Cargo.toml index e1f9659a81..bf186ace35 100644 --- a/gen/books1/Cargo.toml +++ b/gen/books1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-books1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with books (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/books1" homepage = "https://code.google.com/apis/books/docs/v1/getting_started.html" -documentation = "https://docs.rs/google-books1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-books1/5.0.2+20230117" license = "MIT" keywords = ["books", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/books1/README.md b/gen/books1/README.md index 18120065f0..dcc6667ffb 100644 --- a/gen/books1/README.md +++ b/gen/books1/README.md @@ -5,40 +5,40 @@ DO NOT EDIT ! --> The `google-books1` library allows access to all features of the *Google books* service. -This documentation was generated from *books* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *books:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *books* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *books:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *books* *v1* API can be found at the [official documentation site](https://code.google.com/apis/books/docs/v1/getting_started.html). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/Books) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-books1/5.0.2+20230117/google_books1/Books) ... -* [bookshelves](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::Bookshelf) - * [*get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::BookshelfGetCall), [*list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::BookshelfListCall) and [*volumes list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::BookshelfVolumeListCall) +* [bookshelves](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::Bookshelf) + * [*get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::BookshelfGetCall), [*list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::BookshelfListCall) and [*volumes list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::BookshelfVolumeListCall) * cloudloading - * [*add book*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::CloudloadingAddBookCall), [*delete book*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::CloudloadingDeleteBookCall) and [*update book*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::CloudloadingUpdateBookCall) + * [*add book*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::CloudloadingAddBookCall), [*delete book*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::CloudloadingDeleteBookCall) and [*update book*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::CloudloadingUpdateBookCall) * dictionary - * [*list offline 
metadata*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::DictionaryListOfflineMetadataCall) + * [*list offline metadata*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::DictionaryListOfflineMetadataCall) * familysharing - * [*get family info*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::FamilysharingGetFamilyInfoCall), [*share*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::FamilysharingShareCall) and [*unshare*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::FamilysharingUnshareCall) + * [*get family info*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::FamilysharingGetFamilyInfoCall), [*share*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::FamilysharingShareCall) and [*unshare*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::FamilysharingUnshareCall) * layers - * [*annotation data get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::LayerAnnotationDataGetCall), [*annotation data list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::LayerAnnotationDataListCall), [*get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::LayerGetCall), [*list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::LayerListCall), [*volume annotations get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::LayerVolumeAnnotationGetCall) and [*volume annotations list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::LayerVolumeAnnotationListCall) + * [*annotation data get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::LayerAnnotationDataGetCall), [*annotation data list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::LayerAnnotationDataListCall), [*get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::LayerGetCall), 
[*list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::LayerListCall), [*volume annotations get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::LayerVolumeAnnotationGetCall) and [*volume annotations list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::LayerVolumeAnnotationListCall) * myconfig - * [*get user settings*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MyconfigGetUserSettingCall), [*release download access*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MyconfigReleaseDownloadAccesCall), [*request access*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MyconfigRequestAccesCall), [*sync volume licenses*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MyconfigSyncVolumeLicenseCall) and [*update user settings*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MyconfigUpdateUserSettingCall) + * [*get user settings*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MyconfigGetUserSettingCall), [*release download access*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MyconfigReleaseDownloadAccesCall), [*request access*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MyconfigRequestAccesCall), [*sync volume licenses*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MyconfigSyncVolumeLicenseCall) and [*update user settings*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MyconfigUpdateUserSettingCall) * mylibrary - * [*annotations delete*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryAnnotationDeleteCall), [*annotations insert*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryAnnotationInsertCall), [*annotations list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryAnnotationListCall), [*annotations 
summary*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryAnnotationSummaryCall), [*annotations update*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryAnnotationUpdateCall), [*bookshelves add volume*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryBookshelfAddVolumeCall), [*bookshelves clear volumes*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryBookshelfClearVolumeCall), [*bookshelves get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryBookshelfGetCall), [*bookshelves list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryBookshelfListCall), [*bookshelves move volume*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryBookshelfMoveVolumeCall), [*bookshelves remove volume*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryBookshelfRemoveVolumeCall), [*bookshelves volumes list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryBookshelfVolumeListCall), [*readingpositions get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryReadingpositionGetCall) and [*readingpositions set position*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::MylibraryReadingpositionSetPositionCall) -* [notification](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::Notification) - * [*get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::NotificationGetCall) + * [*annotations delete*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryAnnotationDeleteCall), [*annotations insert*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryAnnotationInsertCall), [*annotations list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryAnnotationListCall), 
[*annotations summary*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryAnnotationSummaryCall), [*annotations update*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryAnnotationUpdateCall), [*bookshelves add volume*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryBookshelfAddVolumeCall), [*bookshelves clear volumes*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryBookshelfClearVolumeCall), [*bookshelves get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryBookshelfGetCall), [*bookshelves list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryBookshelfListCall), [*bookshelves move volume*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryBookshelfMoveVolumeCall), [*bookshelves remove volume*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryBookshelfRemoveVolumeCall), [*bookshelves volumes list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryBookshelfVolumeListCall), [*readingpositions get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryReadingpositionGetCall) and [*readingpositions set position*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::MylibraryReadingpositionSetPositionCall) +* [notification](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::Notification) + * [*get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::NotificationGetCall) * onboarding - * [*list categories*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::OnboardingListCategoryCall) and [*list category volumes*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::OnboardingListCategoryVolumeCall) + * [*list categories*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::OnboardingListCategoryCall) and [*list category 
volumes*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::OnboardingListCategoryVolumeCall) * personalizedstream - * [*get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::PersonalizedstreamGetCall) + * [*get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::PersonalizedstreamGetCall) * promooffer - * [*accept*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::PromoofferAcceptCall), [*dismiss*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::PromoofferDismisCall) and [*get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::PromoofferGetCall) -* [series](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::Series) - * [*get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::SeriesGetCall) and [*membership get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::SeriesMembershipGetCall) -* [volumes](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::Volume) - * [*associated list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::VolumeAssociatedListCall), [*get*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::VolumeGetCall), [*list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::VolumeListCall), [*mybooks list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::VolumeMybookListCall), [*recommended list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::VolumeRecommendedListCall), [*recommended rate*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::VolumeRecommendedRateCall) and [*useruploaded list*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/api::VolumeUseruploadedListCall) + * [*accept*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::PromoofferAcceptCall), 
[*dismiss*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::PromoofferDismisCall) and [*get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::PromoofferGetCall) +* [series](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::Series) + * [*get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::SeriesGetCall) and [*membership get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::SeriesMembershipGetCall) +* [volumes](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::Volume) + * [*associated list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::VolumeAssociatedListCall), [*get*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::VolumeGetCall), [*list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::VolumeListCall), [*mybooks list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::VolumeMybookListCall), [*recommended list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::VolumeRecommendedListCall), [*recommended rate*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::VolumeRecommendedRateCall) and [*useruploaded list*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/api::VolumeUseruploadedListCall) @@ -47,17 +47,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/Books)** +* **[Hub](https://docs.rs/google-books1/5.0.2+20230117/google_books1/Books)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::CallBuilder) +* **[Resources](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::Part)** + * **[Parts](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -160,17 +160,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -180,29 +180,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::Delegate) to the -[Method Builder](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::Delegate) to the +[Method Builder](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::RequestValue) and -[decodable](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::RequestValue) and +[decodable](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-books1/5.0.2-beta-1+20230117/google_books1/client::RequestValue) are moved +* [request values](https://docs.rs/google-books1/5.0.2+20230117/google_books1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/books1/src/api.rs b/gen/books1/src/api.rs index 8ce7d74d55..38b346dee1 100644 --- a/gen/books1/src/api.rs +++ b/gen/books1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> Books { Books { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://books.googleapis.com/".to_string(), _root_url: "https://books.googleapis.com/".to_string(), } @@ -174,7 +174,7 @@ impl<'a, S> Books { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/books1/src/client.rs b/gen/books1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/books1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/books1/src/lib.rs b/gen/books1/src/lib.rs index 6c4bb3a8dc..78674e3a91 100644 --- a/gen/books1/src/lib.rs +++ b/gen/books1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *books* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *books:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *books* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *books:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *books* *v1* API can be found at the //! [official documentation site](https://code.google.com/apis/books/docs/v1/getting_started.html). diff --git a/gen/calendar3-cli/Cargo.toml b/gen/calendar3-cli/Cargo.toml index c73cfa6b39..adefc245c6 100644 --- a/gen/calendar3-cli/Cargo.toml +++ b/gen/calendar3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-calendar3-cli" -version = "4.0.1+20220217" +version = "5.0.2+20221229" authors = ["Sebastian Thiel "] description = "A complete library to interact with calendar (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/calendar3-cli" @@ -20,13 +20,13 @@ name = "calendar3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-calendar3] path = "../calendar3" -version = "4.0.1+20220217" +version = "5.0.2+20221229" + diff --git a/gen/calendar3-cli/README.md b/gen/calendar3-cli/README.md index 5be37621d0..fde69633bb 100644 --- a/gen/calendar3-cli/README.md +++ b/gen/calendar3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation 
was generated from the *calendar* API at revision *20220217*. The CLI is at version *4.0.1*. +This documentation was generated from the *calendar* API at revision *20221229*. The CLI is at version *5.0.2*. ```bash calendar3 [options] diff --git a/gen/calendar3-cli/mkdocs.yml b/gen/calendar3-cli/mkdocs.yml index 273b16e003..52a2c2165a 100644 --- a/gen/calendar3-cli/mkdocs.yml +++ b/gen/calendar3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: calendar v4.0.1+20220217 +site_name: calendar v5.0.2+20221229 site_url: http://byron.github.io/google-apis-rs/google-calendar3-cli site_description: A complete library to interact with calendar (protocol v3) @@ -7,45 +7,53 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/calendar3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['acl_delete.md', 'Acl', 'Delete'] -- ['acl_get.md', 'Acl', 'Get'] -- ['acl_insert.md', 'Acl', 'Insert'] -- ['acl_list.md', 'Acl', 'List'] -- ['acl_patch.md', 'Acl', 'Patch'] -- ['acl_update.md', 'Acl', 'Update'] -- ['acl_watch.md', 'Acl', 'Watch'] -- ['calendar-list_delete.md', 'Calendar List', 'Delete'] -- ['calendar-list_get.md', 'Calendar List', 'Get'] -- ['calendar-list_insert.md', 'Calendar List', 'Insert'] -- ['calendar-list_list.md', 'Calendar List', 'List'] -- ['calendar-list_patch.md', 'Calendar List', 'Patch'] -- ['calendar-list_update.md', 'Calendar List', 'Update'] -- ['calendar-list_watch.md', 'Calendar List', 'Watch'] -- ['calendars_clear.md', 'Calendars', 'Clear'] -- ['calendars_delete.md', 'Calendars', 'Delete'] -- ['calendars_get.md', 'Calendars', 'Get'] -- ['calendars_insert.md', 'Calendars', 'Insert'] -- ['calendars_patch.md', 'Calendars', 'Patch'] -- ['calendars_update.md', 'Calendars', 'Update'] -- ['channels_stop.md', 'Channels', 'Stop'] -- ['colors_get.md', 'Colors', 'Get'] -- ['events_delete.md', 'Events', 'Delete'] -- ['events_get.md', 'Events', 'Get'] -- ['events_import.md', 'Events', 'Import'] -- ['events_insert.md', 'Events', 'Insert'] 
-- ['events_instances.md', 'Events', 'Instances'] -- ['events_list.md', 'Events', 'List'] -- ['events_move.md', 'Events', 'Move'] -- ['events_patch.md', 'Events', 'Patch'] -- ['events_quick-add.md', 'Events', 'Quick Add'] -- ['events_update.md', 'Events', 'Update'] -- ['events_watch.md', 'Events', 'Watch'] -- ['freebusy_query.md', 'Freebusy', 'Query'] -- ['settings_get.md', 'Settings', 'Get'] -- ['settings_list.md', 'Settings', 'List'] -- ['settings_watch.md', 'Settings', 'Watch'] +nav: +- Home: 'index.md' +- 'Acl': + - 'Delete': 'acl_delete.md' + - 'Get': 'acl_get.md' + - 'Insert': 'acl_insert.md' + - 'List': 'acl_list.md' + - 'Patch': 'acl_patch.md' + - 'Update': 'acl_update.md' + - 'Watch': 'acl_watch.md' +- 'Calendar List': + - 'Delete': 'calendar-list_delete.md' + - 'Get': 'calendar-list_get.md' + - 'Insert': 'calendar-list_insert.md' + - 'List': 'calendar-list_list.md' + - 'Patch': 'calendar-list_patch.md' + - 'Update': 'calendar-list_update.md' + - 'Watch': 'calendar-list_watch.md' +- 'Calendars': + - 'Clear': 'calendars_clear.md' + - 'Delete': 'calendars_delete.md' + - 'Get': 'calendars_get.md' + - 'Insert': 'calendars_insert.md' + - 'Patch': 'calendars_patch.md' + - 'Update': 'calendars_update.md' +- 'Channels': + - 'Stop': 'channels_stop.md' +- 'Colors': + - 'Get': 'colors_get.md' +- 'Events': + - 'Delete': 'events_delete.md' + - 'Get': 'events_get.md' + - 'Import': 'events_import.md' + - 'Insert': 'events_insert.md' + - 'Instances': 'events_instances.md' + - 'List': 'events_list.md' + - 'Move': 'events_move.md' + - 'Patch': 'events_patch.md' + - 'Quick Add': 'events_quick-add.md' + - 'Update': 'events_update.md' + - 'Watch': 'events_watch.md' +- 'Freebusy': + - 'Query': 'freebusy_query.md' +- 'Settings': + - 'Get': 'settings_get.md' + - 'List': 'settings_list.md' + - 'Watch': 'settings_watch.md' theme: readthedocs diff --git a/gen/calendar3-cli/src/client.rs b/gen/calendar3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- 
a/gen/calendar3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => 
Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut 
fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/calendar3-cli/src/main.rs b/gen/calendar3-cli/src/main.rs index 2758faf37b..6e77a039b2 100644 --- a/gen/calendar3-cli/src/main.rs +++ b/gen/calendar3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_calendar3::{api, Error, oauth2}; +use google_calendar3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -192,7 +191,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -251,13 +250,13 @@ where call = call.sync_token(value.unwrap_or("")); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -351,7 +350,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -445,7 +444,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -546,13 +545,13 @@ where call = call.sync_token(value.unwrap_or("")); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -753,7 +752,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "color-rgb-format" => { - call = call.color_rgb_format(arg_from_str(value.unwrap_or("false"), err, "color-rgb-format", "boolean")); + call = call.color_rgb_format( value.map(|v| arg_from_str(v, err, "color-rgb-format", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -812,10 +811,10 @@ where call = call.sync_token(value.unwrap_or("")); }, "show-hidden" => { - call = call.show_hidden(arg_from_str(value.unwrap_or("false"), err, "show-hidden", "boolean")); + call = call.show_hidden( value.map(|v| arg_from_str(v, err, "show-hidden", "boolean")).unwrap_or(false)); }, "show-deleted" => { 
- call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -824,7 +823,7 @@ where call = call.min_access_role(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -929,7 +928,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "color-rgb-format" => { - call = call.color_rgb_format(arg_from_str(value.unwrap_or("false"), err, "color-rgb-format", "boolean")); + call = call.color_rgb_format( value.map(|v| arg_from_str(v, err, "color-rgb-format", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1034,7 +1033,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "color-rgb-format" => { - call = call.color_rgb_format(arg_from_str(value.unwrap_or("false"), err, "color-rgb-format", "boolean")); + call = call.color_rgb_format( value.map(|v| arg_from_str(v, err, "color-rgb-format", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1135,10 +1134,10 @@ where call = call.sync_token(value.unwrap_or("")); }, "show-hidden" => { - call = call.show_hidden(arg_from_str(value.unwrap_or("false"), err, "show-hidden", "boolean")); + call = call.show_hidden( value.map(|v| arg_from_str(v, err, "show-hidden", "boolean")).unwrap_or(false)); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -1147,7 +1146,7 @@ where call = 
call.min_access_role(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1760,7 +1759,7 @@ where call = call.send_updates(value.unwrap_or("")); }, "send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1811,10 +1810,10 @@ where call = call.time_zone(value.unwrap_or("")); }, "max-attendees" => { - call = call.max_attendees(arg_from_str(value.unwrap_or("-0"), err, "max-attendees", "integer")); + call = call.max_attendees( value.map(|v| arg_from_str(v, err, "max-attendees", "int32")).unwrap_or(-0)); }, "always-include-email" => { - call = call.always_include_email(arg_from_str(value.unwrap_or("false"), err, "always-include-email", "boolean")); + call = call.always_include_email( value.map(|v| arg_from_str(v, err, "always-include-email", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1969,10 +1968,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-attachments" => { - call = call.supports_attachments(arg_from_str(value.unwrap_or("false"), err, "supports-attachments", "boolean")); + call = call.supports_attachments( value.map(|v| arg_from_str(v, err, "supports-attachments", "boolean")).unwrap_or(false)); }, "conference-data-version" => { - call = call.conference_data_version(arg_from_str(value.unwrap_or("-0"), err, "conference-data-version", "integer")); + call = call.conference_data_version( value.map(|v| arg_from_str(v, err, "conference-data-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2127,19 +2126,19 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "supports-attachments" => { - call = call.supports_attachments(arg_from_str(value.unwrap_or("false"), err, "supports-attachments", "boolean")); + call = call.supports_attachments( value.map(|v| arg_from_str(v, err, "supports-attachments", "boolean")).unwrap_or(false)); }, "send-updates" => { call = call.send_updates(value.unwrap_or("")); }, "send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, "max-attendees" => { - call = call.max_attendees(arg_from_str(value.unwrap_or("-0"), err, "max-attendees", "integer")); + call = call.max_attendees( value.map(|v| arg_from_str(v, err, "max-attendees", "int32")).unwrap_or(-0)); }, "conference-data-version" => { - call = call.conference_data_version(arg_from_str(value.unwrap_or("-0"), err, "conference-data-version", "integer")); + call = call.conference_data_version( value.map(|v| arg_from_str(v, err, "conference-data-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2198,13 +2197,13 @@ where call = call.time_zone(value.unwrap_or("")); }, "time-min" => { - call = call.time_min(value.unwrap_or("")); + call = call.time_min( value.map(|v| arg_from_str(v, err, "time-min", "date-time")).unwrap_or(chrono::Utc::now())); }, "time-max" => { - call = call.time_max(value.unwrap_or("")); + call = call.time_max( value.map(|v| arg_from_str(v, err, "time-max", "date-time")).unwrap_or(chrono::Utc::now())); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -2213,13 +2212,13 @@ where call = 
call.original_start(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-attendees" => { - call = call.max_attendees(arg_from_str(value.unwrap_or("-0"), err, "max-attendees", "integer")); + call = call.max_attendees( value.map(|v| arg_from_str(v, err, "max-attendees", "int32")).unwrap_or(-0)); }, "always-include-email" => { - call = call.always_include_email(arg_from_str(value.unwrap_or("false"), err, "always-include-email", "boolean")); + call = call.always_include_email( value.map(|v| arg_from_str(v, err, "always-include-email", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2275,28 +2274,28 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "updated-min" => { - call = call.updated_min(value.unwrap_or("")); + call = call.updated_min( value.map(|v| arg_from_str(v, err, "updated-min", "date-time")).unwrap_or(chrono::Utc::now())); }, "time-zone" => { call = call.time_zone(value.unwrap_or("")); }, "time-min" => { - call = call.time_min(value.unwrap_or("")); + call = call.time_min( value.map(|v| arg_from_str(v, err, "time-min", "date-time")).unwrap_or(chrono::Utc::now())); }, "time-max" => { - call = call.time_max(value.unwrap_or("")); + call = call.time_max( value.map(|v| arg_from_str(v, err, "time-max", "date-time")).unwrap_or(chrono::Utc::now())); }, "sync-token" => { call = call.sync_token(value.unwrap_or("")); }, "single-events" => { - call = call.single_events(arg_from_str(value.unwrap_or("false"), err, "single-events", "boolean")); + call = call.single_events( value.map(|v| arg_from_str(v, err, "single-events", "boolean")).unwrap_or(false)); }, "show-hidden-invitations" => { - call = call.show_hidden_invitations(arg_from_str(value.unwrap_or("false"), err, "show-hidden-invitations", "boolean")); + call = 
call.show_hidden_invitations( value.map(|v| arg_from_str(v, err, "show-hidden-invitations", "boolean")).unwrap_or(false)); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "shared-extended-property" => { call = call.add_shared_extended_property(value.unwrap_or("")); @@ -2314,16 +2313,16 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-attendees" => { - call = call.max_attendees(arg_from_str(value.unwrap_or("-0"), err, "max-attendees", "integer")); + call = call.max_attendees( value.map(|v| arg_from_str(v, err, "max-attendees", "int32")).unwrap_or(-0)); }, "i-cal-uid" => { call = call.i_cal_uid(value.unwrap_or("")); }, "always-include-email" => { - call = call.always_include_email(arg_from_str(value.unwrap_or("false"), err, "always-include-email", "boolean")); + call = call.always_include_email( value.map(|v| arg_from_str(v, err, "always-include-email", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2382,7 +2381,7 @@ where call = call.send_updates(value.unwrap_or("")); }, "send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2537,22 +2536,22 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-attachments" => { - call = call.supports_attachments(arg_from_str(value.unwrap_or("false"), err, "supports-attachments", "boolean")); + call = call.supports_attachments( 
value.map(|v| arg_from_str(v, err, "supports-attachments", "boolean")).unwrap_or(false)); }, "send-updates" => { call = call.send_updates(value.unwrap_or("")); }, "send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, "max-attendees" => { - call = call.max_attendees(arg_from_str(value.unwrap_or("-0"), err, "max-attendees", "integer")); + call = call.max_attendees( value.map(|v| arg_from_str(v, err, "max-attendees", "int32")).unwrap_or(-0)); }, "conference-data-version" => { - call = call.conference_data_version(arg_from_str(value.unwrap_or("-0"), err, "conference-data-version", "integer")); + call = call.conference_data_version( value.map(|v| arg_from_str(v, err, "conference-data-version", "int32")).unwrap_or(-0)); }, "always-include-email" => { - call = call.always_include_email(arg_from_str(value.unwrap_or("false"), err, "always-include-email", "boolean")); + call = call.always_include_email( value.map(|v| arg_from_str(v, err, "always-include-email", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2611,7 +2610,7 @@ where call = call.send_updates(value.unwrap_or("")); }, "send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2766,22 +2765,22 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-attachments" => { - call = call.supports_attachments(arg_from_str(value.unwrap_or("false"), err, "supports-attachments", "boolean")); + call = call.supports_attachments( value.map(|v| arg_from_str(v, err, "supports-attachments", "boolean")).unwrap_or(false)); }, "send-updates" => { 
call = call.send_updates(value.unwrap_or("")); }, "send-notifications" => { - call = call.send_notifications(arg_from_str(value.unwrap_or("false"), err, "send-notifications", "boolean")); + call = call.send_notifications( value.map(|v| arg_from_str(v, err, "send-notifications", "boolean")).unwrap_or(false)); }, "max-attendees" => { - call = call.max_attendees(arg_from_str(value.unwrap_or("-0"), err, "max-attendees", "integer")); + call = call.max_attendees( value.map(|v| arg_from_str(v, err, "max-attendees", "int32")).unwrap_or(-0)); }, "conference-data-version" => { - call = call.conference_data_version(arg_from_str(value.unwrap_or("-0"), err, "conference-data-version", "integer")); + call = call.conference_data_version( value.map(|v| arg_from_str(v, err, "conference-data-version", "int32")).unwrap_or(-0)); }, "always-include-email" => { - call = call.always_include_email(arg_from_str(value.unwrap_or("false"), err, "always-include-email", "boolean")); + call = call.always_include_email( value.map(|v| arg_from_str(v, err, "always-include-email", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2879,28 +2878,28 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "updated-min" => { - call = call.updated_min(value.unwrap_or("")); + call = call.updated_min( value.map(|v| arg_from_str(v, err, "updated-min", "date-time")).unwrap_or(chrono::Utc::now())); }, "time-zone" => { call = call.time_zone(value.unwrap_or("")); }, "time-min" => { - call = call.time_min(value.unwrap_or("")); + call = call.time_min( value.map(|v| arg_from_str(v, err, "time-min", "date-time")).unwrap_or(chrono::Utc::now())); }, "time-max" => { - call = call.time_max(value.unwrap_or("")); + call = call.time_max( value.map(|v| arg_from_str(v, err, "time-max", "date-time")).unwrap_or(chrono::Utc::now())); }, "sync-token" => { call = call.sync_token(value.unwrap_or("")); }, "single-events" => { - call = call.single_events(arg_from_str(value.unwrap_or("false"), err, 
"single-events", "boolean")); + call = call.single_events( value.map(|v| arg_from_str(v, err, "single-events", "boolean")).unwrap_or(false)); }, "show-hidden-invitations" => { - call = call.show_hidden_invitations(arg_from_str(value.unwrap_or("false"), err, "show-hidden-invitations", "boolean")); + call = call.show_hidden_invitations( value.map(|v| arg_from_str(v, err, "show-hidden-invitations", "boolean")).unwrap_or(false)); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "shared-extended-property" => { call = call.add_shared_extended_property(value.unwrap_or("")); @@ -2918,16 +2917,16 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-attendees" => { - call = call.max_attendees(arg_from_str(value.unwrap_or("-0"), err, "max-attendees", "integer")); + call = call.max_attendees( value.map(|v| arg_from_str(v, err, "max-attendees", "int32")).unwrap_or(-0)); }, "i-cal-uid" => { call = call.i_cal_uid(value.unwrap_or("")); }, "always-include-email" => { - call = call.always_include_email(arg_from_str(value.unwrap_or("false"), err, "always-include-email", "boolean")); + call = call.always_include_email( value.map(|v| arg_from_str(v, err, "always-include-email", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3130,7 +3129,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3234,7 +3233,7 
@@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4086,7 +4085,7 @@ async fn main() { Some(true)), ]), ("get", - Some(r##"Returns an event."##), + Some(r##"Returns an event based on its Google Calendar ID. To retrieve an event using its iCalendar ID, call the events.list method using the iCalUID parameter."##), "Details at http://byron.github.io/google-apis-rs/google_calendar3_cli/events_get", vec![ (Some(r##"calendar-id"##), @@ -4471,7 +4470,7 @@ async fn main() { let mut app = App::new("calendar3") .author("Sebastian Thiel ") - .version("4.0.1+20220217") + .version("5.0.2+20221229") .about("Manipulates events and other calendar data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_calendar3_cli") .arg(Arg::with_name("url") diff --git a/gen/calendar3/Cargo.toml b/gen/calendar3/Cargo.toml index 18aa245479..60f2f70ceb 100644 --- a/gen/calendar3/Cargo.toml +++ b/gen/calendar3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-calendar3" -version = "5.0.2-beta-1+20221229" +version = "5.0.2+20221229" authors = ["Sebastian Thiel "] description = "A complete library to interact with calendar (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/calendar3" homepage = "https://developers.google.com/google-apps/calendar/firstapp" -documentation = "https://docs.rs/google-calendar3/5.0.2-beta-1+20221229" +documentation = "https://docs.rs/google-calendar3/5.0.2+20221229" license = "MIT" keywords = ["calendar", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/calendar3/README.md b/gen/calendar3/README.md index ee34002d4f..6ee1ae58e6 100644 --- a/gen/calendar3/README.md +++ b/gen/calendar3/README.md @@ -5,43 +5,43 @@ 
DO NOT EDIT ! --> The `google-calendar3` library allows access to all features of the *Google calendar* service. -This documentation was generated from *calendar* crate version *5.0.2-beta-1+20221229*, where *20221229* is the exact revision of the *calendar:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *calendar* crate version *5.0.2+20221229*, where *20221229* is the exact revision of the *calendar:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *calendar* *v3* API can be found at the [official documentation site](https://developers.google.com/google-apps/calendar/firstapp). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/CalendarHub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/CalendarHub) ... 
-* [acl](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::Acl) - * [*delete*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclDeleteCall), [*get*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclGetCall), [*insert*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclInsertCall), [*list*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclListCall), [*patch*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclPatchCall), [*update*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclUpdateCall) and [*watch*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclWatchCall) -* [calendar list](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarList) - * [*delete*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListDeleteCall), [*get*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListGetCall), [*insert*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListInsertCall), [*list*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListListCall), [*patch*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListPatchCall), [*update*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListUpdateCall) and [*watch*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListWatchCall) -* [calendars](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::Calendar) - * [*clear*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarClearCall), 
[*delete*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarDeleteCall), [*get*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarGetCall), [*insert*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarInsertCall), [*patch*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarPatchCall) and [*update*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarUpdateCall) -* [channels](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::Channel) - * [*stop*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::ChannelStopCall) +* [acl](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::Acl) + * [*delete*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclDeleteCall), [*get*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclGetCall), [*insert*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclInsertCall), [*list*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclListCall), [*patch*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclPatchCall), [*update*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclUpdateCall) and [*watch*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclWatchCall) +* [calendar list](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarList) + * [*delete*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListDeleteCall), [*get*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListGetCall), [*insert*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListInsertCall), 
[*list*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListListCall), [*patch*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListPatchCall), [*update*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListUpdateCall) and [*watch*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListWatchCall) +* [calendars](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::Calendar) + * [*clear*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarClearCall), [*delete*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarDeleteCall), [*get*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarGetCall), [*insert*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarInsertCall), [*patch*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarPatchCall) and [*update*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarUpdateCall) +* [channels](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::Channel) + * [*stop*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::ChannelStopCall) * colors - * [*get*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::ColorGetCall) -* [events](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::Event) - * [*delete*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventDeleteCall), [*get*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventGetCall), [*import*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventImportCall), [*insert*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventInsertCall), 
[*instances*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventInstanceCall), [*list*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventListCall), [*move*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventMoveCall), [*patch*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventPatchCall), [*quick add*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventQuickAddCall), [*update*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventUpdateCall) and [*watch*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventWatchCall) + * [*get*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::ColorGetCall) +* [events](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::Event) + * [*delete*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventDeleteCall), [*get*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventGetCall), [*import*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventImportCall), [*insert*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventInsertCall), [*instances*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventInstanceCall), [*list*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventListCall), [*move*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventMoveCall), [*patch*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventPatchCall), [*quick add*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventQuickAddCall), [*update*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventUpdateCall) and 
[*watch*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventWatchCall) * freebusy - * [*query*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::FreebusyQueryCall) -* [settings](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::Setting) - * [*get*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::SettingGetCall), [*list*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::SettingListCall) and [*watch*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::SettingWatchCall) + * [*query*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::FreebusyQueryCall) +* [settings](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::Setting) + * [*get*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::SettingGetCall), [*list*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::SettingListCall) and [*watch*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::SettingWatchCall) Subscription supported by ... 
-* [*list acl*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclListCall) -* [*watch acl*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::AclWatchCall) -* [*list calendar list*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListListCall) -* [*watch calendar list*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::CalendarListWatchCall) -* [*instances events*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventInstanceCall) -* [*list events*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventListCall) -* [*watch events*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::EventWatchCall) -* [*list settings*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::SettingListCall) -* [*watch settings*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/api::SettingWatchCall) +* [*list acl*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclListCall) +* [*watch acl*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::AclWatchCall) +* [*list calendar list*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListListCall) +* [*watch calendar list*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::CalendarListWatchCall) +* [*instances events*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventInstanceCall) +* [*list events*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventListCall) +* [*watch events*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::EventWatchCall) +* [*list settings*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::SettingListCall) +* [*watch 
settings*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/api::SettingWatchCall) @@ -49,17 +49,17 @@ Subscription supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/CalendarHub)** +* **[Hub](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/CalendarHub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::CallBuilder) -* **[Resources](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::CallBuilder) +* **[Resources](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::Part)** + * **[Parts](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -177,17 +177,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -197,29 +197,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::Delegate) to the -[Method Builder](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::Delegate) to the +[Method Builder](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::RequestValue) and -[decodable](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::RequestValue) and +[decodable](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-calendar3/5.0.2-beta-1+20221229/google_calendar3/client::RequestValue) are moved +* [request values](https://docs.rs/google-calendar3/5.0.2+20221229/google_calendar3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/calendar3/src/api.rs b/gen/calendar3/src/api.rs index 59a29e72ed..90d95f2e64 100644 --- a/gen/calendar3/src/api.rs +++ b/gen/calendar3/src/api.rs @@ -158,7 +158,7 @@ impl<'a, S> CalendarHub { CalendarHub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/calendar/v3/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -190,7 +190,7 @@ impl<'a, S> CalendarHub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/calendar3/src/client.rs b/gen/calendar3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/calendar3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/calendar3/src/lib.rs b/gen/calendar3/src/lib.rs index 01d3918362..703b07406f 100644 --- a/gen/calendar3/src/lib.rs +++ b/gen/calendar3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *calendar* crate version *5.0.2-beta-1+20221229*, where *20221229* is the exact revision of the *calendar:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *calendar* crate version *5.0.2+20221229*, where *20221229* is the exact revision of the *calendar:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *calendar* *v3* API can be found at the //! [official documentation site](https://developers.google.com/google-apps/calendar/firstapp). diff --git a/gen/certificatemanager1-cli/Cargo.toml b/gen/certificatemanager1-cli/Cargo.toml index 497b867640..ae92aa3778 100644 --- a/gen/certificatemanager1-cli/Cargo.toml +++ b/gen/certificatemanager1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-certificatemanager1-cli" -version = "4.0.1+20220214" +version = "5.0.2+20230104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Certificate Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/certificatemanager1-cli" @@ -20,13 +20,13 @@ name = "certificatemanager1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-certificatemanager1] path = "../certificatemanager1" -version = "4.0.1+20220214" +version = "5.0.2+20230104" + diff --git a/gen/certificatemanager1-cli/README.md b/gen/certificatemanager1-cli/README.md index 9661684909..092376883b 100644 --- a/gen/certificatemanager1-cli/README.md +++ 
b/gen/certificatemanager1-cli/README.md @@ -25,11 +25,15 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Certificate Manager* API at revision *20220214*. The CLI is at version *4.0.1*. +This documentation was generated from the *Certificate Manager* API at revision *20230104*. The CLI is at version *5.0.2*. ```bash certificatemanager1 [options] projects + locations-certificate-issuance-configs-create (-r )... [-p ]... [-o ] + locations-certificate-issuance-configs-delete [-p ]... [-o ] + locations-certificate-issuance-configs-get [-p ]... [-o ] + locations-certificate-issuance-configs-list [-p ]... [-o ] locations-certificate-maps-certificate-map-entries-create (-r )... [-p ]... [-o ] locations-certificate-maps-certificate-map-entries-delete [-p ]... [-o ] locations-certificate-maps-certificate-map-entries-get [-p ]... [-o ] diff --git a/gen/certificatemanager1-cli/mkdocs.yml b/gen/certificatemanager1-cli/mkdocs.yml index ab7ecaa793..617b73c322 100644 --- a/gen/certificatemanager1-cli/mkdocs.yml +++ b/gen/certificatemanager1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Certificate Manager v4.0.1+20220214 +site_name: Certificate Manager v5.0.2+20230104 site_url: http://byron.github.io/google-apis-rs/google-certificatemanager1-cli site_description: A complete library to interact with Certificate Manager (protocol v1) @@ -7,34 +7,39 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/certificatemanag docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-certificate-maps-certificate-map-entries-create.md', 'Projects', 'Locations Certificate Maps Certificate Map Entries Create'] -- ['projects_locations-certificate-maps-certificate-map-entries-delete.md', 'Projects', 'Locations Certificate Maps Certificate Map Entries Delete'] -- ['projects_locations-certificate-maps-certificate-map-entries-get.md', 'Projects', 'Locations Certificate 
Maps Certificate Map Entries Get'] -- ['projects_locations-certificate-maps-certificate-map-entries-list.md', 'Projects', 'Locations Certificate Maps Certificate Map Entries List'] -- ['projects_locations-certificate-maps-certificate-map-entries-patch.md', 'Projects', 'Locations Certificate Maps Certificate Map Entries Patch'] -- ['projects_locations-certificate-maps-create.md', 'Projects', 'Locations Certificate Maps Create'] -- ['projects_locations-certificate-maps-delete.md', 'Projects', 'Locations Certificate Maps Delete'] -- ['projects_locations-certificate-maps-get.md', 'Projects', 'Locations Certificate Maps Get'] -- ['projects_locations-certificate-maps-list.md', 'Projects', 'Locations Certificate Maps List'] -- ['projects_locations-certificate-maps-patch.md', 'Projects', 'Locations Certificate Maps Patch'] -- ['projects_locations-certificates-create.md', 'Projects', 'Locations Certificates Create'] -- ['projects_locations-certificates-delete.md', 'Projects', 'Locations Certificates Delete'] -- ['projects_locations-certificates-get.md', 'Projects', 'Locations Certificates Get'] -- ['projects_locations-certificates-list.md', 'Projects', 'Locations Certificates List'] -- ['projects_locations-certificates-patch.md', 'Projects', 'Locations Certificates Patch'] -- ['projects_locations-dns-authorizations-create.md', 'Projects', 'Locations Dns Authorizations Create'] -- ['projects_locations-dns-authorizations-delete.md', 'Projects', 'Locations Dns Authorizations Delete'] -- ['projects_locations-dns-authorizations-get.md', 'Projects', 'Locations Dns Authorizations Get'] -- ['projects_locations-dns-authorizations-list.md', 'Projects', 'Locations Dns Authorizations List'] -- ['projects_locations-dns-authorizations-patch.md', 'Projects', 'Locations Dns Authorizations Patch'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations 
Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Certificate Issuance Configs Create': 'projects_locations-certificate-issuance-configs-create.md' + - 'Locations Certificate Issuance Configs Delete': 'projects_locations-certificate-issuance-configs-delete.md' + - 'Locations Certificate Issuance Configs Get': 'projects_locations-certificate-issuance-configs-get.md' + - 'Locations Certificate Issuance Configs List': 'projects_locations-certificate-issuance-configs-list.md' + - 'Locations Certificate Maps Certificate Map Entries Create': 'projects_locations-certificate-maps-certificate-map-entries-create.md' + - 'Locations Certificate Maps Certificate Map Entries Delete': 'projects_locations-certificate-maps-certificate-map-entries-delete.md' + - 'Locations Certificate Maps Certificate Map Entries Get': 'projects_locations-certificate-maps-certificate-map-entries-get.md' + - 'Locations Certificate Maps Certificate Map Entries List': 'projects_locations-certificate-maps-certificate-map-entries-list.md' + - 'Locations Certificate Maps Certificate Map Entries Patch': 'projects_locations-certificate-maps-certificate-map-entries-patch.md' + - 'Locations Certificate Maps Create': 'projects_locations-certificate-maps-create.md' + - 'Locations Certificate Maps Delete': 'projects_locations-certificate-maps-delete.md' + - 'Locations Certificate Maps Get': 'projects_locations-certificate-maps-get.md' + - 'Locations Certificate Maps List': 'projects_locations-certificate-maps-list.md' + - 'Locations Certificate Maps Patch': 'projects_locations-certificate-maps-patch.md' + - 'Locations Certificates Create': 'projects_locations-certificates-create.md' + - 'Locations Certificates Delete': 
'projects_locations-certificates-delete.md' + - 'Locations Certificates Get': 'projects_locations-certificates-get.md' + - 'Locations Certificates List': 'projects_locations-certificates-list.md' + - 'Locations Certificates Patch': 'projects_locations-certificates-patch.md' + - 'Locations Dns Authorizations Create': 'projects_locations-dns-authorizations-create.md' + - 'Locations Dns Authorizations Delete': 'projects_locations-dns-authorizations-delete.md' + - 'Locations Dns Authorizations Get': 'projects_locations-dns-authorizations-get.md' + - 'Locations Dns Authorizations List': 'projects_locations-dns-authorizations-list.md' + - 'Locations Dns Authorizations Patch': 'projects_locations-dns-authorizations-patch.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/certificatemanager1-cli/src/client.rs b/gen/certificatemanager1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/certificatemanager1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// 
F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/certificatemanager1-cli/src/main.rs b/gen/certificatemanager1-cli/src/main.rs index 10a317e0d4..6a79cae21f 100644 --- a/gen/certificatemanager1-cli/src/main.rs +++ b/gen/certificatemanager1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_certificatemanager1::{api, Error, oauth2}; +use google_certificatemanager1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,272 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_locations_certificate_issuance_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "certificate-authority-config.certificate-authority-service-config.ca-pool" => Some(("certificateAuthorityConfig.certificateAuthorityServiceConfig.caPool", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "key-algorithm" => Some(("keyAlgorithm", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "lifetime" => Some(("lifetime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rotation-window-percentage" => Some(("rotationWindowPercentage", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["ca-pool", "certificate-authority-config", "certificate-authority-service-config", "create-time", "description", "key-algorithm", "labels", "lifetime", "name", "rotation-window-percentage", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CertificateIssuanceConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_certificate_issuance_configs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + 
let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "certificate-issuance-config-id" => { + call = call.certificate_issuance_config_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["certificate-issuance-config-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_certificate_issuance_configs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_certificate_issuance_configs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key 
== *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_certificate_issuance_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_certificate_issuance_configs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + 
} + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_certificate_issuance_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_certificate_issuance_configs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + 
v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_certificate_maps_certificate_map_entries_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -262,7 +527,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -365,7 +630,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -621,7 +886,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -720,7 +985,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -798,6 +1063,7 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "managed.dns-authorizations" => Some(("managed.dnsAuthorizations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "managed.domains" => Some(("managed.domains", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "managed.issuance-config" => Some(("managed.issuanceConfig", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "managed.provisioning-issue.details" => Some(("managed.provisioningIssue.details", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "managed.provisioning-issue.reason" => Some(("managed.provisioningIssue.reason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "managed.state" => Some(("managed.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -809,7 +1075,7 @@ where "self-managed.pem-private-key" => Some(("selfManaged.pemPrivateKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "details", "dns-authorizations", "domains", "expire-time", "labels", "managed", "name", "pem-certificate", "pem-private-key", "provisioning-issue", "reason", "san-dnsnames", "scope", "self-managed", "state", "update-time"]); + let 
suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "details", "dns-authorizations", "domains", "expire-time", "issuance-config", "labels", "managed", "name", "pem-certificate", "pem-private-key", "provisioning-issue", "reason", "san-dnsnames", "scope", "self-managed", "state", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -987,7 +1253,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1071,6 +1337,7 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "managed.dns-authorizations" => Some(("managed.dnsAuthorizations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "managed.domains" => Some(("managed.domains", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "managed.issuance-config" => Some(("managed.issuanceConfig", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "managed.provisioning-issue.details" => Some(("managed.provisioningIssue.details", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "managed.provisioning-issue.reason" => Some(("managed.provisioningIssue.reason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "managed.state" => Some(("managed.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1082,7 +1349,7 @@ where "self-managed.pem-private-key" => Some(("selfManaged.pemPrivateKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - 
let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "details", "dns-authorizations", "domains", "expire-time", "labels", "managed", "name", "pem-certificate", "pem-private-key", "provisioning-issue", "reason", "san-dnsnames", "scope", "self-managed", "state", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "details", "dns-authorizations", "domains", "expire-time", "issuance-config", "labels", "managed", "name", "pem-certificate", "pem-private-key", "provisioning-issue", "reason", "san-dnsnames", "scope", "self-managed", "state", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1097,7 +1364,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1357,7 +1624,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1460,7 +1727,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1571,7 +1838,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| 
arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1821,7 +2088,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1880,6 +2147,18 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { + ("locations-certificate-issuance-configs-create", Some(opt)) => { + call_result = self._projects_locations_certificate_issuance_configs_create(opt, dry_run, &mut err).await; + }, + ("locations-certificate-issuance-configs-delete", Some(opt)) => { + call_result = self._projects_locations_certificate_issuance_configs_delete(opt, dry_run, &mut err).await; + }, + ("locations-certificate-issuance-configs-get", Some(opt)) => { + call_result = self._projects_locations_certificate_issuance_configs_get(opt, dry_run, &mut err).await; + }, + ("locations-certificate-issuance-configs-list", Some(opt)) => { + call_result = self._projects_locations_certificate_issuance_configs_list(opt, dry_run, &mut err).await; + }, ("locations-certificate-maps-certificate-map-entries-create", Some(opt)) => { call_result = self._projects_locations_certificate_maps_certificate_map_entries_create(opt, dry_run, &mut err).await; }, @@ -2037,7 +2316,101 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-certificate-maps-certificate-map-entries-create', 'locations-certificate-maps-certificate-map-entries-delete', 'locations-certificate-maps-certificate-map-entries-get', 'locations-certificate-maps-certificate-map-entries-list', 'locations-certificate-maps-certificate-map-entries-patch', 'locations-certificate-maps-create', 'locations-certificate-maps-delete', 
'locations-certificate-maps-get', 'locations-certificate-maps-list', 'locations-certificate-maps-patch', 'locations-certificates-create', 'locations-certificates-delete', 'locations-certificates-get', 'locations-certificates-list', 'locations-certificates-patch', 'locations-dns-authorizations-create', 'locations-dns-authorizations-delete', 'locations-dns-authorizations-get', 'locations-dns-authorizations-list', 'locations-dns-authorizations-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-certificate-issuance-configs-create', 'locations-certificate-issuance-configs-delete', 'locations-certificate-issuance-configs-get', 'locations-certificate-issuance-configs-list', 'locations-certificate-maps-certificate-map-entries-create', 'locations-certificate-maps-certificate-map-entries-delete', 'locations-certificate-maps-certificate-map-entries-get', 'locations-certificate-maps-certificate-map-entries-list', 'locations-certificate-maps-certificate-map-entries-patch', 'locations-certificate-maps-create', 'locations-certificate-maps-delete', 'locations-certificate-maps-get', 'locations-certificate-maps-list', 'locations-certificate-maps-patch', 'locations-certificates-create', 'locations-certificates-delete', 'locations-certificates-get', 'locations-certificates-list', 'locations-certificates-patch', 'locations-dns-authorizations-create', 'locations-dns-authorizations-delete', 'locations-dns-authorizations-get', 'locations-dns-authorizations-list', 'locations-dns-authorizations-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("locations-certificate-issuance-configs-create", + Some(r##"Creates a new CertificateIssuanceConfig in a given project and location."##), + "Details at 
http://byron.github.io/google-apis-rs/google_certificatemanager1_cli/projects_locations-certificate-issuance-configs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the certificate issuance config. Must be in the format `projects/*/locations/*`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-certificate-issuance-configs-delete", + Some(r##"Deletes a single CertificateIssuanceConfig."##), + "Details at http://byron.github.io/google-apis-rs/google_certificatemanager1_cli/projects_locations-certificate-issuance-configs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the certificate issuance config to delete. Must be in the format `projects/*/locations/*/certificateIssuanceConfigs/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-certificate-issuance-configs-get", + Some(r##"Gets details of a single CertificateIssuanceConfig."##), + "Details at http://byron.github.io/google-apis-rs/google_certificatemanager1_cli/projects_locations-certificate-issuance-configs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the certificate issuance config to describe. 
Must be in the format `projects/*/locations/*/certificateIssuanceConfigs/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-certificate-issuance-configs-list", + Some(r##"Lists CertificateIssuanceConfigs in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_certificatemanager1_cli/projects_locations-certificate-issuance-configs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The project and location from which the certificate should be listed, specified in the format `projects/*/locations/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-certificate-maps-certificate-map-entries-create", Some(r##"Creates a new CertificateMapEntry in a given project and location."##), "Details at http://byron.github.io/google-apis-rs/google_certificatemanager1_cli/projects_locations-certificate-maps-certificate-map-entries-create", @@ -2670,7 +3043,7 @@ async fn main() { let mut app = App::new("certificatemanager1") .author("Sebastian Thiel ") - .version("4.0.1+20220214") + .version("5.0.2+20230104") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_certificatemanager1_cli") .arg(Arg::with_name("url") diff --git a/gen/certificatemanager1/Cargo.toml b/gen/certificatemanager1/Cargo.toml index 1b14e8266f..faa80a20bd 100644 --- a/gen/certificatemanager1/Cargo.toml +++ 
b/gen/certificatemanager1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-certificatemanager1" -version = "5.0.2-beta-1+20230104" +version = "5.0.2+20230104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Certificate Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/certificatemanager1" homepage = "https://cloud.google.com/certificate-manager" -documentation = "https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104" +documentation = "https://docs.rs/google-certificatemanager1/5.0.2+20230104" license = "MIT" keywords = ["certificatemanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/certificatemanager1/README.md b/gen/certificatemanager1/README.md index 51f8f0389c..46b017ef80 100644 --- a/gen/certificatemanager1/README.md +++ b/gen/certificatemanager1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-certificatemanager1` library allows access to all features of the *Google Certificate Manager* service. -This documentation was generated from *Certificate Manager* crate version *5.0.2-beta-1+20230104*, where *20230104* is the exact revision of the *certificatemanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Certificate Manager* crate version *5.0.2+20230104*, where *20230104* is the exact revision of the *certificatemanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Certificate Manager* *v1* API can be found at the [official documentation site](https://cloud.google.com/certificate-manager). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/CertificateManager) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/CertificateManager) ... * projects - * [*locations certificate issuance configs create*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateIssuanceConfigCreateCall), [*locations certificate issuance configs delete*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateIssuanceConfigDeleteCall), [*locations certificate issuance configs get*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateIssuanceConfigGetCall), [*locations certificate issuance configs list*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateIssuanceConfigListCall), [*locations certificate maps certificate map entries create*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryCreateCall), [*locations certificate maps certificate map entries delete*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryDeleteCall), [*locations certificate maps certificate map entries get*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryGetCall), [*locations certificate maps certificate map entries list*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryListCall), [*locations certificate maps certificate map entries 
patch*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryPatchCall), [*locations certificate maps create*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCreateCall), [*locations certificate maps delete*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapDeleteCall), [*locations certificate maps get*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapGetCall), [*locations certificate maps list*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapListCall), [*locations certificate maps patch*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapPatchCall), [*locations certificates create*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateCreateCall), [*locations certificates delete*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateDeleteCall), [*locations certificates get*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateGetCall), [*locations certificates list*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificateListCall), [*locations certificates patch*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationCertificatePatchCall), [*locations dns authorizations 
create*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationCreateCall), [*locations dns authorizations delete*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationDeleteCall), [*locations dns authorizations get*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationGetCall), [*locations dns authorizations list*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationListCall), [*locations dns authorizations patch*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationPatchCall), [*locations get*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/api::ProjectLocationOperationListCall) + * [*locations certificate issuance configs create*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateIssuanceConfigCreateCall), 
[*locations certificate issuance configs delete*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateIssuanceConfigDeleteCall), [*locations certificate issuance configs get*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateIssuanceConfigGetCall), [*locations certificate issuance configs list*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateIssuanceConfigListCall), [*locations certificate maps certificate map entries create*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryCreateCall), [*locations certificate maps certificate map entries delete*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryDeleteCall), [*locations certificate maps certificate map entries get*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryGetCall), [*locations certificate maps certificate map entries list*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryListCall), [*locations certificate maps certificate map entries patch*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCertificateMapEntryPatchCall), [*locations certificate maps create*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapCreateCall), [*locations certificate maps delete*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapDeleteCall), [*locations certificate maps 
get*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapGetCall), [*locations certificate maps list*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapListCall), [*locations certificate maps patch*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateMapPatchCall), [*locations certificates create*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateCreateCall), [*locations certificates delete*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateDeleteCall), [*locations certificates get*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateGetCall), [*locations certificates list*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificateListCall), [*locations certificates patch*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationCertificatePatchCall), [*locations dns authorizations create*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationCreateCall), [*locations dns authorizations delete*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationDeleteCall), [*locations dns authorizations get*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationGetCall), [*locations dns authorizations list*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationListCall), [*locations dns authorizations 
patch*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationDnsAuthorizationPatchCall), [*locations get*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/CertificateManager)** +* **[Hub](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/CertificateManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::CallBuilder) -* **[Resources](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::CallBuilder) +* **[Resources](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::Part)** + * **[Parts](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -139,17 +139,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -159,29 +159,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::Delegate) to the -[Method Builder](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::Delegate) to the +[Method Builder](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::RequestValue) and -[decodable](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::RequestValue) and +[decodable](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-certificatemanager1/5.0.2-beta-1+20230104/google_certificatemanager1/client::RequestValue) are moved +* [request values](https://docs.rs/google-certificatemanager1/5.0.2+20230104/google_certificatemanager1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/certificatemanager1/src/api.rs b/gen/certificatemanager1/src/api.rs index 2c18e3b1bd..c6edc588e4 100644 --- a/gen/certificatemanager1/src/api.rs +++ b/gen/certificatemanager1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CertificateManager { CertificateManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://certificatemanager.googleapis.com/".to_string(), _root_url: "https://certificatemanager.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CertificateManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/certificatemanager1/src/client.rs b/gen/certificatemanager1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/certificatemanager1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// 
Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/certificatemanager1/src/lib.rs b/gen/certificatemanager1/src/lib.rs index b3140082b5..b99e4ae229 100644 --- a/gen/certificatemanager1/src/lib.rs +++ b/gen/certificatemanager1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Certificate Manager* crate version *5.0.2-beta-1+20230104*, where *20230104* is the exact revision of the *certificatemanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Certificate Manager* crate version *5.0.2+20230104*, where *20230104* is the exact revision of the *certificatemanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Certificate Manager* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/certificate-manager). diff --git a/gen/chromemanagement1-cli/Cargo.toml b/gen/chromemanagement1-cli/Cargo.toml index 11447976bf..a0fd31ff0d 100644 --- a/gen/chromemanagement1-cli/Cargo.toml +++ b/gen/chromemanagement1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-chromemanagement1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Chrome Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/chromemanagement1-cli" @@ -20,13 +20,13 @@ name = "chromemanagement1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-chromemanagement1] path = "../chromemanagement1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/chromemanagement1-cli/README.md b/gen/chromemanagement1-cli/README.md index 8c473a24ce..78af74dd4a 100644 --- a/gen/chromemanagement1-cli/README.md +++ 
b/gen/chromemanagement1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Chrome Management* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Chrome Management* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash chromemanagement1 [options] @@ -34,10 +34,15 @@ chromemanagement1 [options] apps-chrome-get [-p ]... [-o ] apps-count-chrome-app-requests [-p ]... [-o ] apps-web-get [-p ]... [-o ] + reports-count-chrome-devices-reaching-auto-expiration-date [-p ]... [-o ] + reports-count-chrome-devices-that-need-attention [-p ]... [-o ] + reports-count-chrome-hardware-fleet-devices [-p ]... [-o ] reports-count-chrome-versions [-p ]... [-o ] reports-count-installed-apps [-p ]... [-o ] reports-find-installed-app-devices [-p ]... [-o ] + telemetry-devices-get [-p ]... [-o ] telemetry-devices-list [-p ]... [-o ] + telemetry-events-list [-p ]... 
[-o ] chromemanagement1 --help Configuration: diff --git a/gen/chromemanagement1-cli/mkdocs.yml b/gen/chromemanagement1-cli/mkdocs.yml index 9430771159..c98c0cc3e6 100644 --- a/gen/chromemanagement1-cli/mkdocs.yml +++ b/gen/chromemanagement1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Chrome Management v4.0.1+20220305 +site_name: Chrome Management v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-chromemanagement1-cli site_description: A complete library to interact with Chrome Management (protocol v1) @@ -7,16 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/chromemanagement docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['customers_apps-android-get.md', 'Customers', 'Apps Android Get'] -- ['customers_apps-chrome-get.md', 'Customers', 'Apps Chrome Get'] -- ['customers_apps-count-chrome-app-requests.md', 'Customers', 'Apps Count Chrome App Requests'] -- ['customers_apps-web-get.md', 'Customers', 'Apps Web Get'] -- ['customers_reports-count-chrome-versions.md', 'Customers', 'Reports Count Chrome Versions'] -- ['customers_reports-count-installed-apps.md', 'Customers', 'Reports Count Installed Apps'] -- ['customers_reports-find-installed-app-devices.md', 'Customers', 'Reports Find Installed App Devices'] -- ['customers_telemetry-devices-list.md', 'Customers', 'Telemetry Devices List'] +nav: +- Home: 'index.md' +- 'Customers': + - 'Apps Android Get': 'customers_apps-android-get.md' + - 'Apps Chrome Get': 'customers_apps-chrome-get.md' + - 'Apps Count Chrome App Requests': 'customers_apps-count-chrome-app-requests.md' + - 'Apps Web Get': 'customers_apps-web-get.md' + - 'Reports Count Chrome Devices Reaching Auto Expiration Date': 'customers_reports-count-chrome-devices-reaching-auto-expiration-date.md' + - 'Reports Count Chrome Devices That Need Attention': 'customers_reports-count-chrome-devices-that-need-attention.md' + - 'Reports Count Chrome Hardware Fleet Devices': 
'customers_reports-count-chrome-hardware-fleet-devices.md' + - 'Reports Count Chrome Versions': 'customers_reports-count-chrome-versions.md' + - 'Reports Count Installed Apps': 'customers_reports-count-installed-apps.md' + - 'Reports Find Installed App Devices': 'customers_reports-find-installed-app-devices.md' + - 'Telemetry Devices Get': 'customers_telemetry-devices-get.md' + - 'Telemetry Devices List': 'customers_telemetry-devices-list.md' + - 'Telemetry Events List': 'customers_telemetry-events-list.md' theme: readthedocs diff --git a/gen/chromemanagement1-cli/src/client.rs b/gen/chromemanagement1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/chromemanagement1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/chromemanagement1-cli/src/main.rs b/gen/chromemanagement1-cli/src/main.rs index 0e8f4c40c8..62da832ea7 100644 --- a/gen/chromemanagement1-cli/src/main.rs +++ b/gen/chromemanagement1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_chromemanagement1::{api, Error, oauth2}; +use google_chromemanagement1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -165,7 +164,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "org-unit-id" => { call = call.org_unit_id(value.unwrap_or("")); @@ -272,6 +271,186 @@ where } } + async fn _customers_reports_count_chrome_devices_reaching_auto_expiration_date(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.customers().reports_count_chrome_devices_reaching_auto_expiration_date(opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "org-unit-id" => { + call = call.org_unit_id(value.unwrap_or("")); + }, + "min-aue-date" => { + call = call.min_aue_date(value.unwrap_or("")); + }, + "max-aue-date" => { + call = 
call.max_aue_date(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["max-aue-date", "min-aue-date", "org-unit-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_reports_count_chrome_devices_that_need_attention(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.customers().reports_count_chrome_devices_that_need_attention(opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "read-mask" => { + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "org-unit-id" => { + 
call = call.org_unit_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["org-unit-id", "read-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_reports_count_chrome_hardware_fleet_devices(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.customers().reports_count_chrome_hardware_fleet_devices(opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "read-mask" => { + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "org-unit-id" => { + call = 
call.org_unit_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["org-unit-id", "read-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _customers_reports_count_chrome_versions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.customers().reports_count_chrome_versions(opt.value_of("customer").unwrap_or("")); @@ -282,7 +461,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "org-unit-id" => { call = call.org_unit_id(value.unwrap_or("")); @@ -347,7 +526,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "org-unit-id" => { call = call.org_unit_id(value.unwrap_or("")); @@ -415,7 +594,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "org-unit-id" => { call = call.org_unit_id(value.unwrap_or("")); @@ -479,6 +658,62 @@ where } } + async fn _customers_telemetry_devices_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.customers().telemetry_devices_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "read-mask" => { + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["read-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, 
+ Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _customers_telemetry_devices_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.customers().telemetry_devices_list(opt.value_of("parent").unwrap_or("")); @@ -486,13 +721,78 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token", "read-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_telemetry_events_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.customers().telemetry_events_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "read-mask" => { + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -563,6 +863,15 @@ where ("apps-web-get", Some(opt)) => { call_result = self._customers_apps_web_get(opt, dry_run, &mut err).await; }, + ("reports-count-chrome-devices-reaching-auto-expiration-date", Some(opt)) => { + call_result = self._customers_reports_count_chrome_devices_reaching_auto_expiration_date(opt, dry_run, &mut err).await; + }, + ("reports-count-chrome-devices-that-need-attention", Some(opt)) => { + call_result = 
self._customers_reports_count_chrome_devices_that_need_attention(opt, dry_run, &mut err).await; + }, + ("reports-count-chrome-hardware-fleet-devices", Some(opt)) => { + call_result = self._customers_reports_count_chrome_hardware_fleet_devices(opt, dry_run, &mut err).await; + }, ("reports-count-chrome-versions", Some(opt)) => { call_result = self._customers_reports_count_chrome_versions(opt, dry_run, &mut err).await; }, @@ -572,9 +881,15 @@ where ("reports-find-installed-app-devices", Some(opt)) => { call_result = self._customers_reports_find_installed_app_devices(opt, dry_run, &mut err).await; }, + ("telemetry-devices-get", Some(opt)) => { + call_result = self._customers_telemetry_devices_get(opt, dry_run, &mut err).await; + }, ("telemetry-devices-list", Some(opt)) => { call_result = self._customers_telemetry_devices_list(opt, dry_run, &mut err).await; }, + ("telemetry-events-list", Some(opt)) => { + call_result = self._customers_telemetry_events_list(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("customers".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -654,7 +969,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("customers", "methods: 'apps-android-get', 'apps-chrome-get', 'apps-count-chrome-app-requests', 'apps-web-get', 'reports-count-chrome-versions', 'reports-count-installed-apps', 'reports-find-installed-app-devices' and 'telemetry-devices-list'", vec![ + ("customers", "methods: 'apps-android-get', 'apps-chrome-get', 'apps-count-chrome-app-requests', 'apps-web-get', 'reports-count-chrome-devices-reaching-auto-expiration-date', 'reports-count-chrome-devices-that-need-attention', 'reports-count-chrome-hardware-fleet-devices', 'reports-count-chrome-versions', 'reports-count-installed-apps', 'reports-find-installed-app-devices', 'telemetry-devices-get', 'telemetry-devices-list' and 'telemetry-events-list'", vec![ ("apps-android-get", Some(r##"Get a specific app for a 
customer by its resource name."##), "Details at http://byron.github.io/google-apis-rs/google_chromemanagement1_cli/customers_apps-android-get", @@ -737,6 +1052,72 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("reports-count-chrome-devices-reaching-auto-expiration-date", + Some(r##"Generate report of the number of devices expiring in each month of the selected time frame. Devices are grouped by auto update expiration date and model. Further information can be found [here](https://support.google.com/chrome/a/answer/10564947)."##), + "Details at http://byron.github.io/google-apis-rs/google_chromemanagement1_cli/customers_reports-count-chrome-devices-reaching-auto-expiration-date", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. The customer ID or "my_customer" prefixed with "customers/"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("reports-count-chrome-devices-that-need-attention", + Some(r##"Counts of ChromeOS devices that have not synced policies or have lacked user activity in the past 28 days, are out of date, or are not complaint. Further information can be found here https://support.google.com/chrome/a/answer/10564947"##), + "Details at http://byron.github.io/google-apis-rs/google_chromemanagement1_cli/customers_reports-count-chrome-devices-that-need-attention", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. 
The customer ID or "my_customer" prefixed with "customers/"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("reports-count-chrome-hardware-fleet-devices", + Some(r##"Counts of devices with a specific hardware specification from the requested hardware type (for example model name, processor type). Further information can be found here https://support.google.com/chrome/a/answer/10564947"##), + "Details at http://byron.github.io/google-apis-rs/google_chromemanagement1_cli/customers_reports-count-chrome-hardware-fleet-devices", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. The customer ID or "my_customer"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -803,6 +1184,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("telemetry-devices-get", + Some(r##"Get telemetry device."##), + "Details at http://byron.github.io/google-apis-rs/google_chromemanagement1_cli/customers_telemetry-devices-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the `TelemetryDevice` to return."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -825,6 +1228,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("telemetry-events-list", + Some(r##"List telemetry events."##), + "Details at http://byron.github.io/google-apis-rs/google_chromemanagement1_cli/customers_telemetry-events-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Customer id or "my_customer" to use the customer associated to the account making the request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -837,7 +1262,7 @@ async fn main() { let mut app = App::new("chromemanagement1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("The Chrome Management API is a suite of services that allows Chrome administrators to view, manage and gain insights on their Chrome OS and Chrome Browser devices.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_chromemanagement1_cli") .arg(Arg::with_name("url") diff --git a/gen/chromemanagement1/Cargo.toml b/gen/chromemanagement1/Cargo.toml index a1bdaeb099..c5d47e09c4 100644 --- a/gen/chromemanagement1/Cargo.toml +++ b/gen/chromemanagement1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-chromemanagement1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = 
["Sebastian Thiel "] description = "A complete library to interact with Chrome Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/chromemanagement1" homepage = "http://developers.google.com/chrome/management/" -documentation = "https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-chromemanagement1/5.0.2+20230123" license = "MIT" keywords = ["chromemanagement", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/chromemanagement1/README.md b/gen/chromemanagement1/README.md index c23b45b631..040680157a 100644 --- a/gen/chromemanagement1/README.md +++ b/gen/chromemanagement1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-chromemanagement1` library allows access to all features of the *Google Chrome Management* service. -This documentation was generated from *Chrome Management* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *chromemanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Chrome Management* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *chromemanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Chrome Management* *v1* API can be found at the [official documentation site](http://developers.google.com/chrome/management/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/ChromeManagement) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/ChromeManagement) ... 
* customers - * [*apps android get*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerAppAndroidGetCall), [*apps chrome get*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerAppChromeGetCall), [*apps count chrome app requests*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerAppCountChromeAppRequestCall), [*apps web get*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerAppWebGetCall), [*reports count chrome devices reaching auto expiration date*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerReportCountChromeDevicesReachingAutoExpirationDateCall), [*reports count chrome devices that need attention*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerReportCountChromeDevicesThatNeedAttentionCall), [*reports count chrome hardware fleet devices*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerReportCountChromeHardwareFleetDeviceCall), [*reports count chrome versions*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerReportCountChromeVersionCall), [*reports count installed apps*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerReportCountInstalledAppCall), [*reports find installed app devices*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerReportFindInstalledAppDeviceCall), [*telemetry devices get*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerTelemetryDeviceGetCall), [*telemetry devices 
list*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerTelemetryDeviceListCall) and [*telemetry events list*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/api::CustomerTelemetryEventListCall) + * [*apps android get*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerAppAndroidGetCall), [*apps chrome get*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerAppChromeGetCall), [*apps count chrome app requests*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerAppCountChromeAppRequestCall), [*apps web get*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerAppWebGetCall), [*reports count chrome devices reaching auto expiration date*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerReportCountChromeDevicesReachingAutoExpirationDateCall), [*reports count chrome devices that need attention*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerReportCountChromeDevicesThatNeedAttentionCall), [*reports count chrome hardware fleet devices*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerReportCountChromeHardwareFleetDeviceCall), [*reports count chrome versions*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerReportCountChromeVersionCall), [*reports count installed apps*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerReportCountInstalledAppCall), [*reports find installed app devices*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerReportFindInstalledAppDeviceCall), [*telemetry devices 
get*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerTelemetryDeviceGetCall), [*telemetry devices list*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerTelemetryDeviceListCall) and [*telemetry events list*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/api::CustomerTelemetryEventListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/ChromeManagement)** +* **[Hub](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/ChromeManagement)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::CallBuilder) -* **[Resources](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::CallBuilder) +* **[Resources](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::Part)** + * 
**[Parts](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -120,17 +120,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -140,29 +140,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::Delegate) to the -[Method Builder](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::Delegate) to the +[Method Builder](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::RequestValue) and -[decodable](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::RequestValue) and +[decodable](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-chromemanagement1/5.0.2-beta-1+20230123/google_chromemanagement1/client::RequestValue) are moved +* [request values](https://docs.rs/google-chromemanagement1/5.0.2+20230123/google_chromemanagement1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/chromemanagement1/src/api.rs b/gen/chromemanagement1/src/api.rs index e5cbe6bb80..dcd884d2ce 100644 --- a/gen/chromemanagement1/src/api.rs +++ b/gen/chromemanagement1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> ChromeManagement { ChromeManagement { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://chromemanagement.googleapis.com/".to_string(), _root_url: "https://chromemanagement.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> ChromeManagement { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/chromemanagement1/src/client.rs b/gen/chromemanagement1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/chromemanagement1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/chromemanagement1/src/lib.rs b/gen/chromemanagement1/src/lib.rs index 6280f81ea9..ee5d3b6c5f 100644 --- a/gen/chromemanagement1/src/lib.rs +++ b/gen/chromemanagement1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Chrome Management* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *chromemanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Chrome Management* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *chromemanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Chrome Management* *v1* API can be found at the //! [official documentation site](http://developers.google.com/chrome/management/). diff --git a/gen/chromepolicy1-cli/Cargo.toml b/gen/chromepolicy1-cli/Cargo.toml index 6dc038beda..cfe9a5a25d 100644 --- a/gen/chromepolicy1-cli/Cargo.toml +++ b/gen/chromepolicy1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-chromepolicy1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Chrome Policy (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/chromepolicy1-cli" @@ -20,13 +20,13 @@ name = "chromepolicy1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-chromepolicy1] path = "../chromepolicy1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/chromepolicy1-cli/README.md b/gen/chromepolicy1-cli/README.md index e1ca9f70bb..bca00073eb 100644 --- a/gen/chromepolicy1-cli/README.md +++ b/gen/chromepolicy1-cli/README.md @@ -25,11 +25,19 @@ Find the 
source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Chrome Policy* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Chrome Policy* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash chromepolicy1 [options] customers + policies-groups-batch-delete (-r )... [-p ]... [-o ] + policies-groups-batch-modify (-r )... [-p ]... [-o ] + policies-groups-list-group-priority-ordering (-r )... [-p ]... [-o ] + policies-groups-update-group-priority-ordering (-r )... [-p ]... [-o ] + policies-networks-define-certificate (-r )... [-p ]... [-o ] + policies-networks-define-network (-r )... [-p ]... [-o ] + policies-networks-remove-certificate (-r )... [-p ]... [-o ] + policies-networks-remove-network (-r )... [-p ]... [-o ] policies-orgunits-batch-inherit (-r )... [-p ]... [-o ] policies-orgunits-batch-modify (-r )... [-p ]... [-o ] policies-resolve (-r )... [-p ]... 
[-o ] diff --git a/gen/chromepolicy1-cli/mkdocs.yml b/gen/chromepolicy1-cli/mkdocs.yml index fdd90cf514..4a67293afa 100644 --- a/gen/chromepolicy1-cli/mkdocs.yml +++ b/gen/chromepolicy1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Chrome Policy v4.0.1+20220305 +site_name: Chrome Policy v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-chromepolicy1-cli site_description: A complete library to interact with Chrome Policy (protocol v1) @@ -7,14 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/chromepolicy1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['customers_policies-orgunits-batch-inherit.md', 'Customers', 'Policies Orgunits Batch Inherit'] -- ['customers_policies-orgunits-batch-modify.md', 'Customers', 'Policies Orgunits Batch Modify'] -- ['customers_policies-resolve.md', 'Customers', 'Policies Resolve'] -- ['customers_policy-schemas-get.md', 'Customers', 'Policy Schemas Get'] -- ['customers_policy-schemas-list.md', 'Customers', 'Policy Schemas List'] -- ['media_upload.md', 'Media', 'Upload'] +nav: +- Home: 'index.md' +- 'Customers': + - 'Policies Groups Batch Delete': 'customers_policies-groups-batch-delete.md' + - 'Policies Groups Batch Modify': 'customers_policies-groups-batch-modify.md' + - 'Policies Groups List Group Priority Ordering': 'customers_policies-groups-list-group-priority-ordering.md' + - 'Policies Groups Update Group Priority Ordering': 'customers_policies-groups-update-group-priority-ordering.md' + - 'Policies Networks Define Certificate': 'customers_policies-networks-define-certificate.md' + - 'Policies Networks Define Network': 'customers_policies-networks-define-network.md' + - 'Policies Networks Remove Certificate': 'customers_policies-networks-remove-certificate.md' + - 'Policies Networks Remove Network': 'customers_policies-networks-remove-network.md' + - 'Policies Orgunits Batch Inherit': 'customers_policies-orgunits-batch-inherit.md' + - 'Policies Orgunits Batch 
Modify': 'customers_policies-orgunits-batch-modify.md' + - 'Policies Resolve': 'customers_policies-resolve.md' + - 'Policy Schemas Get': 'customers_policy-schemas-get.md' + - 'Policy Schemas List': 'customers_policy-schemas-list.md' +- 'Media': + - 'Upload': 'media_upload.md' theme: readthedocs diff --git a/gen/chromepolicy1-cli/src/client.rs b/gen/chromepolicy1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/chromepolicy1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/chromepolicy1-cli/src/main.rs b/gen/chromepolicy1-cli/src/main.rs index 61172049ec..e0e8e33ab7 100644 --- a/gen/chromepolicy1-cli/src/main.rs +++ b/gen/chromepolicy1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_chromepolicy1::{api, Error, oauth2}; +use google_chromepolicy1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,694 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _customers_policies_groups_batch_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChromePolicyVersionsV1BatchDeleteGroupPoliciesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().policies_groups_batch_delete(request, opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_policies_groups_batch_modify(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChromePolicyVersionsV1BatchModifyGroupPoliciesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().policies_groups_batch_modify(request, opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_policies_groups_list_group_priority_ordering(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy-namespace" => Some(("policyNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy-target-key.additional-target-keys" => Some(("policyTargetKey.additionalTargetKeys", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "policy-target-key.target-resource" => Some(("policyTargetKey.targetResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-target-keys", "policy-namespace", "policy-target-key", "target-resource"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChromePolicyVersionsV1ListGroupPriorityOrderingRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().policies_groups_list_group_priority_ordering(request, opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_policies_groups_update_group_priority_ordering(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "group-ids" => Some(("groupIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "policy-namespace" => Some(("policyNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy-target-key.additional-target-keys" => Some(("policyTargetKey.additionalTargetKeys", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "policy-target-key.target-resource" => Some(("policyTargetKey.targetResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-target-keys", "group-ids", "policy-namespace", "policy-target-key", "target-resource"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChromePolicyVersionsV1UpdateGroupPriorityOrderingRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().policies_groups_update_group_priority_ordering(request, opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_policies_networks_define_certificate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "ceritificate-name" => Some(("ceritificateName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "certificate" => Some(("certificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-resource" => Some(("targetResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["ceritificate-name", "certificate", "target-resource"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChromePolicyVersionsV1DefineCertificateRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().policies_networks_define_certificate(request, opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), 
io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_policies_networks_define_network(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-resource" => Some(("targetResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["name", "target-resource"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChromePolicyVersionsV1DefineNetworkRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().policies_networks_define_network(request, opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut 
response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_policies_networks_remove_certificate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "network-id" => Some(("networkId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-resource" => Some(("targetResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["network-id", "target-resource"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChromePolicyVersionsV1RemoveCertificateRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().policies_networks_remove_certificate(request, opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_policies_networks_remove_network(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "network-id" => Some(("networkId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-resource" => Some(("targetResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["network-id", "target-resource"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChromePolicyVersionsV1RemoveNetworkRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().policies_networks_remove_network(request, opt.value_of("customer").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _customers_policies_orgunits_batch_inherit(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -84,7 +771,7 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::GoogleChromePolicyV1BatchInheritOrgUnitPoliciesRequest = json::value::from_value(object).unwrap(); + let mut request: api::GoogleChromePolicyVersionsV1BatchInheritOrgUnitPoliciesRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.customers().policies_orgunits_batch_inherit(request, opt.value_of("customer").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); @@ -168,7 +855,7 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::GoogleChromePolicyV1BatchModifyOrgUnitPoliciesRequest = json::value::from_value(object).unwrap(); + let mut request: api::GoogleChromePolicyVersionsV1BatchModifyOrgUnitPoliciesRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.customers().policies_orgunits_batch_modify(request, opt.value_of("customer").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); @@ -257,7 +944,7 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::GoogleChromePolicyV1ResolveRequest = json::value::from_value(object).unwrap(); + 
let mut request: api::GoogleChromePolicyVersionsV1ResolveRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.customers().policies_resolve(request, opt.value_of("customer").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); @@ -370,7 +1057,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -456,7 +1143,7 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::GoogleChromePolicyV1UploadPolicyFileRequest = json::value::from_value(object).unwrap(); + let mut request: api::GoogleChromePolicyVersionsV1UploadPolicyFileRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.media().upload(request, opt.value_of("customer").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); @@ -517,6 +1204,30 @@ where match self.opt.subcommand() { ("customers", Some(opt)) => { match opt.subcommand() { + ("policies-groups-batch-delete", Some(opt)) => { + call_result = self._customers_policies_groups_batch_delete(opt, dry_run, &mut err).await; + }, + ("policies-groups-batch-modify", Some(opt)) => { + call_result = self._customers_policies_groups_batch_modify(opt, dry_run, &mut err).await; + }, + ("policies-groups-list-group-priority-ordering", Some(opt)) => { + call_result = self._customers_policies_groups_list_group_priority_ordering(opt, dry_run, &mut err).await; + }, + ("policies-groups-update-group-priority-ordering", Some(opt)) => { + call_result = 
self._customers_policies_groups_update_group_priority_ordering(opt, dry_run, &mut err).await; + }, + ("policies-networks-define-certificate", Some(opt)) => { + call_result = self._customers_policies_networks_define_certificate(opt, dry_run, &mut err).await; + }, + ("policies-networks-define-network", Some(opt)) => { + call_result = self._customers_policies_networks_define_network(opt, dry_run, &mut err).await; + }, + ("policies-networks-remove-certificate", Some(opt)) => { + call_result = self._customers_policies_networks_remove_certificate(opt, dry_run, &mut err).await; + }, + ("policies-networks-remove-network", Some(opt)) => { + call_result = self._customers_policies_networks_remove_network(opt, dry_run, &mut err).await; + }, ("policies-orgunits-batch-inherit", Some(opt)) => { call_result = self._customers_policies_orgunits_batch_inherit(opt, dry_run, &mut err).await; }, @@ -623,7 +1334,231 @@ async fn main() { let mut exit_status = 0i32; let upload_value_names = ["mode", "file"]; let arg_data = [ - ("customers", "methods: 'policies-orgunits-batch-inherit', 'policies-orgunits-batch-modify', 'policies-resolve', 'policy-schemas-get' and 'policy-schemas-list'", vec![ + ("customers", "methods: 'policies-groups-batch-delete', 'policies-groups-batch-modify', 'policies-groups-list-group-priority-ordering', 'policies-groups-update-group-priority-ordering', 'policies-networks-define-certificate', 'policies-networks-define-network', 'policies-networks-remove-certificate', 'policies-networks-remove-network', 'policies-orgunits-batch-inherit', 'policies-orgunits-batch-modify', 'policies-resolve', 'policy-schemas-get' and 'policy-schemas-list'", vec![ + ("policies-groups-batch-delete", + Some(r##"Delete multiple policy values that are applied to a specific group. All targets must have the same target format. 
That is to say that they must point to the same target resource and must have the same keys specified in `additionalTargetKeyNames`, though the values for those keys may be different. On failure the request will return the error details as part of the google.rpc.Status."##), + "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-groups-batch-delete", + vec![ + (Some(r##"customer"##), + None, + Some(r##"ID of the Google Workspace account or literal "my_customer" for the customer associated to the request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("policies-groups-batch-modify", + Some(r##"Modify multiple policy values that are applied to a specific group. All targets must have the same target format. That is to say that they must point to the same target resource and must have the same keys specified in `additionalTargetKeyNames`, though the values for those keys may be different. 
On failure the request will return the error details as part of the google.rpc.Status."##), + "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-groups-batch-modify", + vec![ + (Some(r##"customer"##), + None, + Some(r##"ID of the Google Workspace account or literal "my_customer" for the customer associated to the request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("policies-groups-list-group-priority-ordering", + Some(r##"Retrieve a group priority ordering for an app. The target app must be supplied in `additionalTargetKeyNames` in the PolicyTargetKey. On failure the request will return the error details as part of the google.rpc.Status."##), + "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-groups-list-group-priority-ordering", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. 
ID of the Google Workspace account or literal "my_customer" for the customer associated to the request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("policies-groups-update-group-priority-ordering", + Some(r##"Update a group priority ordering for an app. The target app must be supplied in `additionalTargetKeyNames` in the PolicyTargetKey. On failure the request will return the error details as part of the google.rpc.Status."##), + "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-groups-update-group-priority-ordering", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. ID of the Google Workspace account or literal "my_customer" for the customer associated to the request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("policies-networks-define-certificate", + Some(r##"Creates a certificate at a specified OU for a customer."##), + "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-networks-define-certificate", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. 
The customer for which the certificate will apply."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("policies-networks-define-network", + Some(r##"Define a new network."##), + "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-networks-define-network", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. The customer who will own this new network."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("policies-networks-remove-certificate", + Some(r##"Remove an existing certificate by guid."##), + "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-networks-remove-certificate", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. 
The customer whose certificate will be removed."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("policies-networks-remove-network", + Some(r##"Remove an existing network by guid."##), + "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-networks-remove-network", + vec![ + (Some(r##"customer"##), + None, + Some(r##"Required. The customer whose network will be removed."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("policies-orgunits-batch-inherit", Some(r##"Modify multiple policy values that are applied to a specific org unit so that they now inherit the value from a parent (if applicable). All targets must have the same target format. That is to say that they must point to the same target resource and must have the same keys specified in `additionalTargetKeyNames`, though the values for those keys may be different. 
On failure the request will return the error details as part of the google.rpc.Status."##), "Details at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli/customers_policies-orgunits-batch-inherit", @@ -795,7 +1730,7 @@ async fn main() { let mut app = App::new("chromepolicy1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("The Chrome Policy API is a suite of services that allows Chrome administrators to control the policies applied to their managed Chrome OS devices and Chrome browsers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_chromepolicy1_cli") .arg(Arg::with_name("url") diff --git a/gen/chromepolicy1/Cargo.toml b/gen/chromepolicy1/Cargo.toml index 55beb50a68..b77440d653 100644 --- a/gen/chromepolicy1/Cargo.toml +++ b/gen/chromepolicy1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-chromepolicy1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Chrome Policy (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/chromepolicy1" homepage = "http://developers.google.com/chrome/policy" -documentation = "https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-chromepolicy1/5.0.2+20230123" license = "MIT" keywords = ["chromepolicy", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/chromepolicy1/README.md b/gen/chromepolicy1/README.md index 6e8817580f..83299d6079 100644 --- a/gen/chromepolicy1/README.md +++ b/gen/chromepolicy1/README.md @@ -5,23 +5,23 @@ DO NOT EDIT ! --> The `google-chromepolicy1` library allows access to all features of the *Google Chrome Policy* service. 
-This documentation was generated from *Chrome Policy* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *chromepolicy:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Chrome Policy* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *chromepolicy:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Chrome Policy* *v1* API can be found at the [official documentation site](http://developers.google.com/chrome/policy). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/ChromePolicy) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/ChromePolicy) ... * customers - * [*policies groups batch delete*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyGroupBatchDeleteCall), [*policies groups batch modify*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyGroupBatchModifyCall), [*policies groups list group priority ordering*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyGroupListGroupPriorityOrderingCall), [*policies groups update group priority ordering*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyGroupUpdateGroupPriorityOrderingCall), [*policies networks define certificate*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyNetworkDefineCertificateCall), [*policies networks define network*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyNetworkDefineNetworkCall), 
[*policies networks remove certificate*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyNetworkRemoveCertificateCall), [*policies networks remove network*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyNetworkRemoveNetworkCall), [*policies orgunits batch inherit*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyOrgunitBatchInheritCall), [*policies orgunits batch modify*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyOrgunitBatchModifyCall), [*policies resolve*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicyResolveCall), [*policy schemas get*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicySchemaGetCall) and [*policy schemas list*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::CustomerPolicySchemaListCall) + * [*policies groups batch delete*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyGroupBatchDeleteCall), [*policies groups batch modify*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyGroupBatchModifyCall), [*policies groups list group priority ordering*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyGroupListGroupPriorityOrderingCall), [*policies groups update group priority ordering*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyGroupUpdateGroupPriorityOrderingCall), [*policies networks define certificate*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyNetworkDefineCertificateCall), [*policies networks define 
network*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyNetworkDefineNetworkCall), [*policies networks remove certificate*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyNetworkRemoveCertificateCall), [*policies networks remove network*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyNetworkRemoveNetworkCall), [*policies orgunits batch inherit*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyOrgunitBatchInheritCall), [*policies orgunits batch modify*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyOrgunitBatchModifyCall), [*policies resolve*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicyResolveCall), [*policy schemas get*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicySchemaGetCall) and [*policy schemas list*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::CustomerPolicySchemaListCall) * media - * [*upload*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::MediaUploadCall) + * [*upload*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::MediaUploadCall) Upload supported by ... -* [*upload media*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/api::MediaUploadCall) +* [*upload media*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/api::MediaUploadCall) @@ -29,17 +29,17 @@ Upload supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/ChromePolicy)** +* **[Hub](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/ChromePolicy)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::CallBuilder) -* **[Resources](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::CallBuilder) +* **[Resources](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::Part)** + * **[Parts](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::Delegate) to the -[Method Builder](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::Delegate) to the +[Method Builder](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::RequestValue) and -[decodable](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::RequestValue) and +[decodable](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-chromepolicy1/5.0.2-beta-1+20230123/google_chromepolicy1/client::RequestValue) are moved +* [request values](https://docs.rs/google-chromepolicy1/5.0.2+20230123/google_chromepolicy1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/chromepolicy1/src/api.rs b/gen/chromepolicy1/src/api.rs index 6279d1e55d..47aa032592 100644 --- a/gen/chromepolicy1/src/api.rs +++ b/gen/chromepolicy1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> ChromePolicy { ChromePolicy { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://chromepolicy.googleapis.com/".to_string(), _root_url: "https://chromepolicy.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> ChromePolicy { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/chromepolicy1/src/client.rs b/gen/chromepolicy1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/chromepolicy1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/chromepolicy1/src/lib.rs b/gen/chromepolicy1/src/lib.rs index a897559635..d14ce83430 100644 --- a/gen/chromepolicy1/src/lib.rs +++ b/gen/chromepolicy1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Chrome Policy* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *chromepolicy:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Chrome Policy* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *chromepolicy:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Chrome Policy* *v1* API can be found at the //! [official documentation site](http://developers.google.com/chrome/policy). diff --git a/gen/chromeuxreport1-cli/Cargo.toml b/gen/chromeuxreport1-cli/Cargo.toml index 35be5d6151..3384d48e27 100644 --- a/gen/chromeuxreport1-cli/Cargo.toml +++ b/gen/chromeuxreport1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-chromeuxreport1-cli" -version = "4.0.1+20220302" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Chrome UX Report (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/chromeuxreport1-cli" @@ -20,13 +20,13 @@ name = "chromeuxreport1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-chromeuxreport1] path = "../chromeuxreport1" -version = "4.0.1+20220302" +version = "5.0.2+20230117" + diff --git a/gen/chromeuxreport1-cli/README.md b/gen/chromeuxreport1-cli/README.md index 85c79e52e3..ddba4e9d0b 100644 --- a/gen/chromeuxreport1-cli/README.md +++ b/gen/chromeuxreport1-cli/README.md @@ -25,7 +25,7 @@ Find 
the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Chrome UX Report* API at revision *20220302*. The CLI is at version *4.0.1*. +This documentation was generated from the *Chrome UX Report* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash chromeuxreport1 [options] diff --git a/gen/chromeuxreport1-cli/mkdocs.yml b/gen/chromeuxreport1-cli/mkdocs.yml index 0b195cbb13..283516f658 100644 --- a/gen/chromeuxreport1-cli/mkdocs.yml +++ b/gen/chromeuxreport1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Chrome UX Report v4.0.1+20220302 +site_name: Chrome UX Report v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-chromeuxreport1-cli site_description: A complete library to interact with Chrome UX Report (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/chromeuxreport1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['records_query-record.md', 'Records', 'Query Record'] +nav: +- Home: 'index.md' +- 'Records': + - 'Query Record': 'records_query-record.md' theme: readthedocs diff --git a/gen/chromeuxreport1-cli/src/client.rs b/gen/chromeuxreport1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/chromeuxreport1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// 
F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/chromeuxreport1-cli/src/main.rs b/gen/chromeuxreport1-cli/src/main.rs index 6899781729..e5866bcb70 100644 --- a/gen/chromeuxreport1-cli/src/main.rs +++ b/gen/chromeuxreport1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_chromeuxreport1::{api, Error, oauth2}; +use google_chromeuxreport1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -255,7 +254,7 @@ async fn main() { let mut app = App::new("chromeuxreport1") .author("Sebastian Thiel ") - .version("4.0.1+20220302") + .version("5.0.2+20230117") .about("The Chrome UX Report API lets you view real user experience data for millions of websites. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_chromeuxreport1_cli") .arg(Arg::with_name("folder") diff --git a/gen/chromeuxreport1/Cargo.toml b/gen/chromeuxreport1/Cargo.toml index b85b73fb0f..66a8ba4823 100644 --- a/gen/chromeuxreport1/Cargo.toml +++ b/gen/chromeuxreport1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-chromeuxreport1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Chrome UX Report (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/chromeuxreport1" homepage = "https://developers.google.com/web/tools/chrome-user-experience-report/api/reference" -documentation = "https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-chromeuxreport1/5.0.2+20230117" license = "MIT" keywords = ["chromeuxreport", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/chromeuxreport1/README.md b/gen/chromeuxreport1/README.md index db41fc7665..d5b3dad92f 100644 --- a/gen/chromeuxreport1/README.md +++ b/gen/chromeuxreport1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-chromeuxreport1` library allows access to all features of the *Google Chrome UX Report* service. -This documentation was generated from *Chrome UX Report* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *chromeuxreport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Chrome UX Report* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *chromeuxreport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Chrome UX Report* *v1* API can be found at the [official documentation site](https://developers.google.com/web/tools/chrome-user-experience-report/api/reference). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/ChromeUXReport) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/ChromeUXReport) ... -* [records](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/api::Record) - * [*query record*](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/api::RecordQueryRecordCall) +* [records](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/api::Record) + * [*query record*](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/api::RecordQueryRecordCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/ChromeUXReport)** +* **[Hub](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/ChromeUXReport)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::CallBuilder) -* **[Resources](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::CallBuilder) +* **[Resources](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::Part)** + * **[Parts](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::Delegate) to the -[Method Builder](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::Delegate) to the +[Method Builder](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::RequestValue) and -[decodable](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::RequestValue) and +[decodable](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-chromeuxreport1/5.0.2-beta-1+20230117/google_chromeuxreport1/client::RequestValue) are moved +* [request values](https://docs.rs/google-chromeuxreport1/5.0.2+20230117/google_chromeuxreport1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/chromeuxreport1/src/api.rs b/gen/chromeuxreport1/src/api.rs index fb394ea166..6df0fd7bf1 100644 --- a/gen/chromeuxreport1/src/api.rs +++ b/gen/chromeuxreport1/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> ChromeUXReport { ChromeUXReport { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://chromeuxreport.googleapis.com/".to_string(), _root_url: "https://chromeuxreport.googleapis.com/".to_string(), } @@ -114,7 +114,7 @@ impl<'a, S> ChromeUXReport { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/chromeuxreport1/src/client.rs b/gen/chromeuxreport1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/chromeuxreport1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/chromeuxreport1/src/lib.rs b/gen/chromeuxreport1/src/lib.rs index 51f2d73fdf..38cdaff21c 100644 --- a/gen/chromeuxreport1/src/lib.rs +++ b/gen/chromeuxreport1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Chrome UX Report* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *chromeuxreport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Chrome UX Report* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *chromeuxreport:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Chrome UX Report* *v1* API can be found at the //! [official documentation site](https://developers.google.com/web/tools/chrome-user-experience-report/api/reference). diff --git a/gen/classroom1-cli/Cargo.toml b/gen/classroom1-cli/Cargo.toml index a188035b8d..4b953e7f7f 100644 --- a/gen/classroom1-cli/Cargo.toml +++ b/gen/classroom1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-classroom1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with classroom (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/classroom1-cli" @@ -20,13 +20,13 @@ name = "classroom1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-classroom1] path = "../classroom1" -version = "4.0.1+20220224" +version = "5.0.2+20230119" + diff --git a/gen/classroom1-cli/README.md b/gen/classroom1-cli/README.md index d886387466..2eb9c96ea2 100644 --- a/gen/classroom1-cli/README.md +++ b/gen/classroom1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *classroom* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *classroom* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash classroom1 [options] diff --git a/gen/classroom1-cli/mkdocs.yml b/gen/classroom1-cli/mkdocs.yml index 2600c75256..511f7c8bda 100644 --- a/gen/classroom1-cli/mkdocs.yml +++ b/gen/classroom1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: classroom v4.0.1+20220224 +site_name: classroom v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-classroom1-cli site_description: A complete library to interact with classroom (protocol v1) @@ -7,69 +7,73 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/classroom1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['courses_aliases-create.md', 'Courses', 'Aliases Create'] -- ['courses_aliases-delete.md', 'Courses', 'Aliases Delete'] -- ['courses_aliases-list.md', 'Courses', 'Aliases List'] -- ['courses_announcements-create.md', 'Courses', 'Announcements Create'] -- ['courses_announcements-delete.md', 'Courses', 'Announcements Delete'] -- ['courses_announcements-get.md', 'Courses', 'Announcements Get'] -- ['courses_announcements-list.md', 'Courses', 'Announcements List'] -- ['courses_announcements-modify-assignees.md', 'Courses', 'Announcements Modify Assignees'] -- ['courses_announcements-patch.md', 'Courses', 'Announcements Patch'] -- ['courses_course-work-create.md', 'Courses', 'Course Work Create'] -- ['courses_course-work-delete.md', 'Courses', 'Course Work Delete'] -- ['courses_course-work-get.md', 'Courses', 'Course Work Get'] -- ['courses_course-work-list.md', 'Courses', 'Course Work List'] -- ['courses_course-work-modify-assignees.md', 'Courses', 'Course Work Modify Assignees'] -- ['courses_course-work-patch.md', 'Courses', 'Course Work Patch'] -- 
['courses_course-work-student-submissions-get.md', 'Courses', 'Course Work Student Submissions Get'] -- ['courses_course-work-student-submissions-list.md', 'Courses', 'Course Work Student Submissions List'] -- ['courses_course-work-student-submissions-modify-attachments.md', 'Courses', 'Course Work Student Submissions Modify Attachments'] -- ['courses_course-work-student-submissions-patch.md', 'Courses', 'Course Work Student Submissions Patch'] -- ['courses_course-work-student-submissions-reclaim.md', 'Courses', 'Course Work Student Submissions Reclaim'] -- ['courses_course-work-student-submissions-return.md', 'Courses', 'Course Work Student Submissions Return'] -- ['courses_course-work-student-submissions-turn-in.md', 'Courses', 'Course Work Student Submissions Turn In'] -- ['courses_course-work-materials-create.md', 'Courses', 'Course Work Materials Create'] -- ['courses_course-work-materials-delete.md', 'Courses', 'Course Work Materials Delete'] -- ['courses_course-work-materials-get.md', 'Courses', 'Course Work Materials Get'] -- ['courses_course-work-materials-list.md', 'Courses', 'Course Work Materials List'] -- ['courses_course-work-materials-patch.md', 'Courses', 'Course Work Materials Patch'] -- ['courses_create.md', 'Courses', 'Create'] -- ['courses_delete.md', 'Courses', 'Delete'] -- ['courses_get.md', 'Courses', 'Get'] -- ['courses_list.md', 'Courses', 'List'] -- ['courses_patch.md', 'Courses', 'Patch'] -- ['courses_students-create.md', 'Courses', 'Students Create'] -- ['courses_students-delete.md', 'Courses', 'Students Delete'] -- ['courses_students-get.md', 'Courses', 'Students Get'] -- ['courses_students-list.md', 'Courses', 'Students List'] -- ['courses_teachers-create.md', 'Courses', 'Teachers Create'] -- ['courses_teachers-delete.md', 'Courses', 'Teachers Delete'] -- ['courses_teachers-get.md', 'Courses', 'Teachers Get'] -- ['courses_teachers-list.md', 'Courses', 'Teachers List'] -- ['courses_topics-create.md', 'Courses', 'Topics Create'] -- 
['courses_topics-delete.md', 'Courses', 'Topics Delete'] -- ['courses_topics-get.md', 'Courses', 'Topics Get'] -- ['courses_topics-list.md', 'Courses', 'Topics List'] -- ['courses_topics-patch.md', 'Courses', 'Topics Patch'] -- ['courses_update.md', 'Courses', 'Update'] -- ['invitations_accept.md', 'Invitations', 'Accept'] -- ['invitations_create.md', 'Invitations', 'Create'] -- ['invitations_delete.md', 'Invitations', 'Delete'] -- ['invitations_get.md', 'Invitations', 'Get'] -- ['invitations_list.md', 'Invitations', 'List'] -- ['registrations_create.md', 'Registrations', 'Create'] -- ['registrations_delete.md', 'Registrations', 'Delete'] -- ['user-profiles_get.md', 'User Profiles', 'Get'] -- ['user-profiles_guardian-invitations-create.md', 'User Profiles', 'Guardian Invitations Create'] -- ['user-profiles_guardian-invitations-get.md', 'User Profiles', 'Guardian Invitations Get'] -- ['user-profiles_guardian-invitations-list.md', 'User Profiles', 'Guardian Invitations List'] -- ['user-profiles_guardian-invitations-patch.md', 'User Profiles', 'Guardian Invitations Patch'] -- ['user-profiles_guardians-delete.md', 'User Profiles', 'Guardians Delete'] -- ['user-profiles_guardians-get.md', 'User Profiles', 'Guardians Get'] -- ['user-profiles_guardians-list.md', 'User Profiles', 'Guardians List'] +nav: +- Home: 'index.md' +- 'Courses': + - 'Aliases Create': 'courses_aliases-create.md' + - 'Aliases Delete': 'courses_aliases-delete.md' + - 'Aliases List': 'courses_aliases-list.md' + - 'Announcements Create': 'courses_announcements-create.md' + - 'Announcements Delete': 'courses_announcements-delete.md' + - 'Announcements Get': 'courses_announcements-get.md' + - 'Announcements List': 'courses_announcements-list.md' + - 'Announcements Modify Assignees': 'courses_announcements-modify-assignees.md' + - 'Announcements Patch': 'courses_announcements-patch.md' + - 'Course Work Create': 'courses_course-work-create.md' + - 'Course Work Delete': 'courses_course-work-delete.md' + - 
'Course Work Get': 'courses_course-work-get.md' + - 'Course Work List': 'courses_course-work-list.md' + - 'Course Work Modify Assignees': 'courses_course-work-modify-assignees.md' + - 'Course Work Patch': 'courses_course-work-patch.md' + - 'Course Work Student Submissions Get': 'courses_course-work-student-submissions-get.md' + - 'Course Work Student Submissions List': 'courses_course-work-student-submissions-list.md' + - 'Course Work Student Submissions Modify Attachments': 'courses_course-work-student-submissions-modify-attachments.md' + - 'Course Work Student Submissions Patch': 'courses_course-work-student-submissions-patch.md' + - 'Course Work Student Submissions Reclaim': 'courses_course-work-student-submissions-reclaim.md' + - 'Course Work Student Submissions Return': 'courses_course-work-student-submissions-return.md' + - 'Course Work Student Submissions Turn In': 'courses_course-work-student-submissions-turn-in.md' + - 'Course Work Materials Create': 'courses_course-work-materials-create.md' + - 'Course Work Materials Delete': 'courses_course-work-materials-delete.md' + - 'Course Work Materials Get': 'courses_course-work-materials-get.md' + - 'Course Work Materials List': 'courses_course-work-materials-list.md' + - 'Course Work Materials Patch': 'courses_course-work-materials-patch.md' + - 'Create': 'courses_create.md' + - 'Delete': 'courses_delete.md' + - 'Get': 'courses_get.md' + - 'List': 'courses_list.md' + - 'Patch': 'courses_patch.md' + - 'Students Create': 'courses_students-create.md' + - 'Students Delete': 'courses_students-delete.md' + - 'Students Get': 'courses_students-get.md' + - 'Students List': 'courses_students-list.md' + - 'Teachers Create': 'courses_teachers-create.md' + - 'Teachers Delete': 'courses_teachers-delete.md' + - 'Teachers Get': 'courses_teachers-get.md' + - 'Teachers List': 'courses_teachers-list.md' + - 'Topics Create': 'courses_topics-create.md' + - 'Topics Delete': 'courses_topics-delete.md' + - 'Topics Get': 
'courses_topics-get.md' + - 'Topics List': 'courses_topics-list.md' + - 'Topics Patch': 'courses_topics-patch.md' + - 'Update': 'courses_update.md' +- 'Invitations': + - 'Accept': 'invitations_accept.md' + - 'Create': 'invitations_create.md' + - 'Delete': 'invitations_delete.md' + - 'Get': 'invitations_get.md' + - 'List': 'invitations_list.md' +- 'Registrations': + - 'Create': 'registrations_create.md' + - 'Delete': 'registrations_delete.md' +- 'User Profiles': + - 'Get': 'user-profiles_get.md' + - 'Guardian Invitations Create': 'user-profiles_guardian-invitations-create.md' + - 'Guardian Invitations Get': 'user-profiles_guardian-invitations-get.md' + - 'Guardian Invitations List': 'user-profiles_guardian-invitations-list.md' + - 'Guardian Invitations Patch': 'user-profiles_guardian-invitations-patch.md' + - 'Guardians Delete': 'user-profiles_guardians-delete.md' + - 'Guardians Get': 'user-profiles_guardians-get.md' + - 'Guardians List': 'user-profiles_guardians-list.md' theme: readthedocs diff --git a/gen/classroom1-cli/src/client.rs b/gen/classroom1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/classroom1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: 
ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/classroom1-cli/src/main.rs b/gen/classroom1-cli/src/main.rs index 6fffcba0ec..8cd04fb9ca 100644 --- a/gen/classroom1-cli/src/main.rs +++ b/gen/classroom1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_classroom1::{api, Error, oauth2}; +use google_classroom1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -198,7 +197,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -456,7 +455,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -648,7 +647,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -927,7 +926,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1140,7 +1139,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1257,7 +1256,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "late" => { call = call.late(value.unwrap_or("")); @@ -1447,7 +1446,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1959,7 +1958,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2072,7 +2071,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2346,7 +2345,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call 
= call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "course-states" => { call = call.add_course_states(value.unwrap_or("")); @@ -2458,7 +2457,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2721,7 +2720,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2977,7 +2976,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3228,7 +3227,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3320,7 +3319,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3731,7 +3730,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "course-id" => { call = call.course_id(value.unwrap_or("")); @@ -4131,7 +4130,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "invited-email-address" => { call = call.invited_email_address(value.unwrap_or("")); @@ -4227,7 +4226,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4390,7 +4389,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "invited-email-address" => { call = call.invited_email_address(value.unwrap_or("")); @@ -5565,7 +5564,7 @@ async fn main() { Some(false)), ]), ("create", - Some(r##"Creates a course. The user specified in `ownerId` is the owner of the created course and added as a teacher. A non-admin requesting user can only create a course with themselves as the owner. Domain admins can create courses owned by any user within their domain. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to create courses or for access errors. * `NOT_FOUND` if the primary teacher is not a valid user. 
* `FAILED_PRECONDITION` if the course owner's account is disabled or for the following request errors: * UserGroupsMembershipLimitReached * `ALREADY_EXISTS` if an alias was specified in the `id` and already exists."##), + Some(r##"Creates a course. The user specified in `ownerId` is the owner of the created course and added as a teacher. A non-admin requesting user can only create a course with themselves as the owner. Domain admins can create courses owned by any user within their domain. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to create courses or for access errors. * `NOT_FOUND` if the primary teacher is not a valid user. * `FAILED_PRECONDITION` if the course owner's account is disabled or for the following request errors: * UserCannotOwnCourse * UserGroupsMembershipLimitReached * `ALREADY_EXISTS` if an alias was specified in the `id` and already exists."##), "Details at http://byron.github.io/google-apis-rs/google_classroom1_cli/courses_create", vec![ (Some(r##"kv"##), @@ -5647,7 +5646,7 @@ async fn main() { Some(false)), ]), ("patch", - Some(r##"Updates one or more fields in a course. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to modify the requested course or for access errors. * `NOT_FOUND` if no course exists with the requested ID. * `INVALID_ARGUMENT` if invalid fields are specified in the update mask or if no update mask is supplied. * `FAILED_PRECONDITION` for the following request errors: * CourseNotModifiable"##), + Some(r##"Updates one or more fields in a course. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to modify the requested course or for access errors. * `NOT_FOUND` if no course exists with the requested ID. * `INVALID_ARGUMENT` if invalid fields are specified in the update mask or if no update mask is supplied. 
* `FAILED_PRECONDITION` for the following request errors: * CourseNotModifiable * InactiveCourseOwner * IneligibleOwner"##), "Details at http://byron.github.io/google-apis-rs/google_classroom1_cli/courses_patch", vec![ (Some(r##"id"##), @@ -5675,7 +5674,7 @@ async fn main() { Some(false)), ]), ("students-create", - Some(r##"Adds a user as a student of a course. Domain administrators are permitted to [directly add](https://developers.google.com/classroom/guides/manage-users) users within their domain as students to courses within their domain. Students are permitted to add themselves to a course using an enrollment code. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to create students in this course or for access errors. * `NOT_FOUND` if the requested course ID does not exist. * `FAILED_PRECONDITION` if the requested user's account is disabled, for the following request errors: * CourseMemberLimitReached * CourseNotModifiable * UserGroupsMembershipLimitReached * `ALREADY_EXISTS` if the user is already a student or teacher in the course."##), + Some(r##"Adds a user as a student of a course. Domain administrators are permitted to [directly add](https://developers.google.com/classroom/guides/manage-users) users within their domain as students to courses within their domain. Students are permitted to add themselves to a course using an enrollment code. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to create students in this course or for access errors. * `NOT_FOUND` if the requested course ID does not exist. 
* `FAILED_PRECONDITION` if the requested user's account is disabled, for the following request errors: * CourseMemberLimitReached * CourseNotModifiable * UserGroupsMembershipLimitReached * InactiveCourseOwner * `ALREADY_EXISTS` if the user is already a student or teacher in the course."##), "Details at http://byron.github.io/google-apis-rs/google_classroom1_cli/courses_students-create", vec![ (Some(r##"course-id"##), @@ -5781,7 +5780,7 @@ async fn main() { Some(false)), ]), ("teachers-create", - Some(r##"Creates a teacher of a course. Domain administrators are permitted to [directly add](https://developers.google.com/classroom/guides/manage-users) users within their domain as teachers to courses within their domain. Non-admin users should send an Invitation instead. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to create teachers in this course or for access errors. * `NOT_FOUND` if the requested course ID does not exist. * `FAILED_PRECONDITION` if the requested user's account is disabled, for the following request errors: * CourseMemberLimitReached * CourseNotModifiable * CourseTeacherLimitReached * UserGroupsMembershipLimitReached * `ALREADY_EXISTS` if the user is already a teacher or student in the course."##), + Some(r##"Creates a teacher of a course. Domain administrators are permitted to [directly add](https://developers.google.com/classroom/guides/manage-users) users within their domain as teachers to courses within their domain. Non-admin users should send an Invitation instead. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to create teachers in this course or for access errors. * `NOT_FOUND` if the requested course ID does not exist. 
* `FAILED_PRECONDITION` if the requested user's account is disabled, for the following request errors: * CourseMemberLimitReached * CourseNotModifiable * CourseTeacherLimitReached * UserGroupsMembershipLimitReached * InactiveCourseOwner * `ALREADY_EXISTS` if the user is already a teacher or student in the course."##), "Details at http://byron.github.io/google-apis-rs/google_classroom1_cli/courses_teachers-create", vec![ (Some(r##"course-id"##), @@ -6080,7 +6079,7 @@ async fn main() { Some(false)), ]), ("create", - Some(r##"Creates an invitation. Only one invitation for a user and course may exist at a time. Delete and re-create an invitation to make changes. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to create invitations for this course or for access errors. * `NOT_FOUND` if the course or the user does not exist. * `FAILED_PRECONDITION` if the requested user's account is disabled or if the user already has this role or a role with greater permissions. * `ALREADY_EXISTS` if an invitation for the specified user and course already exists."##), + Some(r##"Creates an invitation. Only one invitation for a user and course may exist at a time. Delete and re-create an invitation to make changes. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user is not permitted to create invitations for this course or for access errors. * `NOT_FOUND` if the course or the user does not exist. * `FAILED_PRECONDITION`: * if the requested user's account is disabled. * if the user already has this role or a role with greater permissions. 
* for the following request errors: * IneligibleOwner * `ALREADY_EXISTS` if an invitation for the specified user and course already exists."##), "Details at http://byron.github.io/google-apis-rs/google_classroom1_cli/invitations_create", vec![ (Some(r##"kv"##), @@ -6429,7 +6428,7 @@ async fn main() { let mut app = App::new("classroom1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230119") .about("Manages classes, rosters, and invitations in Google Classroom.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_classroom1_cli") .arg(Arg::with_name("url") diff --git a/gen/classroom1/Cargo.toml b/gen/classroom1/Cargo.toml index 1857e2c722..e5b3da826c 100644 --- a/gen/classroom1/Cargo.toml +++ b/gen/classroom1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-classroom1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with classroom (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/classroom1" homepage = "https://developers.google.com/classroom/" -documentation = "https://docs.rs/google-classroom1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-classroom1/5.0.2+20230119" license = "MIT" keywords = ["classroom", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/classroom1/README.md b/gen/classroom1/README.md index b62235750f..64776cee3f 100644 --- a/gen/classroom1/README.md +++ b/gen/classroom1/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-classroom1` library allows access to all features of the *Google classroom* service. -This documentation was generated from *classroom* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *classroom:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *classroom* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *classroom:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *classroom* *v1* API can be found at the [official documentation site](https://developers.google.com/classroom/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/Classroom) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/Classroom) ... -* [courses](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::Course) - * [*aliases create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAliasCreateCall), [*aliases delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAliasDeleteCall), [*aliases list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAliasListCall), [*announcements create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAnnouncementCreateCall), [*announcements delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAnnouncementDeleteCall), [*announcements get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAnnouncementGetCall), [*announcements list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAnnouncementListCall), [*announcements modify assignees*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAnnouncementModifyAssigneeCall), [*announcements patch*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseAnnouncementPatchCall), [*course work 
create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkCreateCall), [*course work delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkDeleteCall), [*course work get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkGetCall), [*course work list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkListCall), [*course work modify assignees*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkModifyAssigneeCall), [*course work patch*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkPatchCall), [*course work student submissions get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionGetCall), [*course work student submissions list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionListCall), [*course work student submissions modify attachments*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionModifyAttachmentCall), [*course work student submissions patch*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionPatchCall), [*course work student submissions reclaim*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionReclaimCall), [*course work student submissions return*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionReturnCall), [*course work student submissions turn in*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionTurnInCall), [*course work materials 
create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkMaterialCreateCall), [*course work materials delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkMaterialDeleteCall), [*course work materials get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkMaterialGetCall), [*course work materials list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkMaterialListCall), [*course work materials patch*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCourseWorkMaterialPatchCall), [*create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseCreateCall), [*delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseDeleteCall), [*get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseGetCall), [*list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseListCall), [*patch*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CoursePatchCall), [*students create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseStudentCreateCall), [*students delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseStudentDeleteCall), [*students get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseStudentGetCall), [*students list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseStudentListCall), [*teachers create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTeacherCreateCall), [*teachers delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTeacherDeleteCall), [*teachers 
get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTeacherGetCall), [*teachers list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTeacherListCall), [*topics create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTopicCreateCall), [*topics delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTopicDeleteCall), [*topics get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTopicGetCall), [*topics list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTopicListCall), [*topics patch*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseTopicPatchCall) and [*update*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::CourseUpdateCall) -* [invitations](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::Invitation) - * [*accept*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::InvitationAcceptCall), [*create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::InvitationCreateCall), [*delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::InvitationDeleteCall), [*get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::InvitationGetCall) and [*list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::InvitationListCall) -* [registrations](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::Registration) - * [*create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::RegistrationCreateCall) and [*delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::RegistrationDeleteCall) -* [user 
profiles](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfile) - * [*get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfileGetCall), [*guardian invitations create*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfileGuardianInvitationCreateCall), [*guardian invitations get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfileGuardianInvitationGetCall), [*guardian invitations list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfileGuardianInvitationListCall), [*guardian invitations patch*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfileGuardianInvitationPatchCall), [*guardians delete*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfileGuardianDeleteCall), [*guardians get*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfileGuardianGetCall) and [*guardians list*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/api::UserProfileGuardianListCall) +* [courses](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::Course) + * [*aliases create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAliasCreateCall), [*aliases delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAliasDeleteCall), [*aliases list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAliasListCall), [*announcements create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAnnouncementCreateCall), [*announcements delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAnnouncementDeleteCall), [*announcements 
get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAnnouncementGetCall), [*announcements list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAnnouncementListCall), [*announcements modify assignees*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAnnouncementModifyAssigneeCall), [*announcements patch*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseAnnouncementPatchCall), [*course work create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkCreateCall), [*course work delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkDeleteCall), [*course work get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkGetCall), [*course work list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkListCall), [*course work modify assignees*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkModifyAssigneeCall), [*course work patch*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkPatchCall), [*course work student submissions get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionGetCall), [*course work student submissions list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionListCall), [*course work student submissions modify attachments*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionModifyAttachmentCall), [*course work student submissions patch*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionPatchCall), [*course work student submissions 
reclaim*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionReclaimCall), [*course work student submissions return*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionReturnCall), [*course work student submissions turn in*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkStudentSubmissionTurnInCall), [*course work materials create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkMaterialCreateCall), [*course work materials delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkMaterialDeleteCall), [*course work materials get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkMaterialGetCall), [*course work materials list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkMaterialListCall), [*course work materials patch*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCourseWorkMaterialPatchCall), [*create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseCreateCall), [*delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseDeleteCall), [*get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseGetCall), [*list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseListCall), [*patch*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CoursePatchCall), [*students create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseStudentCreateCall), [*students delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseStudentDeleteCall), [*students get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseStudentGetCall), 
[*students list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseStudentListCall), [*teachers create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTeacherCreateCall), [*teachers delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTeacherDeleteCall), [*teachers get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTeacherGetCall), [*teachers list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTeacherListCall), [*topics create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTopicCreateCall), [*topics delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTopicDeleteCall), [*topics get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTopicGetCall), [*topics list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTopicListCall), [*topics patch*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseTopicPatchCall) and [*update*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::CourseUpdateCall) +* [invitations](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::Invitation) + * [*accept*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::InvitationAcceptCall), [*create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::InvitationCreateCall), [*delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::InvitationDeleteCall), [*get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::InvitationGetCall) and [*list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::InvitationListCall) +* [registrations](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::Registration) + * 
[*create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::RegistrationCreateCall) and [*delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::RegistrationDeleteCall) +* [user profiles](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfile) + * [*get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfileGetCall), [*guardian invitations create*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfileGuardianInvitationCreateCall), [*guardian invitations get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfileGuardianInvitationGetCall), [*guardian invitations list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfileGuardianInvitationListCall), [*guardian invitations patch*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfileGuardianInvitationPatchCall), [*guardians delete*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfileGuardianDeleteCall), [*guardians get*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfileGuardianGetCall) and [*guardians list*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/api::UserProfileGuardianListCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/Classroom)** +* **[Hub](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/Classroom)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::CallBuilder) -* **[Resources](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::CallBuilder) +* **[Resources](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::Part)** + * **[Parts](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -174,17 +174,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -194,29 +194,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::Delegate) to the -[Method Builder](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::Delegate) to the +[Method Builder](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::RequestValue) and -[decodable](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::RequestValue) and +[decodable](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-classroom1/5.0.2-beta-1+20230119/google_classroom1/client::RequestValue) are moved +* [request values](https://docs.rs/google-classroom1/5.0.2+20230119/google_classroom1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/classroom1/src/api.rs b/gen/classroom1/src/api.rs index b4e4cbb778..ff19b652e8 100644 --- a/gen/classroom1/src/api.rs +++ b/gen/classroom1/src/api.rs @@ -208,7 +208,7 @@ impl<'a, S> Classroom { Classroom { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://classroom.googleapis.com/".to_string(), _root_url: "https://classroom.googleapis.com/".to_string(), } @@ -228,7 +228,7 @@ impl<'a, S> Classroom { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/classroom1/src/client.rs b/gen/classroom1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/classroom1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/classroom1/src/lib.rs b/gen/classroom1/src/lib.rs index 797025e769..7c627dac8c 100644 --- a/gen/classroom1/src/lib.rs +++ b/gen/classroom1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *classroom* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *classroom:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *classroom* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *classroom:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *classroom* *v1* API can be found at the //! [official documentation site](https://developers.google.com/classroom/). diff --git a/gen/cloudasset1-cli/Cargo.toml b/gen/cloudasset1-cli/Cargo.toml index fbc9363cf5..04adb1b96c 100644 --- a/gen/cloudasset1-cli/Cargo.toml +++ b/gen/cloudasset1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudasset1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Asset (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudasset1-cli" @@ -20,13 +20,13 @@ name = "cloudasset1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudasset1] path = "../cloudasset1" -version = "4.0.1+20220225" +version = "5.0.2+20230121" + diff --git a/gen/cloudasset1-cli/README.md b/gen/cloudasset1-cli/README.md index 966a99983d..30e1821a1f 100644 --- a/gen/cloudasset1-cli/README.md +++ b/gen/cloudasset1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # 
Usage -This documentation was generated from the *Cloud Asset* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Asset* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash cloudasset1 [options] @@ -43,8 +43,12 @@ cloudasset1 [options] analyze-iam-policy [-p ]... [-o ] analyze-iam-policy-longrunning (-r )... [-p ]... [-o ] analyze-move [-p ]... [-o ] + analyze-org-policies [-p ]... [-o ] + analyze-org-policy-governed-assets [-p ]... [-o ] + analyze-org-policy-governed-containers [-p ]... [-o ] batch-get-assets-history [-p ]... [-o ] export-assets (-r )... [-p ]... [-o ] + query-assets (-r )... [-p ]... [-o ] search-all-iam-policies [-p ]... [-o ] search-all-resources [-p ]... [-o ] operations diff --git a/gen/cloudasset1-cli/mkdocs.yml b/gen/cloudasset1-cli/mkdocs.yml index 5e658b5585..bf9f6becf3 100644 --- a/gen/cloudasset1-cli/mkdocs.yml +++ b/gen/cloudasset1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Asset v4.0.1+20220225 +site_name: Cloud Asset v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-cloudasset1-cli site_description: A complete library to interact with Cloud Asset (protocol v1) @@ -7,28 +7,38 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudasset1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['assets_list.md', 'Assets', 'List'] -- ['effective-iam-policies_batch-get.md', 'Effective Iam Policies', 'Batch Get'] -- ['feeds_create.md', 'Feeds', 'Create'] -- ['feeds_delete.md', 'Feeds', 'Delete'] -- ['feeds_get.md', 'Feeds', 'Get'] -- ['feeds_list.md', 'Feeds', 'List'] -- ['feeds_patch.md', 'Feeds', 'Patch'] -- ['methods_analyze-iam-policy.md', 'Methods', 'Analyze Iam Policy'] -- ['methods_analyze-iam-policy-longrunning.md', 'Methods', 'Analyze Iam Policy Longrunning'] -- ['methods_analyze-move.md', 'Methods', 'Analyze Move'] -- ['methods_batch-get-assets-history.md', 'Methods', 'Batch Get Assets History'] 
-- ['methods_export-assets.md', 'Methods', 'Export Assets'] -- ['methods_search-all-iam-policies.md', 'Methods', 'Search All Iam Policies'] -- ['methods_search-all-resources.md', 'Methods', 'Search All Resources'] -- ['operations_get.md', 'Operations', 'Get'] -- ['saved-queries_create.md', 'Saved Queries', 'Create'] -- ['saved-queries_delete.md', 'Saved Queries', 'Delete'] -- ['saved-queries_get.md', 'Saved Queries', 'Get'] -- ['saved-queries_list.md', 'Saved Queries', 'List'] -- ['saved-queries_patch.md', 'Saved Queries', 'Patch'] +nav: +- Home: 'index.md' +- 'Assets': + - 'List': 'assets_list.md' +- 'Effective Iam Policies': + - 'Batch Get': 'effective-iam-policies_batch-get.md' +- 'Feeds': + - 'Create': 'feeds_create.md' + - 'Delete': 'feeds_delete.md' + - 'Get': 'feeds_get.md' + - 'List': 'feeds_list.md' + - 'Patch': 'feeds_patch.md' +- 'Methods': + - 'Analyze Iam Policy': 'methods_analyze-iam-policy.md' + - 'Analyze Iam Policy Longrunning': 'methods_analyze-iam-policy-longrunning.md' + - 'Analyze Move': 'methods_analyze-move.md' + - 'Analyze Org Policies': 'methods_analyze-org-policies.md' + - 'Analyze Org Policy Governed Assets': 'methods_analyze-org-policy-governed-assets.md' + - 'Analyze Org Policy Governed Containers': 'methods_analyze-org-policy-governed-containers.md' + - 'Batch Get Assets History': 'methods_batch-get-assets-history.md' + - 'Export Assets': 'methods_export-assets.md' + - 'Query Assets': 'methods_query-assets.md' + - 'Search All Iam Policies': 'methods_search-all-iam-policies.md' + - 'Search All Resources': 'methods_search-all-resources.md' +- 'Operations': + - 'Get': 'operations_get.md' +- 'Saved Queries': + - 'Create': 'saved-queries_create.md' + - 'Delete': 'saved-queries_delete.md' + - 'Get': 'saved-queries_get.md' + - 'List': 'saved-queries_list.md' + - 'Patch': 'saved-queries_patch.md' theme: readthedocs diff --git a/gen/cloudasset1-cli/src/client.rs b/gen/cloudasset1-cli/src/client.rs deleted file mode 100644 index 
0ece418e7d..0000000000 --- a/gen/cloudasset1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - 
Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } 
else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => 
Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match 
UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - 
-#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) 
=> { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudasset1-cli/src/main.rs b/gen/cloudasset1-cli/src/main.rs index 40c6b83d0a..736d63581b 100644 --- a/gen/cloudasset1-cli/src/main.rs +++ b/gen/cloudasset1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudasset1::{api, Error, oauth2}; +use google_cloudasset1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,13 +60,13 @@ where call = call.add_relationship_types(value.unwrap_or("")); }, "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "content-type" => { call = call.content_type(value.unwrap_or("")); @@ -534,34 +533,34 @@ where call = call.saved_analysis_query(value.unwrap_or("")); }, "execution-timeout" => { - call = call.execution_timeout(value.unwrap_or("")); + call = call.execution_timeout( value.map(|v| arg_from_str(v, err, "execution-timeout", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, "analysis-query-resource-selector-full-resource-name" => { call = call.analysis_query_resource_selector_full_resource_name(value.unwrap_or("")); }, 
"analysis-query-options-output-resource-edges" => { - call = call.analysis_query_options_output_resource_edges(arg_from_str(value.unwrap_or("false"), err, "analysis-query-options-output-resource-edges", "boolean")); + call = call.analysis_query_options_output_resource_edges( value.map(|v| arg_from_str(v, err, "analysis-query-options-output-resource-edges", "boolean")).unwrap_or(false)); }, "analysis-query-options-output-group-edges" => { - call = call.analysis_query_options_output_group_edges(arg_from_str(value.unwrap_or("false"), err, "analysis-query-options-output-group-edges", "boolean")); + call = call.analysis_query_options_output_group_edges( value.map(|v| arg_from_str(v, err, "analysis-query-options-output-group-edges", "boolean")).unwrap_or(false)); }, "analysis-query-options-expand-roles" => { - call = call.analysis_query_options_expand_roles(arg_from_str(value.unwrap_or("false"), err, "analysis-query-options-expand-roles", "boolean")); + call = call.analysis_query_options_expand_roles( value.map(|v| arg_from_str(v, err, "analysis-query-options-expand-roles", "boolean")).unwrap_or(false)); }, "analysis-query-options-expand-resources" => { - call = call.analysis_query_options_expand_resources(arg_from_str(value.unwrap_or("false"), err, "analysis-query-options-expand-resources", "boolean")); + call = call.analysis_query_options_expand_resources( value.map(|v| arg_from_str(v, err, "analysis-query-options-expand-resources", "boolean")).unwrap_or(false)); }, "analysis-query-options-expand-groups" => { - call = call.analysis_query_options_expand_groups(arg_from_str(value.unwrap_or("false"), err, "analysis-query-options-expand-groups", "boolean")); + call = call.analysis_query_options_expand_groups( value.map(|v| arg_from_str(v, err, "analysis-query-options-expand-groups", "boolean")).unwrap_or(false)); }, "analysis-query-options-analyze-service-account-impersonation" => { - call = 
call.analysis_query_options_analyze_service_account_impersonation(arg_from_str(value.unwrap_or("false"), err, "analysis-query-options-analyze-service-account-impersonation", "boolean")); + call = call.analysis_query_options_analyze_service_account_impersonation( value.map(|v| arg_from_str(v, err, "analysis-query-options-analyze-service-account-impersonation", "boolean")).unwrap_or(false)); }, "analysis-query-identity-selector-identity" => { call = call.analysis_query_identity_selector_identity(value.unwrap_or("")); }, "analysis-query-condition-context-access-time" => { - call = call.analysis_query_condition_context_access_time(value.unwrap_or("")); + call = call.analysis_query_condition_context_access_time( value.map(|v| arg_from_str(v, err, "analysis-query-condition-context-access-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "analysis-query-access-selector-roles" => { call = call.add_analysis_query_access_selector_roles(value.unwrap_or("")); @@ -777,6 +776,201 @@ where } } + async fn _methods_analyze_org_policies(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.methods().analyze_org_policies(opt.value_of("scope").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + "constraint" => { + call = call.constraint(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["constraint", "filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _methods_analyze_org_policy_governed_assets(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.methods().analyze_org_policy_governed_assets(opt.value_of("scope").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + "constraint" => { + call = call.constraint(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["constraint", "filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _methods_analyze_org_policy_governed_containers(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.methods().analyze_org_policy_governed_containers(opt.value_of("scope").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + "constraint" => { + call = call.constraint(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if 
key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["constraint", "filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _methods_batch_get_assets_history(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.methods().batch_get_assets_history(opt.value_of("parent").unwrap_or("")); @@ -787,10 +981,10 @@ where call = call.add_relationship_types(value.unwrap_or("")); }, "read-time-window-start-time" => { - call = call.read_time_window_start_time(value.unwrap_or("")); + call = call.read_time_window_start_time( value.map(|v| arg_from_str(v, err, "read-time-window-start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "read-time-window-end-time" => { - call = call.read_time_window_end_time(value.unwrap_or("")); + call = call.read_time_window_end_time( 
value.map(|v| arg_from_str(v, err, "read-time-window-end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "content-type" => { call = call.content_type(value.unwrap_or("")); @@ -940,6 +1134,101 @@ where } } + async fn _methods_query_assets(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "job-reference" => Some(("jobReference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "output-config.bigquery-destination.dataset" => Some(("outputConfig.bigqueryDestination.dataset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "output-config.bigquery-destination.table" => Some(("outputConfig.bigqueryDestination.table", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "output-config.bigquery-destination.write-disposition" => Some(("outputConfig.bigqueryDestination.writeDisposition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-time" => Some(("readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-time-window.end-time" => Some(("readTimeWindow.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-time-window.start-time" => Some(("readTimeWindow.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statement" => Some(("statement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "timeout" => Some(("timeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["bigquery-destination", "dataset", "end-time", "job-reference", "output-config", "page-size", "page-token", "read-time", "read-time-window", "start-time", "statement", "table", "timeout", "write-disposition"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, 
&temp_cursor); + } + } + let mut request: api::QueryAssetsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.methods().query_assets(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _methods_search_all_iam_policies(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.methods().search_all_iam_policies(opt.value_of("scope").unwrap_or("")); @@ -953,7 +1242,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, 
"page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1015,7 +1304,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "query" => { call = call.query(value.unwrap_or("")); @@ -1024,7 +1313,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1352,7 +1641,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1462,7 +1751,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1572,12 +1861,24 @@ where ("analyze-move", Some(opt)) => { call_result = self._methods_analyze_move(opt, dry_run, &mut err).await; }, + ("analyze-org-policies", Some(opt)) => { + call_result = self._methods_analyze_org_policies(opt, dry_run, &mut err).await; + }, + ("analyze-org-policy-governed-assets", Some(opt)) => { + call_result = self._methods_analyze_org_policy_governed_assets(opt, dry_run, &mut err).await; + }, + 
("analyze-org-policy-governed-containers", Some(opt)) => { + call_result = self._methods_analyze_org_policy_governed_containers(opt, dry_run, &mut err).await; + }, ("batch-get-assets-history", Some(opt)) => { call_result = self._methods_batch_get_assets_history(opt, dry_run, &mut err).await; }, ("export-assets", Some(opt)) => { call_result = self._methods_export_assets(opt, dry_run, &mut err).await; }, + ("query-assets", Some(opt)) => { + call_result = self._methods_query_assets(opt, dry_run, &mut err).await; + }, ("search-all-iam-policies", Some(opt)) => { call_result = self._methods_search_all_iam_policies(opt, dry_run, &mut err).await; }, @@ -1872,7 +2173,7 @@ async fn main() { ]), ]), - ("methods", "methods: 'analyze-iam-policy', 'analyze-iam-policy-longrunning', 'analyze-move', 'batch-get-assets-history', 'export-assets', 'search-all-iam-policies' and 'search-all-resources'", vec![ + ("methods", "methods: 'analyze-iam-policy', 'analyze-iam-policy-longrunning', 'analyze-move', 'analyze-org-policies', 'analyze-org-policy-governed-assets', 'analyze-org-policy-governed-containers', 'batch-get-assets-history', 'export-assets', 'query-assets', 'search-all-iam-policies' and 'search-all-resources'", vec![ ("analyze-iam-policy", Some(r##"Analyzes IAM policies to answer which identities have what accesses on which resources."##), "Details at http://byron.github.io/google-apis-rs/google_cloudasset1_cli/methods_analyze-iam-policy", @@ -1929,7 +2230,73 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"Required. Name of the resource to perform the analysis against. Only GCP Project are supported as of today. Hence, this can only be Project ID (such as "projects/my-project-id") or a Project Number (such as "projects/12345")."##), + Some(r##"Required. Name of the resource to perform the analysis against. Only Google Cloud projects are supported as of today. 
Hence, this can only be a project ID (such as "projects/my-project-id") or a project number (such as "projects/12345")."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("analyze-org-policies", + Some(r##"Analyzes organization policies under a scope."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudasset1_cli/methods_analyze-org-policies", + vec![ + (Some(r##"scope"##), + None, + Some(r##"Required. The organization to scope the request. Only organization policies within the scope will be analyzed. * organizations/{ORGANIZATION_NUMBER} (e.g., "organizations/123456")"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("analyze-org-policy-governed-assets", + Some(r##"Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. 
This RPC supports custom constraints and the following 10 canned constraints: * storage.uniformBucketLevelAccess * iam.disableServiceAccountKeyCreation * iam.allowedPolicyMemberDomains * compute.vmExternalIpAccess * appengine.enforceServiceAccountActAsCheck * gcp.resourceLocations * compute.trustedImageProjects * compute.skipDefaultNetworkCreation * compute.requireOsLogin * compute.disableNestedVirtualization This RPC only returns either resources of types supported by [searchable asset types](https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types), or IAM policies."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudasset1_cli/methods_analyze-org-policy-governed-assets", + vec![ + (Some(r##"scope"##), + None, + Some(r##"Required. The organization to scope the request. Only organization policies within the scope will be analyzed. The output assets will also be limited to the ones governed by those in-scope organization policies. * organizations/{ORGANIZATION_NUMBER} (e.g., "organizations/123456")"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("analyze-org-policy-governed-containers", + Some(r##"Analyzes organization policies governed containers (projects, folders or organization) under a scope."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudasset1_cli/methods_analyze-org-policy-governed-containers", + vec![ + (Some(r##"scope"##), + None, + Some(r##"Required. The organization to scope the request. Only organization policies within the scope will be analyzed. The output containers will also be limited to the ones governed by those in-scope organization policies. 
* organizations/{ORGANIZATION_NUMBER} (e.g., "organizations/123456")"##), Some(true), Some(false)), @@ -1989,6 +2356,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("query-assets", + Some(r##"Issue a job that queries assets using a SQL statement compatible with [BigQuery Standard SQL](http://cloud/bigquery/docs/reference/standard-sql/enabling-standard-sql). If the query execution finishes within timeout and there's no pagination, the full query results will be returned in the `QueryAssetsResponse`. Otherwise, full query results can be obtained by issuing extra requests with the `job_reference` from the a previous `QueryAssets` call. Note, the query result has approximately 10 GB limitation enforced by BigQuery https://cloud.google.com/bigquery/docs/best-practices-performance-output, queries return larger results will result in errors."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudasset1_cli/methods_query-assets", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The relative name of the root asset. This can only be an organization number (such as "organizations/123"), a project ID (such as "projects/my-project-id"), or a project number (such as "projects/12345"), or a folder number (such as "folders/123"). 
Only assets belonging to the `parent` will be returned."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2018,7 +2413,7 @@ async fn main() { Some(false)), ]), ("search-all-resources", - Some(r##"Searches all Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be granted the `cloudasset.assets.searchAllResources` permission on the desired scope, otherwise the request will be rejected."##), + Some(r##"Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be granted the `cloudasset.assets.searchAllResources` permission on the desired scope, otherwise the request will be rejected."##), "Details at http://byron.github.io/google-apis-rs/google_cloudasset1_cli/methods_search-all-resources", vec![ (Some(r##"scope"##), @@ -2195,8 +2590,8 @@ async fn main() { let mut app = App::new("cloudasset1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") - .about("The cloud asset API manages the history and inventory of cloud resources.") + .version("5.0.2+20230121") + .about("The Cloud Asset API manages the history and inventory of Google Cloud resources.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudasset1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/cloudasset1/Cargo.toml b/gen/cloudasset1/Cargo.toml index cdacb378e4..497c288ce9 100644 --- a/gen/cloudasset1/Cargo.toml +++ b/gen/cloudasset1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudasset1" -version = 
"5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Asset (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudasset1" homepage = "https://cloud.google.com/asset-inventory/docs/quickstart" -documentation = "https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-cloudasset1/5.0.2+20230121" license = "MIT" keywords = ["cloudasset", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudasset1/README.md b/gen/cloudasset1/README.md index 1b6fbdb89e..8daffade2e 100644 --- a/gen/cloudasset1/README.md +++ b/gen/cloudasset1/README.md @@ -5,38 +5,38 @@ DO NOT EDIT ! --> The `google-cloudasset1` library allows access to all features of the *Google Cloud Asset* service. -This documentation was generated from *Cloud Asset* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *cloudasset:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Asset* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *cloudasset:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Asset* *v1* API can be found at the [official documentation site](https://cloud.google.com/asset-inventory/docs/quickstart). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/CloudAsset) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/CloudAsset) ... 
-* [assets](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::Asset) - * [*list*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::AssetListCall) -* [effective iam policies](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::EffectiveIamPolicy) - * [*batch get*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::EffectiveIamPolicyBatchGetCall) -* [feeds](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::Feed) - * [*create*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::FeedCreateCall), [*delete*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::FeedDeleteCall), [*get*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::FeedGetCall), [*list*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::FeedListCall) and [*patch*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::FeedPatchCall) -* [operations](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::Operation) - * [*get*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::OperationGetCall) -* [saved queries](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::SavedQuery) - * [*create*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::SavedQueryCreateCall), [*delete*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::SavedQueryDeleteCall), [*get*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::SavedQueryGetCall), [*list*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::SavedQueryListCall) and 
[*patch*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::SavedQueryPatchCall) +* [assets](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::Asset) + * [*list*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::AssetListCall) +* [effective iam policies](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::EffectiveIamPolicy) + * [*batch get*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::EffectiveIamPolicyBatchGetCall) +* [feeds](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::Feed) + * [*create*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::FeedCreateCall), [*delete*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::FeedDeleteCall), [*get*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::FeedGetCall), [*list*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::FeedListCall) and [*patch*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::FeedPatchCall) +* [operations](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::Operation) + * [*get*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::OperationGetCall) +* [saved queries](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::SavedQuery) + * [*create*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::SavedQueryCreateCall), [*delete*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::SavedQueryDeleteCall), [*get*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::SavedQueryGetCall), [*list*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::SavedQueryListCall) and [*patch*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::SavedQueryPatchCall) Other 
activities are ... -* [analyze iam policy](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodAnalyzeIamPolicyCall) -* [analyze iam policy longrunning](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodAnalyzeIamPolicyLongrunningCall) -* [analyze move](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodAnalyzeMoveCall) -* [analyze org policies](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodAnalyzeOrgPolicyCall) -* [analyze org policy governed assets](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodAnalyzeOrgPolicyGovernedAssetCall) -* [analyze org policy governed containers](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodAnalyzeOrgPolicyGovernedContainerCall) -* [batch get assets history](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodBatchGetAssetsHistoryCall) -* [export assets](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodExportAssetCall) -* [query assets](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodQueryAssetCall) -* [search all iam policies](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodSearchAllIamPolicyCall) -* [search all resources](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/api::MethodSearchAllResourceCall) +* [analyze iam policy](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodAnalyzeIamPolicyCall) +* [analyze iam policy longrunning](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodAnalyzeIamPolicyLongrunningCall) +* [analyze move](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodAnalyzeMoveCall) +* [analyze org 
policies](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodAnalyzeOrgPolicyCall) +* [analyze org policy governed assets](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodAnalyzeOrgPolicyGovernedAssetCall) +* [analyze org policy governed containers](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodAnalyzeOrgPolicyGovernedContainerCall) +* [batch get assets history](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodBatchGetAssetsHistoryCall) +* [export assets](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodExportAssetCall) +* [query assets](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodQueryAssetCall) +* [search all iam policies](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodSearchAllIamPolicyCall) +* [search all resources](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/api::MethodSearchAllResourceCall) @@ -44,17 +44,17 @@ Other activities are ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/CloudAsset)** +* **[Hub](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/CloudAsset)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::Part)** + * **[Parts](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -149,17 +149,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -169,29 +169,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudasset1/5.0.2-beta-1+20230121/google_cloudasset1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudasset1/5.0.2+20230121/google_cloudasset1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudasset1/src/api.rs b/gen/cloudasset1/src/api.rs index 205a5cf88c..0f3005b5c9 100644 --- a/gen/cloudasset1/src/api.rs +++ b/gen/cloudasset1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudAsset { CloudAsset { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudasset.googleapis.com/".to_string(), _root_url: "https://cloudasset.googleapis.com/".to_string(), } @@ -151,7 +151,7 @@ impl<'a, S> CloudAsset { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudasset1/src/client.rs b/gen/cloudasset1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudasset1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudasset1/src/lib.rs b/gen/cloudasset1/src/lib.rs index 7636218f5f..b2d3e1c682 100644 --- a/gen/cloudasset1/src/lib.rs +++ b/gen/cloudasset1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Asset* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *cloudasset:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Asset* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *cloudasset:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Asset* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/asset-inventory/docs/quickstart). diff --git a/gen/cloudasset1_beta1-cli/Cargo.toml b/gen/cloudasset1_beta1-cli/Cargo.toml index 170eac9212..c7fe16978b 100644 --- a/gen/cloudasset1_beta1-cli/Cargo.toml +++ b/gen/cloudasset1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudasset1_beta1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Asset (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudasset1_beta1-cli" @@ -20,13 +20,13 @@ name = "cloudasset1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudasset1_beta1] path = "../cloudasset1_beta1" -version = "4.0.1+20220225" +version = "5.0.2+20230121" + diff --git a/gen/cloudasset1_beta1-cli/README.md b/gen/cloudasset1_beta1-cli/README.md index dd002ebfb6..8552d1a922 100644 --- a/gen/cloudasset1_beta1-cli/README.md +++ 
b/gen/cloudasset1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Asset* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Asset* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash cloudasset1-beta1 [options] diff --git a/gen/cloudasset1_beta1-cli/mkdocs.yml b/gen/cloudasset1_beta1-cli/mkdocs.yml index 7af77de64e..20a48d8371 100644 --- a/gen/cloudasset1_beta1-cli/mkdocs.yml +++ b/gen/cloudasset1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Asset v4.0.1+20220225 +site_name: Cloud Asset v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-cloudasset1_beta1-cli site_description: A complete library to interact with Cloud Asset (protocol v1beta1) @@ -7,16 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudasset1_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_export-assets.md', 'Folders', 'Export Assets'] -- ['folders_operations-get.md', 'Folders', 'Operations Get'] -- ['organizations_batch-get-assets-history.md', 'Organizations', 'Batch Get Assets History'] -- ['organizations_export-assets.md', 'Organizations', 'Export Assets'] -- ['organizations_operations-get.md', 'Organizations', 'Operations Get'] -- ['projects_batch-get-assets-history.md', 'Projects', 'Batch Get Assets History'] -- ['projects_export-assets.md', 'Projects', 'Export Assets'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Export Assets': 'folders_export-assets.md' + - 'Operations Get': 'folders_operations-get.md' +- 'Organizations': + - 'Batch Get Assets History': 'organizations_batch-get-assets-history.md' + - 'Export Assets': 'organizations_export-assets.md' + - 'Operations Get': 'organizations_operations-get.md' +- 'Projects': + - 'Batch Get 
Assets History': 'projects_batch-get-assets-history.md' + - 'Export Assets': 'projects_export-assets.md' + - 'Operations Get': 'projects_operations-get.md' theme: readthedocs diff --git a/gen/cloudasset1_beta1-cli/src/client.rs b/gen/cloudasset1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudasset1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudasset1_beta1-cli/src/main.rs b/gen/cloudasset1_beta1-cli/src/main.rs index e34657cdc9..a0c0e1d721 100644 --- a/gen/cloudasset1_beta1-cli/src/main.rs +++ b/gen/cloudasset1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudasset1_beta1::{api, Error, oauth2}; +use google_cloudasset1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -199,10 +198,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time-window-start-time" => { - call = call.read_time_window_start_time(value.unwrap_or("")); + call = call.read_time_window_start_time( value.map(|v| arg_from_str(v, err, "read-time-window-start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "read-time-window-end-time" => { - call = call.read_time_window_end_time(value.unwrap_or("")); + call = call.read_time_window_end_time( value.map(|v| arg_from_str(v, err, "read-time-window-end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "content-type" => { call = call.content_type(value.unwrap_or("")); @@ -405,10 +404,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time-window-start-time" => { - call = call.read_time_window_start_time(value.unwrap_or("")); + call = call.read_time_window_start_time( value.map(|v| arg_from_str(v, err, "read-time-window-start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "read-time-window-end-time" => { 
- call = call.read_time_window_end_time(value.unwrap_or("")); + call = call.read_time_window_end_time( value.map(|v| arg_from_str(v, err, "read-time-window-end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "content-type" => { call = call.content_type(value.unwrap_or("")); @@ -937,8 +936,8 @@ async fn main() { let mut app = App::new("cloudasset1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") - .about("The cloud asset API manages the history and inventory of cloud resources.") + .version("5.0.2+20230121") + .about("The Cloud Asset API manages the history and inventory of Google Cloud resources.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudasset1_beta1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/cloudasset1_beta1/Cargo.toml b/gen/cloudasset1_beta1/Cargo.toml index 015a0d278e..a7577542e8 100644 --- a/gen/cloudasset1_beta1/Cargo.toml +++ b/gen/cloudasset1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudasset1_beta1" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Asset (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudasset1_beta1" homepage = "https://cloud.google.com/asset-inventory/docs/quickstart" -documentation = "https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121" license = "MIT" keywords = ["cloudasset", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudasset1_beta1/README.md b/gen/cloudasset1_beta1/README.md index 0e0d54e068..b6fd768ac6 100644 --- a/gen/cloudasset1_beta1/README.md +++ b/gen/cloudasset1_beta1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-cloudasset1_beta1` library allows access to all features of the *Google Cloud Asset* service. 
-This documentation was generated from *Cloud Asset* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *cloudasset:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Asset* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *cloudasset:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Asset* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/asset-inventory/docs/quickstart). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/CloudAsset) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/CloudAsset) ... * folders - * [*export assets*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/api::FolderExportAssetCall) and [*operations get*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/api::FolderOperationGetCall) + * [*export assets*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/api::FolderExportAssetCall) and [*operations get*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/api::FolderOperationGetCall) * organizations - * [*batch get assets history*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/api::OrganizationBatchGetAssetsHistoryCall), [*export assets*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/api::OrganizationExportAssetCall) and [*operations 
get*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/api::OrganizationOperationGetCall) + * [*batch get assets history*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/api::OrganizationBatchGetAssetsHistoryCall), [*export assets*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/api::OrganizationExportAssetCall) and [*operations get*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/api::OrganizationOperationGetCall) * projects - * [*batch get assets history*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/api::ProjectBatchGetAssetsHistoryCall), [*export assets*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/api::ProjectExportAssetCall) and [*operations get*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/api::ProjectOperationGetCall) + * [*batch get assets history*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/api::ProjectBatchGetAssetsHistoryCall), [*export assets*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/api::ProjectExportAssetCall) and [*operations get*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/api::ProjectOperationGetCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/CloudAsset)** +* **[Hub](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/CloudAsset)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudasset1_beta1/5.0.2-beta-1+20230121/google_cloudasset1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudasset1_beta1/5.0.2+20230121/google_cloudasset1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudasset1_beta1/src/api.rs b/gen/cloudasset1_beta1/src/api.rs index ca15c1c477..b3345f2e85 100644 --- a/gen/cloudasset1_beta1/src/api.rs +++ b/gen/cloudasset1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudAsset { CloudAsset { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudasset.googleapis.com/".to_string(), _root_url: "https://cloudasset.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> CloudAsset { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudasset1_beta1/src/client.rs b/gen/cloudasset1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudasset1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudasset1_beta1/src/lib.rs b/gen/cloudasset1_beta1/src/lib.rs index 1aeb79b274..8ab7207679 100644 --- a/gen/cloudasset1_beta1/src/lib.rs +++ b/gen/cloudasset1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Asset* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *cloudasset:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Asset* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *cloudasset:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Asset* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/asset-inventory/docs/quickstart). diff --git a/gen/cloudbilling1-cli/Cargo.toml b/gen/cloudbilling1-cli/Cargo.toml index c5e9873afc..f72ae30072 100644 --- a/gen/cloudbilling1-cli/Cargo.toml +++ b/gen/cloudbilling1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudbilling1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20221206" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloudbilling (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudbilling1-cli" @@ -20,13 +20,13 @@ name = "cloudbilling1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudbilling1] path = "../cloudbilling1" -version = "4.0.1+20220305" +version = "5.0.2+20221206" + diff --git a/gen/cloudbilling1-cli/README.md b/gen/cloudbilling1-cli/README.md index e249c73034..c212593ec3 100644 --- a/gen/cloudbilling1-cli/README.md +++ b/gen/cloudbilling1-cli/README.md @@ -25,7 +25,7 @@ Find the source 
code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloudbilling* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloudbilling* API at revision *20221206*. The CLI is at version *5.0.2*. ```bash cloudbilling1 [options] diff --git a/gen/cloudbilling1-cli/mkdocs.yml b/gen/cloudbilling1-cli/mkdocs.yml index be21de83ee..d629b92fff 100644 --- a/gen/cloudbilling1-cli/mkdocs.yml +++ b/gen/cloudbilling1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloudbilling v4.0.1+20220305 +site_name: Cloudbilling v5.0.2+20221206 site_url: http://byron.github.io/google-apis-rs/google-cloudbilling1-cli site_description: A complete library to interact with Cloudbilling (protocol v1) @@ -7,20 +7,23 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudbilling1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['billing-accounts_create.md', 'Billing Accounts', 'Create'] -- ['billing-accounts_get.md', 'Billing Accounts', 'Get'] -- ['billing-accounts_get-iam-policy.md', 'Billing Accounts', 'Get Iam Policy'] -- ['billing-accounts_list.md', 'Billing Accounts', 'List'] -- ['billing-accounts_patch.md', 'Billing Accounts', 'Patch'] -- ['billing-accounts_projects-list.md', 'Billing Accounts', 'Projects List'] -- ['billing-accounts_set-iam-policy.md', 'Billing Accounts', 'Set Iam Policy'] -- ['billing-accounts_test-iam-permissions.md', 'Billing Accounts', 'Test Iam Permissions'] -- ['projects_get-billing-info.md', 'Projects', 'Get Billing Info'] -- ['projects_update-billing-info.md', 'Projects', 'Update Billing Info'] -- ['services_list.md', 'Services', 'List'] -- ['services_skus-list.md', 'Services', 'Skus List'] +nav: +- Home: 'index.md' +- 'Billing Accounts': + - 'Create': 'billing-accounts_create.md' + - 'Get': 'billing-accounts_get.md' + - 'Get Iam Policy': 'billing-accounts_get-iam-policy.md' + - 'List': 'billing-accounts_list.md' + - 
'Patch': 'billing-accounts_patch.md' + - 'Projects List': 'billing-accounts_projects-list.md' + - 'Set Iam Policy': 'billing-accounts_set-iam-policy.md' + - 'Test Iam Permissions': 'billing-accounts_test-iam-permissions.md' +- 'Projects': + - 'Get Billing Info': 'projects_get-billing-info.md' + - 'Update Billing Info': 'projects_update-billing-info.md' +- 'Services': + - 'List': 'services_list.md' + - 'Skus List': 'services_skus-list.md' theme: readthedocs diff --git a/gen/cloudbilling1-cli/src/client.rs b/gen/cloudbilling1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudbilling1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudbilling1-cli/src/main.rs b/gen/cloudbilling1-cli/src/main.rs index 9c62c7e870..cf16399d47 100644 --- a/gen/cloudbilling1-cli/src/main.rs +++ b/gen/cloudbilling1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudbilling1::{api, Error, oauth2}; +use google_cloudbilling1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -198,7 +197,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -257,7 +256,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -352,7 +351,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let 
mut found = false; @@ -411,7 +410,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -782,7 +781,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -838,16 +837,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "end-time" => { - call = call.end_time(value.unwrap_or("")); + call = call.end_time( value.map(|v| arg_from_str(v, err, "end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "currency-code" => { call = call.currency_code(value.unwrap_or("")); @@ -1088,7 +1087,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1176,7 +1175,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1204,7 +1203,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1326,7 +1325,7 @@ async fn main() { let mut app = App::new("cloudbilling1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20221206") .about("Allows developers to manage billing for their Google Cloud Platform projects programmatically.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudbilling1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudbilling1/Cargo.toml b/gen/cloudbilling1/Cargo.toml index cefd8dd565..a9fd5284ac 100644 --- a/gen/cloudbilling1/Cargo.toml +++ b/gen/cloudbilling1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudbilling1" -version = "5.0.2-beta-1+20221206" +version = "5.0.2+20221206" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloudbilling (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudbilling1" homepage = 
"https://cloud.google.com/billing/" -documentation = "https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206" +documentation = "https://docs.rs/google-cloudbilling1/5.0.2+20221206" license = "MIT" keywords = ["cloudbilling", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudbilling1/README.md b/gen/cloudbilling1/README.md index d506f16011..93a4461ece 100644 --- a/gen/cloudbilling1/README.md +++ b/gen/cloudbilling1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-cloudbilling1` library allows access to all features of the *Google Cloudbilling* service. -This documentation was generated from *Cloudbilling* crate version *5.0.2-beta-1+20221206*, where *20221206* is the exact revision of the *cloudbilling:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloudbilling* crate version *5.0.2+20221206*, where *20221206* is the exact revision of the *cloudbilling:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloudbilling* *v1* API can be found at the [official documentation site](https://cloud.google.com/billing/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/Cloudbilling) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/Cloudbilling) ... 
-* [billing accounts](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccount) - * [*create*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccountCreateCall), [*get*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccountGetCall), [*get iam policy*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccountGetIamPolicyCall), [*list*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccountListCall), [*patch*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccountPatchCall), [*projects list*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccountProjectListCall), [*set iam policy*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccountSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::BillingAccountTestIamPermissionCall) +* [billing accounts](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccount) + * [*create*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccountCreateCall), [*get*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccountGetCall), [*get iam policy*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccountGetIamPolicyCall), [*list*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccountListCall), [*patch*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccountPatchCall), [*projects list*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccountProjectListCall), 
[*set iam policy*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccountSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::BillingAccountTestIamPermissionCall) * projects - * [*get billing info*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::ProjectGetBillingInfoCall) and [*update billing info*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::ProjectUpdateBillingInfoCall) -* [services](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::Service) - * [*list*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::ServiceListCall) and [*skus list*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/api::ServiceSkuListCall) + * [*get billing info*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::ProjectGetBillingInfoCall) and [*update billing info*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::ProjectUpdateBillingInfoCall) +* [services](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::Service) + * [*list*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::ServiceListCall) and [*skus list*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/api::ServiceSkuListCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/Cloudbilling)** +* **[Hub](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/Cloudbilling)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::Part)** + * **[Parts](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudbilling1/5.0.2-beta-1+20221206/google_cloudbilling1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudbilling1/5.0.2+20221206/google_cloudbilling1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudbilling1/src/api.rs b/gen/cloudbilling1/src/api.rs index ebe9e5be8b..5223f2ad4c 100644 --- a/gen/cloudbilling1/src/api.rs +++ b/gen/cloudbilling1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Cloudbilling { Cloudbilling { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudbilling.googleapis.com/".to_string(), _root_url: "https://cloudbilling.googleapis.com/".to_string(), } @@ -146,7 +146,7 @@ impl<'a, S> Cloudbilling { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudbilling1/src/client.rs b/gen/cloudbilling1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudbilling1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudbilling1/src/lib.rs b/gen/cloudbilling1/src/lib.rs index 7d0c40e3b4..2dfe38f312 100644 --- a/gen/cloudbilling1/src/lib.rs +++ b/gen/cloudbilling1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloudbilling* crate version *5.0.2-beta-1+20221206*, where *20221206* is the exact revision of the *cloudbilling:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloudbilling* crate version *5.0.2+20221206*, where *20221206* is the exact revision of the *cloudbilling:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloudbilling* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/billing/). diff --git a/gen/cloudbuild1-cli/Cargo.toml b/gen/cloudbuild1-cli/Cargo.toml index e69327d2c2..29489dd109 100644 --- a/gen/cloudbuild1-cli/Cargo.toml +++ b/gen/cloudbuild1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudbuild1-cli" -version = "4.0.1+20220218" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Build (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudbuild1-cli" @@ -20,13 +20,13 @@ name = "cloudbuild1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudbuild1] path = "../cloudbuild1" -version = "4.0.1+20220218" +version = "5.0.2+20230120" + diff --git a/gen/cloudbuild1-cli/README.md b/gen/cloudbuild1-cli/README.md index 5672ebe755..ef1721bf67 100644 --- a/gen/cloudbuild1-cli/README.md +++ b/gen/cloudbuild1-cli/README.md @@ -25,10 +25,12 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Build* API at revision *20220218*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Build* API at revision *20230120*. The CLI is at version *5.0.2*. ```bash cloudbuild1 [options] + github-dot-com-webhook + receive (-r )... [-p ]... [-o ] locations regional-webhook (-r )... [-p ]... [-o ] methods @@ -48,7 +50,6 @@ cloudbuild1 [options] github-enterprise-configs-get [-p ]... [-o ] github-enterprise-configs-list [-p ]... [-o ] github-enterprise-configs-patch (-r )... [-p ]... [-o ] - locations-bitbucket-server-configs-add-bitbucket-server-connected-repository (-r )... [-p ]... [-o ] locations-bitbucket-server-configs-connected-repositories-batch-create (-r )... [-p ]... [-o ] locations-bitbucket-server-configs-create (-r )... [-p ]... [-o ] locations-bitbucket-server-configs-delete [-p ]... [-o ] @@ -63,6 +64,14 @@ cloudbuild1 [options] locations-builds-get [-p ]... [-o ] locations-builds-list [-p ]... [-o ] locations-builds-retry (-r )... [-p ]... [-o ] + locations-git-lab-configs-connected-repositories-batch-create (-r )... [-p ]... [-o ] + locations-git-lab-configs-create (-r )... [-p ]... [-o ] + locations-git-lab-configs-delete [-p ]... [-o ] + locations-git-lab-configs-get [-p ]... [-o ] + locations-git-lab-configs-list [-p ]... [-o ] + locations-git-lab-configs-patch (-r )... [-p ]... [-o ] + locations-git-lab-configs-remove-git-lab-connected-repository (-r )... [-p ]... [-o ] + locations-git-lab-configs-repos-list [-p ]... [-o ] locations-github-enterprise-configs-create (-r )... [-p ]... [-o ] locations-github-enterprise-configs-delete [-p ]... [-o ] locations-github-enterprise-configs-get [-p ]... 
[-o ] diff --git a/gen/cloudbuild1-cli/mkdocs.yml b/gen/cloudbuild1-cli/mkdocs.yml index e568306155..020d155d19 100644 --- a/gen/cloudbuild1-cli/mkdocs.yml +++ b/gen/cloudbuild1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Build v4.0.1+20220218 +site_name: Cloud Build v5.0.2+20230120 site_url: http://byron.github.io/google-apis-rs/google-cloudbuild1-cli site_description: A complete library to interact with Cloud Build (protocol v1) @@ -7,64 +7,77 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudbuild1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['locations_regional-webhook.md', 'Locations', 'Regional Webhook'] -- ['methods_webhook.md', 'Methods', 'Webhook'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_get.md', 'Operations', 'Get'] -- ['projects_builds-approve.md', 'Projects', 'Builds Approve'] -- ['projects_builds-cancel.md', 'Projects', 'Builds Cancel'] -- ['projects_builds-create.md', 'Projects', 'Builds Create'] -- ['projects_builds-get.md', 'Projects', 'Builds Get'] -- ['projects_builds-list.md', 'Projects', 'Builds List'] -- ['projects_builds-retry.md', 'Projects', 'Builds Retry'] -- ['projects_github-enterprise-configs-create.md', 'Projects', 'Github Enterprise Configs Create'] -- ['projects_github-enterprise-configs-delete.md', 'Projects', 'Github Enterprise Configs Delete'] -- ['projects_github-enterprise-configs-get.md', 'Projects', 'Github Enterprise Configs Get'] -- ['projects_github-enterprise-configs-list.md', 'Projects', 'Github Enterprise Configs List'] -- ['projects_github-enterprise-configs-patch.md', 'Projects', 'Github Enterprise Configs Patch'] -- ['projects_locations-bitbucket-server-configs-add-bitbucket-server-connected-repository.md', 'Projects', 'Locations Bitbucket Server Configs Add Bitbucket Server Connected Repository'] -- ['projects_locations-bitbucket-server-configs-connected-repositories-batch-create.md', 'Projects', 'Locations Bitbucket Server Configs 
Connected Repositories Batch Create'] -- ['projects_locations-bitbucket-server-configs-create.md', 'Projects', 'Locations Bitbucket Server Configs Create'] -- ['projects_locations-bitbucket-server-configs-delete.md', 'Projects', 'Locations Bitbucket Server Configs Delete'] -- ['projects_locations-bitbucket-server-configs-get.md', 'Projects', 'Locations Bitbucket Server Configs Get'] -- ['projects_locations-bitbucket-server-configs-list.md', 'Projects', 'Locations Bitbucket Server Configs List'] -- ['projects_locations-bitbucket-server-configs-patch.md', 'Projects', 'Locations Bitbucket Server Configs Patch'] -- ['projects_locations-bitbucket-server-configs-remove-bitbucket-server-connected-repository.md', 'Projects', 'Locations Bitbucket Server Configs Remove Bitbucket Server Connected Repository'] -- ['projects_locations-bitbucket-server-configs-repos-list.md', 'Projects', 'Locations Bitbucket Server Configs Repos List'] -- ['projects_locations-builds-approve.md', 'Projects', 'Locations Builds Approve'] -- ['projects_locations-builds-cancel.md', 'Projects', 'Locations Builds Cancel'] -- ['projects_locations-builds-create.md', 'Projects', 'Locations Builds Create'] -- ['projects_locations-builds-get.md', 'Projects', 'Locations Builds Get'] -- ['projects_locations-builds-list.md', 'Projects', 'Locations Builds List'] -- ['projects_locations-builds-retry.md', 'Projects', 'Locations Builds Retry'] -- ['projects_locations-github-enterprise-configs-create.md', 'Projects', 'Locations Github Enterprise Configs Create'] -- ['projects_locations-github-enterprise-configs-delete.md', 'Projects', 'Locations Github Enterprise Configs Delete'] -- ['projects_locations-github-enterprise-configs-get.md', 'Projects', 'Locations Github Enterprise Configs Get'] -- ['projects_locations-github-enterprise-configs-list.md', 'Projects', 'Locations Github Enterprise Configs List'] -- ['projects_locations-github-enterprise-configs-patch.md', 'Projects', 'Locations Github Enterprise Configs 
Patch'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-triggers-create.md', 'Projects', 'Locations Triggers Create'] -- ['projects_locations-triggers-delete.md', 'Projects', 'Locations Triggers Delete'] -- ['projects_locations-triggers-get.md', 'Projects', 'Locations Triggers Get'] -- ['projects_locations-triggers-list.md', 'Projects', 'Locations Triggers List'] -- ['projects_locations-triggers-patch.md', 'Projects', 'Locations Triggers Patch'] -- ['projects_locations-triggers-run.md', 'Projects', 'Locations Triggers Run'] -- ['projects_locations-triggers-webhook.md', 'Projects', 'Locations Triggers Webhook'] -- ['projects_locations-worker-pools-create.md', 'Projects', 'Locations Worker Pools Create'] -- ['projects_locations-worker-pools-delete.md', 'Projects', 'Locations Worker Pools Delete'] -- ['projects_locations-worker-pools-get.md', 'Projects', 'Locations Worker Pools Get'] -- ['projects_locations-worker-pools-list.md', 'Projects', 'Locations Worker Pools List'] -- ['projects_locations-worker-pools-patch.md', 'Projects', 'Locations Worker Pools Patch'] -- ['projects_triggers-create.md', 'Projects', 'Triggers Create'] -- ['projects_triggers-delete.md', 'Projects', 'Triggers Delete'] -- ['projects_triggers-get.md', 'Projects', 'Triggers Get'] -- ['projects_triggers-list.md', 'Projects', 'Triggers List'] -- ['projects_triggers-patch.md', 'Projects', 'Triggers Patch'] -- ['projects_triggers-run.md', 'Projects', 'Triggers Run'] -- ['projects_triggers-webhook.md', 'Projects', 'Triggers Webhook'] +nav: +- Home: 'index.md' +- 'Github Dot Com Webhook': + - 'Receive': 'github-dot-com-webhook_receive.md' +- 'Locations': + - 'Regional Webhook': 'locations_regional-webhook.md' +- 'Methods': + - 'Webhook': 'methods_webhook.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Get': 'operations_get.md' +- 'Projects': 
+ - 'Builds Approve': 'projects_builds-approve.md' + - 'Builds Cancel': 'projects_builds-cancel.md' + - 'Builds Create': 'projects_builds-create.md' + - 'Builds Get': 'projects_builds-get.md' + - 'Builds List': 'projects_builds-list.md' + - 'Builds Retry': 'projects_builds-retry.md' + - 'Github Enterprise Configs Create': 'projects_github-enterprise-configs-create.md' + - 'Github Enterprise Configs Delete': 'projects_github-enterprise-configs-delete.md' + - 'Github Enterprise Configs Get': 'projects_github-enterprise-configs-get.md' + - 'Github Enterprise Configs List': 'projects_github-enterprise-configs-list.md' + - 'Github Enterprise Configs Patch': 'projects_github-enterprise-configs-patch.md' + - 'Locations Bitbucket Server Configs Connected Repositories Batch Create': 'projects_locations-bitbucket-server-configs-connected-repositories-batch-create.md' + - 'Locations Bitbucket Server Configs Create': 'projects_locations-bitbucket-server-configs-create.md' + - 'Locations Bitbucket Server Configs Delete': 'projects_locations-bitbucket-server-configs-delete.md' + - 'Locations Bitbucket Server Configs Get': 'projects_locations-bitbucket-server-configs-get.md' + - 'Locations Bitbucket Server Configs List': 'projects_locations-bitbucket-server-configs-list.md' + - 'Locations Bitbucket Server Configs Patch': 'projects_locations-bitbucket-server-configs-patch.md' + - 'Locations Bitbucket Server Configs Remove Bitbucket Server Connected Repository': 'projects_locations-bitbucket-server-configs-remove-bitbucket-server-connected-repository.md' + - 'Locations Bitbucket Server Configs Repos List': 'projects_locations-bitbucket-server-configs-repos-list.md' + - 'Locations Builds Approve': 'projects_locations-builds-approve.md' + - 'Locations Builds Cancel': 'projects_locations-builds-cancel.md' + - 'Locations Builds Create': 'projects_locations-builds-create.md' + - 'Locations Builds Get': 'projects_locations-builds-get.md' + - 'Locations Builds List': 
'projects_locations-builds-list.md' + - 'Locations Builds Retry': 'projects_locations-builds-retry.md' + - 'Locations Git Lab Configs Connected Repositories Batch Create': 'projects_locations-git-lab-configs-connected-repositories-batch-create.md' + - 'Locations Git Lab Configs Create': 'projects_locations-git-lab-configs-create.md' + - 'Locations Git Lab Configs Delete': 'projects_locations-git-lab-configs-delete.md' + - 'Locations Git Lab Configs Get': 'projects_locations-git-lab-configs-get.md' + - 'Locations Git Lab Configs List': 'projects_locations-git-lab-configs-list.md' + - 'Locations Git Lab Configs Patch': 'projects_locations-git-lab-configs-patch.md' + - 'Locations Git Lab Configs Remove Git Lab Connected Repository': 'projects_locations-git-lab-configs-remove-git-lab-connected-repository.md' + - 'Locations Git Lab Configs Repos List': 'projects_locations-git-lab-configs-repos-list.md' + - 'Locations Github Enterprise Configs Create': 'projects_locations-github-enterprise-configs-create.md' + - 'Locations Github Enterprise Configs Delete': 'projects_locations-github-enterprise-configs-delete.md' + - 'Locations Github Enterprise Configs Get': 'projects_locations-github-enterprise-configs-get.md' + - 'Locations Github Enterprise Configs List': 'projects_locations-github-enterprise-configs-list.md' + - 'Locations Github Enterprise Configs Patch': 'projects_locations-github-enterprise-configs-patch.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Triggers Create': 'projects_locations-triggers-create.md' + - 'Locations Triggers Delete': 'projects_locations-triggers-delete.md' + - 'Locations Triggers Get': 'projects_locations-triggers-get.md' + - 'Locations Triggers List': 'projects_locations-triggers-list.md' + - 'Locations Triggers Patch': 'projects_locations-triggers-patch.md' + - 'Locations Triggers Run': 
'projects_locations-triggers-run.md' + - 'Locations Triggers Webhook': 'projects_locations-triggers-webhook.md' + - 'Locations Worker Pools Create': 'projects_locations-worker-pools-create.md' + - 'Locations Worker Pools Delete': 'projects_locations-worker-pools-delete.md' + - 'Locations Worker Pools Get': 'projects_locations-worker-pools-get.md' + - 'Locations Worker Pools List': 'projects_locations-worker-pools-list.md' + - 'Locations Worker Pools Patch': 'projects_locations-worker-pools-patch.md' + - 'Triggers Create': 'projects_triggers-create.md' + - 'Triggers Delete': 'projects_triggers-delete.md' + - 'Triggers Get': 'projects_triggers-get.md' + - 'Triggers List': 'projects_triggers-list.md' + - 'Triggers Patch': 'projects_triggers-patch.md' + - 'Triggers Run': 'projects_triggers-run.md' + - 'Triggers Webhook': 'projects_triggers-webhook.md' theme: readthedocs diff --git a/gen/cloudbuild1-cli/src/client.rs b/gen/cloudbuild1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudbuild1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudbuild1-cli/src/main.rs b/gen/cloudbuild1-cli/src/main.rs index deeacf9fee..2be59e1d3a 100644 --- a/gen/cloudbuild1-cli/src/main.rs +++ b/gen/cloudbuild1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudbuild1::{api, Error, oauth2}; +use google_cloudbuild1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,93 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _github_dot_com_webhook_receive(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "content-type" => Some(("contentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data" => Some(("data", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["content-type", "data"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::HttpBody = json::value::from_value(object).unwrap(); + let mut call = self.hub.github_dot_com_webhook().receive(request); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "webhook-key" => { + call = call.webhook_key(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["webhook-key"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); 
+ remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _locations_regional_webhook(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -771,7 +857,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1246,7 +1332,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1295,93 +1381,6 @@ where } } - async fn _projects_locations_bitbucket_server_configs_add_bitbucket_server_connected_repository(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "connected-repository.project-key" => Some(("connectedRepository.projectKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connected-repository.repo-slug" => Some(("connectedRepository.repoSlug", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connected-repository.webhook-id" => Some(("connectedRepository.webhookId", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["connected-repository", "project-key", "repo-slug", "webhook-id"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::AddBitbucketServerConnectedRepositoryRequest = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_bitbucket_server_configs_add_bitbucket_server_connected_repository(request, opt.value_of("config").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match 
writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _projects_locations_bitbucket_server_configs_connected_repositories_batch_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1679,7 +1678,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1778,7 +1777,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1924,7 +1923,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2386,7 +2385,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2525,6 +2524,596 @@ where } } + async fn _projects_locations_git_lab_configs_connected_repositories_batch_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::BatchCreateGitLabConnectedRepositoriesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_git_lab_configs_connected_repositories_batch_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + 
} + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_git_lab_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enterprise-config.host-uri" => Some(("enterpriseConfig.hostUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enterprise-config.service-directory-config.service" => Some(("enterpriseConfig.serviceDirectoryConfig.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enterprise-config.ssl-ca" => Some(("enterpriseConfig.sslCa", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "secrets.api-access-token-version" => Some(("secrets.apiAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "secrets.api-key-version" => Some(("secrets.apiKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "secrets.read-access-token-version" => Some(("secrets.readAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "secrets.webhook-secret-version" => Some(("secrets.webhookSecretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "username" => Some(("username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "webhook-key" => Some(("webhookKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-access-token-version", "api-key-version", "create-time", "enterprise-config", "host-uri", "name", "read-access-token-version", "secrets", "service", "service-directory-config", "ssl-ca", "username", "webhook-key", "webhook-secret-version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + 
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GitLabConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_git_lab_configs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "gitlab-config-id" => { + call = call.gitlab_config_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["gitlab-config-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_git_lab_configs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), 
DoitError> { + let mut call = self.hub.projects().locations_git_lab_configs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_git_lab_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_git_lab_configs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == 
*param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_git_lab_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_git_lab_configs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } 
+ } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_git_lab_configs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enterprise-config.host-uri" => Some(("enterpriseConfig.hostUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enterprise-config.service-directory-config.service" => Some(("enterpriseConfig.serviceDirectoryConfig.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enterprise-config.ssl-ca" => Some(("enterpriseConfig.sslCa", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "secrets.api-access-token-version" => Some(("secrets.apiAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "secrets.api-key-version" => Some(("secrets.apiKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "secrets.read-access-token-version" => Some(("secrets.readAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "secrets.webhook-secret-version" => Some(("secrets.webhookSecretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "username" => Some(("username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "webhook-key" => Some(("webhookKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-access-token-version", "api-key-version", "create-time", "enterprise-config", "host-uri", "name", "read-access-token-version", "secrets", "service", "service-directory-config", "ssl-ca", "username", "webhook-key", "webhook-secret-version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + 
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GitLabConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_git_lab_configs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn 
_projects_locations_git_lab_configs_remove_git_lab_connected_repository(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "connected-repository.id" => Some(("connectedRepository.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "connected-repository.webhook-id" => Some(("connectedRepository.webhookId", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["connected-repository", "id", "webhook-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RemoveGitLabConnectedRepositoryRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_git_lab_configs_remove_git_lab_connected_repository(request, opt.value_of("config").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; 
+ for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_git_lab_configs_repos_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_git_lab_configs_repos_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", 
key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_github_enterprise_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2861,7 +3450,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3190,14 +3779,42 @@ where "github.push.branch" => Some(("github.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "github.push.invert-regex" => Some(("github.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "github.push.tag" => Some(("github.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.create-time" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.host-uri" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.hostUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.service-directory-config.service" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.serviceDirectoryConfig.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.ssl-ca" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.sslCa", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.name" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.api-access-token-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.apiAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.api-key-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.apiKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.read-access-token-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.readAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.webhook-secret-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.webhookSecretVersion", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.username" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.webhook-key" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.webhookKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config-resource" => Some(("gitlabEnterpriseEventsConfig.gitlabConfigResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.project-namespace" => Some(("gitlabEnterpriseEventsConfig.projectNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.branch" => Some(("gitlabEnterpriseEventsConfig.pullRequest.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.comment-control" => Some(("gitlabEnterpriseEventsConfig.pullRequest.commentControl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.invert-regex" => Some(("gitlabEnterpriseEventsConfig.pullRequest.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.branch" => Some(("gitlabEnterpriseEventsConfig.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.invert-regex" => Some(("gitlabEnterpriseEventsConfig.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.tag" => Some(("gitlabEnterpriseEventsConfig.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"ignored-files" => Some(("ignoredFiles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "include-build-logs" => Some(("includeBuildLogs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "included-files" => Some(("includedFiles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.service-account-email" => Some(("pubsubConfig.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.state" => Some(("pubsubConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.subscription" => Some(("pubsubConfig.subscription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.topic" => Some(("pubsubConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.branch" => Some(("repositoryEventConfig.pullRequest.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.comment-control" => Some(("repositoryEventConfig.pullRequest.commentControl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.invert-regex" => Some(("repositoryEventConfig.pullRequest.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "repository-event-config.push.branch" => Some(("repositoryEventConfig.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.push.invert-regex" => Some(("repositoryEventConfig.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "repository-event-config.push.tag" => Some(("repositoryEventConfig.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"repository-event-config.repository" => Some(("repositoryEventConfig.repository", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.repository-type" => Some(("repositoryEventConfig.repositoryType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-to-build.bitbucket-server-config" => Some(("sourceToBuild.bitbucketServerConfig", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3218,7 +3835,7 @@ where "webhook-config.secret" => Some(("webhookConfig.secret", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "webhook-config.state" => Some(("webhookConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-access-token-version-name", "api-key", "approval", "approval-config", "approval-required", "approval-time", "approver-account", "artifact-manifest", "artifact-timing", "artifacts", "autodetect", "bitbucket-server-config", "bitbucket-server-config-resource", "bitbucket-server-trigger-config", "branch", "branch-name", "bucket", "build", "build-step-images", "build-step-outputs", "build-trigger-id", "comment", "comment-control", "commit-sha", "config", "create-time", "decision", "description", "detail", "dir", "disabled", "disk-size-gb", "dynamic-substitutions", "end-time", "enterprise-config-resource-name", "env", "event-type", "failure-info", "filename", "filter", "finish-time", "generation", "git-file-source", "github", "github-enterprise-config", "host-uri", "id", "ignored-files", "images", "included-files", "installation-id", "invert-regex", "location", "log-streaming-option", "log-url", "logging", "logs-bucket", "machine-type", "name", 
"num-artifacts", "object", "objects", "options", "owner", "path", "paths", "peered-network", "pool", "project-id", "project-key", "pubsub-config", "pull-request", "push", "queue-ttl", "read-access-token-version-name", "ref", "repo-name", "repo-slug", "repo-source", "repo-type", "requested-verify-option", "resolved-repo-source", "resolved-storage-source", "resolved-storage-source-manifest", "resource-name", "result", "results", "revision", "secret", "secret-env", "secrets", "service-account", "service-account-email", "source", "source-provenance", "source-provenance-hash", "source-to-build", "ssl-ca", "start-time", "state", "status", "status-detail", "storage-source", "storage-source-manifest", "subscription", "substitution-option", "substitutions", "tag", "tag-name", "tags", "timeout", "timing", "topic", "trigger-template", "type", "uri", "url", "username", "webhook-config", "webhook-key", "webhook-secret-version-name", "worker-pool"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-access-token-version-name", "api-access-token-version", "api-key", "api-key-version", "approval", "approval-config", "approval-required", "approval-time", "approver-account", "artifact-manifest", "artifact-timing", "artifacts", "autodetect", "bitbucket-server-config", "bitbucket-server-config-resource", "bitbucket-server-trigger-config", "branch", "branch-name", "bucket", "build", "build-step-images", "build-step-outputs", "build-trigger-id", "comment", "comment-control", "commit-sha", "config", "create-time", "decision", "description", "detail", "dir", "disabled", "disk-size-gb", "dynamic-substitutions", "end-time", "enterprise-config", "enterprise-config-resource-name", "env", "event-type", "failure-info", "filename", "filter", "finish-time", "generation", "git-file-source", "github", "github-enterprise-config", "gitlab-config", "gitlab-config-resource", "gitlab-enterprise-events-config", "host-uri", "id", "ignored-files", "images", "include-build-logs", 
"included-files", "installation-id", "invert-regex", "location", "log-streaming-option", "log-url", "logging", "logs-bucket", "machine-type", "name", "num-artifacts", "object", "objects", "options", "owner", "path", "paths", "peered-network", "pool", "project-id", "project-key", "project-namespace", "pubsub-config", "pull-request", "push", "queue-ttl", "read-access-token-version", "read-access-token-version-name", "ref", "repo-name", "repo-slug", "repo-source", "repo-type", "repository", "repository-event-config", "repository-type", "requested-verify-option", "resolved-repo-source", "resolved-storage-source", "resolved-storage-source-manifest", "resource-name", "result", "results", "revision", "secret", "secret-env", "secrets", "service", "service-account", "service-account-email", "service-directory-config", "source", "source-provenance", "source-provenance-hash", "source-to-build", "ssl-ca", "start-time", "state", "status", "status-detail", "storage-source", "storage-source-manifest", "subscription", "substitution-option", "substitutions", "tag", "tag-name", "tags", "timeout", "timing", "topic", "trigger-template", "type", "uri", "url", "username", "webhook-config", "webhook-key", "webhook-secret-version", "webhook-secret-version-name", "worker-pool"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3413,7 +4030,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3606,14 +4223,42 @@ where "github.push.branch" => Some(("github.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "github.push.invert-regex" => Some(("github.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod 
})), "github.push.tag" => Some(("github.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.create-time" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.host-uri" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.hostUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.service-directory-config.service" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.serviceDirectoryConfig.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.ssl-ca" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.sslCa", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.name" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.api-access-token-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.apiAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.api-key-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.apiKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.read-access-token-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.readAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.webhook-secret-version" => 
Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.webhookSecretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.username" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.webhook-key" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.webhookKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config-resource" => Some(("gitlabEnterpriseEventsConfig.gitlabConfigResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.project-namespace" => Some(("gitlabEnterpriseEventsConfig.projectNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.branch" => Some(("gitlabEnterpriseEventsConfig.pullRequest.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.comment-control" => Some(("gitlabEnterpriseEventsConfig.pullRequest.commentControl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.invert-regex" => Some(("gitlabEnterpriseEventsConfig.pullRequest.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.branch" => Some(("gitlabEnterpriseEventsConfig.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.invert-regex" => Some(("gitlabEnterpriseEventsConfig.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.tag" => Some(("gitlabEnterpriseEventsConfig.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ignored-files" => Some(("ignoredFiles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "include-build-logs" => Some(("includeBuildLogs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "included-files" => Some(("includedFiles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.service-account-email" => Some(("pubsubConfig.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.state" => Some(("pubsubConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.subscription" => Some(("pubsubConfig.subscription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.topic" => Some(("pubsubConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.branch" => Some(("repositoryEventConfig.pullRequest.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.comment-control" => Some(("repositoryEventConfig.pullRequest.commentControl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.invert-regex" => Some(("repositoryEventConfig.pullRequest.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "repository-event-config.push.branch" => Some(("repositoryEventConfig.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.push.invert-regex" => Some(("repositoryEventConfig.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "repository-event-config.push.tag" => 
Some(("repositoryEventConfig.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.repository" => Some(("repositoryEventConfig.repository", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.repository-type" => Some(("repositoryEventConfig.repositoryType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-to-build.bitbucket-server-config" => Some(("sourceToBuild.bitbucketServerConfig", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3634,7 +4279,7 @@ where "webhook-config.secret" => Some(("webhookConfig.secret", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "webhook-config.state" => Some(("webhookConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-access-token-version-name", "api-key", "approval", "approval-config", "approval-required", "approval-time", "approver-account", "artifact-manifest", "artifact-timing", "artifacts", "autodetect", "bitbucket-server-config", "bitbucket-server-config-resource", "bitbucket-server-trigger-config", "branch", "branch-name", "bucket", "build", "build-step-images", "build-step-outputs", "build-trigger-id", "comment", "comment-control", "commit-sha", "config", "create-time", "decision", "description", "detail", "dir", "disabled", "disk-size-gb", "dynamic-substitutions", "end-time", "enterprise-config-resource-name", "env", "event-type", "failure-info", "filename", "filter", "finish-time", "generation", "git-file-source", "github", "github-enterprise-config", "host-uri", "id", "ignored-files", "images", "included-files", "installation-id", 
"invert-regex", "location", "log-streaming-option", "log-url", "logging", "logs-bucket", "machine-type", "name", "num-artifacts", "object", "objects", "options", "owner", "path", "paths", "peered-network", "pool", "project-id", "project-key", "pubsub-config", "pull-request", "push", "queue-ttl", "read-access-token-version-name", "ref", "repo-name", "repo-slug", "repo-source", "repo-type", "requested-verify-option", "resolved-repo-source", "resolved-storage-source", "resolved-storage-source-manifest", "resource-name", "result", "results", "revision", "secret", "secret-env", "secrets", "service-account", "service-account-email", "source", "source-provenance", "source-provenance-hash", "source-to-build", "ssl-ca", "start-time", "state", "status", "status-detail", "storage-source", "storage-source-manifest", "subscription", "substitution-option", "substitutions", "tag", "tag-name", "tags", "timeout", "timing", "topic", "trigger-template", "type", "uri", "url", "username", "webhook-config", "webhook-key", "webhook-secret-version-name", "worker-pool"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-access-token-version-name", "api-access-token-version", "api-key", "api-key-version", "approval", "approval-config", "approval-required", "approval-time", "approver-account", "artifact-manifest", "artifact-timing", "artifacts", "autodetect", "bitbucket-server-config", "bitbucket-server-config-resource", "bitbucket-server-trigger-config", "branch", "branch-name", "bucket", "build", "build-step-images", "build-step-outputs", "build-trigger-id", "comment", "comment-control", "commit-sha", "config", "create-time", "decision", "description", "detail", "dir", "disabled", "disk-size-gb", "dynamic-substitutions", "end-time", "enterprise-config", "enterprise-config-resource-name", "env", "event-type", "failure-info", "filename", "filter", "finish-time", "generation", "git-file-source", "github", "github-enterprise-config", "gitlab-config", "gitlab-config-resource", 
"gitlab-enterprise-events-config", "host-uri", "id", "ignored-files", "images", "include-build-logs", "included-files", "installation-id", "invert-regex", "location", "log-streaming-option", "log-url", "logging", "logs-bucket", "machine-type", "name", "num-artifacts", "object", "objects", "options", "owner", "path", "paths", "peered-network", "pool", "project-id", "project-key", "project-namespace", "pubsub-config", "pull-request", "push", "queue-ttl", "read-access-token-version", "read-access-token-version-name", "ref", "repo-name", "repo-slug", "repo-source", "repo-type", "repository", "repository-event-config", "repository-type", "requested-verify-option", "resolved-repo-source", "resolved-storage-source", "resolved-storage-source-manifest", "resource-name", "result", "results", "revision", "secret", "secret-env", "secrets", "service", "service-account", "service-account-email", "service-directory-config", "source", "source-provenance", "source-provenance-hash", "source-to-build", "ssl-ca", "start-time", "state", "status", "status-detail", "storage-source", "storage-source-manifest", "subscription", "substitution-option", "substitutions", "tag", "tag-name", "tags", "timeout", "timing", "topic", "trigger-template", "type", "uri", "url", "username", "webhook-config", "webhook-key", "webhook-secret-version", "webhook-secret-version-name", "worker-pool"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3919,13 +4564,14 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-pool-v1-config.network-config.egress-option" => Some(("privatePoolV1Config.networkConfig.egressOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-pool-v1-config.network-config.peered-network" => Some(("privatePoolV1Config.networkConfig.peeredNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"private-pool-v1-config.network-config.peered-network-ip-range" => Some(("privatePoolV1Config.networkConfig.peeredNetworkIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-pool-v1-config.worker-config.disk-size-gb" => Some(("privatePoolV1Config.workerConfig.diskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-pool-v1-config.worker-config.machine-type" => Some(("privatePoolV1Config.workerConfig.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "create-time", "delete-time", "disk-size-gb", "display-name", "egress-option", "etag", "machine-type", "name", "network-config", "peered-network", "private-pool-v1-config", "state", "uid", "update-time", "worker-config"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "create-time", "delete-time", "disk-size-gb", "display-name", "egress-option", "etag", "machine-type", "name", "network-config", "peered-network", "peered-network-ip-range", "private-pool-v1-config", "state", "uid", "update-time", "worker-config"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3943,7 +4589,7 @@ where call = call.worker_pool_id(value.unwrap_or("")); }, "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3999,13 +4645,13 @@ where let (key, value) = 
parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "etag" => { call = call.etag(value.unwrap_or("")); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4116,7 +4762,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4196,13 +4842,14 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-pool-v1-config.network-config.egress-option" => Some(("privatePoolV1Config.networkConfig.egressOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-pool-v1-config.network-config.peered-network" => Some(("privatePoolV1Config.networkConfig.peeredNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "private-pool-v1-config.network-config.peered-network-ip-range" => Some(("privatePoolV1Config.networkConfig.peeredNetworkIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-pool-v1-config.worker-config.disk-size-gb" => Some(("privatePoolV1Config.workerConfig.diskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-pool-v1-config.worker-config.machine-type" => Some(("privatePoolV1Config.workerConfig.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "create-time", "delete-time", "disk-size-gb", "display-name", "egress-option", "etag", "machine-type", "name", "network-config", "peered-network", "private-pool-v1-config", "state", "uid", "update-time", "worker-config"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "create-time", "delete-time", "disk-size-gb", "display-name", "egress-option", "etag", "machine-type", "name", "network-config", "peered-network", "peered-network-ip-range", "private-pool-v1-config", "state", "uid", "update-time", "worker-config"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4217,10 +4864,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4413,14 +5060,42 @@ where "github.push.branch" => Some(("github.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "github.push.invert-regex" => Some(("github.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "github.push.tag" => Some(("github.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"gitlab-enterprise-events-config.gitlab-config.create-time" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.host-uri" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.hostUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.service-directory-config.service" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.serviceDirectoryConfig.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.ssl-ca" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.sslCa", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.name" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.api-access-token-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.apiAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.api-key-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.apiKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.read-access-token-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.readAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.webhook-secret-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.webhookSecretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.username" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.webhook-key" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.webhookKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config-resource" => Some(("gitlabEnterpriseEventsConfig.gitlabConfigResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.project-namespace" => Some(("gitlabEnterpriseEventsConfig.projectNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.branch" => Some(("gitlabEnterpriseEventsConfig.pullRequest.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.comment-control" => Some(("gitlabEnterpriseEventsConfig.pullRequest.commentControl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.invert-regex" => Some(("gitlabEnterpriseEventsConfig.pullRequest.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.branch" => Some(("gitlabEnterpriseEventsConfig.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.invert-regex" => Some(("gitlabEnterpriseEventsConfig.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.tag" => Some(("gitlabEnterpriseEventsConfig.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ignored-files" => 
Some(("ignoredFiles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "include-build-logs" => Some(("includeBuildLogs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "included-files" => Some(("includedFiles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.service-account-email" => Some(("pubsubConfig.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.state" => Some(("pubsubConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.subscription" => Some(("pubsubConfig.subscription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.topic" => Some(("pubsubConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.branch" => Some(("repositoryEventConfig.pullRequest.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.comment-control" => Some(("repositoryEventConfig.pullRequest.commentControl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.invert-regex" => Some(("repositoryEventConfig.pullRequest.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "repository-event-config.push.branch" => Some(("repositoryEventConfig.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.push.invert-regex" => Some(("repositoryEventConfig.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "repository-event-config.push.tag" => Some(("repositoryEventConfig.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.repository" 
=> Some(("repositoryEventConfig.repository", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.repository-type" => Some(("repositoryEventConfig.repositoryType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-to-build.bitbucket-server-config" => Some(("sourceToBuild.bitbucketServerConfig", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4441,7 +5116,7 @@ where "webhook-config.secret" => Some(("webhookConfig.secret", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "webhook-config.state" => Some(("webhookConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-access-token-version-name", "api-key", "approval", "approval-config", "approval-required", "approval-time", "approver-account", "artifact-manifest", "artifact-timing", "artifacts", "autodetect", "bitbucket-server-config", "bitbucket-server-config-resource", "bitbucket-server-trigger-config", "branch", "branch-name", "bucket", "build", "build-step-images", "build-step-outputs", "build-trigger-id", "comment", "comment-control", "commit-sha", "config", "create-time", "decision", "description", "detail", "dir", "disabled", "disk-size-gb", "dynamic-substitutions", "end-time", "enterprise-config-resource-name", "env", "event-type", "failure-info", "filename", "filter", "finish-time", "generation", "git-file-source", "github", "github-enterprise-config", "host-uri", "id", "ignored-files", "images", "included-files", "installation-id", "invert-regex", "location", "log-streaming-option", "log-url", "logging", "logs-bucket", "machine-type", "name", "num-artifacts", "object", "objects", 
"options", "owner", "path", "paths", "peered-network", "pool", "project-id", "project-key", "pubsub-config", "pull-request", "push", "queue-ttl", "read-access-token-version-name", "ref", "repo-name", "repo-slug", "repo-source", "repo-type", "requested-verify-option", "resolved-repo-source", "resolved-storage-source", "resolved-storage-source-manifest", "resource-name", "result", "results", "revision", "secret", "secret-env", "secrets", "service-account", "service-account-email", "source", "source-provenance", "source-provenance-hash", "source-to-build", "ssl-ca", "start-time", "state", "status", "status-detail", "storage-source", "storage-source-manifest", "subscription", "substitution-option", "substitutions", "tag", "tag-name", "tags", "timeout", "timing", "topic", "trigger-template", "type", "uri", "url", "username", "webhook-config", "webhook-key", "webhook-secret-version-name", "worker-pool"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-access-token-version-name", "api-access-token-version", "api-key", "api-key-version", "approval", "approval-config", "approval-required", "approval-time", "approver-account", "artifact-manifest", "artifact-timing", "artifacts", "autodetect", "bitbucket-server-config", "bitbucket-server-config-resource", "bitbucket-server-trigger-config", "branch", "branch-name", "bucket", "build", "build-step-images", "build-step-outputs", "build-trigger-id", "comment", "comment-control", "commit-sha", "config", "create-time", "decision", "description", "detail", "dir", "disabled", "disk-size-gb", "dynamic-substitutions", "end-time", "enterprise-config", "enterprise-config-resource-name", "env", "event-type", "failure-info", "filename", "filter", "finish-time", "generation", "git-file-source", "github", "github-enterprise-config", "gitlab-config", "gitlab-config-resource", "gitlab-enterprise-events-config", "host-uri", "id", "ignored-files", "images", "include-build-logs", "included-files", "installation-id", "invert-regex", 
"location", "log-streaming-option", "log-url", "logging", "logs-bucket", "machine-type", "name", "num-artifacts", "object", "objects", "options", "owner", "path", "paths", "peered-network", "pool", "project-id", "project-key", "project-namespace", "pubsub-config", "pull-request", "push", "queue-ttl", "read-access-token-version", "read-access-token-version-name", "ref", "repo-name", "repo-slug", "repo-source", "repo-type", "repository", "repository-event-config", "repository-type", "requested-verify-option", "resolved-repo-source", "resolved-storage-source", "resolved-storage-source-manifest", "resource-name", "result", "results", "revision", "secret", "secret-env", "secrets", "service", "service-account", "service-account-email", "service-directory-config", "source", "source-provenance", "source-provenance-hash", "source-to-build", "ssl-ca", "start-time", "state", "status", "status-detail", "storage-source", "storage-source-manifest", "subscription", "substitution-option", "substitutions", "tag", "tag-name", "tags", "timeout", "timing", "topic", "trigger-template", "type", "uri", "url", "username", "webhook-config", "webhook-key", "webhook-secret-version", "webhook-secret-version-name", "worker-pool"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4630,7 +5305,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4823,14 +5498,42 @@ where "github.push.branch" => Some(("github.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "github.push.invert-regex" => Some(("github.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "github.push.tag" => Some(("github.push.tag", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.create-time" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.host-uri" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.hostUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.service-directory-config.service" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.serviceDirectoryConfig.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.enterprise-config.ssl-ca" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.enterpriseConfig.sslCa", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.name" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.api-access-token-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.apiAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.api-key-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.apiKeyVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.read-access-token-version" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.readAccessTokenVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.secrets.webhook-secret-version" => 
Some(("gitlabEnterpriseEventsConfig.gitlabConfig.secrets.webhookSecretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.username" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config.webhook-key" => Some(("gitlabEnterpriseEventsConfig.gitlabConfig.webhookKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.gitlab-config-resource" => Some(("gitlabEnterpriseEventsConfig.gitlabConfigResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.project-namespace" => Some(("gitlabEnterpriseEventsConfig.projectNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.branch" => Some(("gitlabEnterpriseEventsConfig.pullRequest.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.comment-control" => Some(("gitlabEnterpriseEventsConfig.pullRequest.commentControl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.pull-request.invert-regex" => Some(("gitlabEnterpriseEventsConfig.pullRequest.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.branch" => Some(("gitlabEnterpriseEventsConfig.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.invert-regex" => Some(("gitlabEnterpriseEventsConfig.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gitlab-enterprise-events-config.push.tag" => Some(("gitlabEnterpriseEventsConfig.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ignored-files" => Some(("ignoredFiles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "include-build-logs" => Some(("includeBuildLogs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "included-files" => Some(("includedFiles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.service-account-email" => Some(("pubsubConfig.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.state" => Some(("pubsubConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.subscription" => Some(("pubsubConfig.subscription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pubsub-config.topic" => Some(("pubsubConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.branch" => Some(("repositoryEventConfig.pullRequest.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.comment-control" => Some(("repositoryEventConfig.pullRequest.commentControl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.pull-request.invert-regex" => Some(("repositoryEventConfig.pullRequest.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "repository-event-config.push.branch" => Some(("repositoryEventConfig.push.branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.push.invert-regex" => Some(("repositoryEventConfig.push.invertRegex", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "repository-event-config.push.tag" => 
Some(("repositoryEventConfig.push.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.repository" => Some(("repositoryEventConfig.repository", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repository-event-config.repository-type" => Some(("repositoryEventConfig.repositoryType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-to-build.bitbucket-server-config" => Some(("sourceToBuild.bitbucketServerConfig", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4851,7 +5554,7 @@ where "webhook-config.secret" => Some(("webhookConfig.secret", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "webhook-config.state" => Some(("webhookConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-access-token-version-name", "api-key", "approval", "approval-config", "approval-required", "approval-time", "approver-account", "artifact-manifest", "artifact-timing", "artifacts", "autodetect", "bitbucket-server-config", "bitbucket-server-config-resource", "bitbucket-server-trigger-config", "branch", "branch-name", "bucket", "build", "build-step-images", "build-step-outputs", "build-trigger-id", "comment", "comment-control", "commit-sha", "config", "create-time", "decision", "description", "detail", "dir", "disabled", "disk-size-gb", "dynamic-substitutions", "end-time", "enterprise-config-resource-name", "env", "event-type", "failure-info", "filename", "filter", "finish-time", "generation", "git-file-source", "github", "github-enterprise-config", "host-uri", "id", "ignored-files", "images", "included-files", "installation-id", 
"invert-regex", "location", "log-streaming-option", "log-url", "logging", "logs-bucket", "machine-type", "name", "num-artifacts", "object", "objects", "options", "owner", "path", "paths", "peered-network", "pool", "project-id", "project-key", "pubsub-config", "pull-request", "push", "queue-ttl", "read-access-token-version-name", "ref", "repo-name", "repo-slug", "repo-source", "repo-type", "requested-verify-option", "resolved-repo-source", "resolved-storage-source", "resolved-storage-source-manifest", "resource-name", "result", "results", "revision", "secret", "secret-env", "secrets", "service-account", "service-account-email", "source", "source-provenance", "source-provenance-hash", "source-to-build", "ssl-ca", "start-time", "state", "status", "status-detail", "storage-source", "storage-source-manifest", "subscription", "substitution-option", "substitutions", "tag", "tag-name", "tags", "timeout", "timing", "topic", "trigger-template", "type", "uri", "url", "username", "webhook-config", "webhook-key", "webhook-secret-version-name", "worker-pool"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-access-token-version-name", "api-access-token-version", "api-key", "api-key-version", "approval", "approval-config", "approval-required", "approval-time", "approver-account", "artifact-manifest", "artifact-timing", "artifacts", "autodetect", "bitbucket-server-config", "bitbucket-server-config-resource", "bitbucket-server-trigger-config", "branch", "branch-name", "bucket", "build", "build-step-images", "build-step-outputs", "build-trigger-id", "comment", "comment-control", "commit-sha", "config", "create-time", "decision", "description", "detail", "dir", "disabled", "disk-size-gb", "dynamic-substitutions", "end-time", "enterprise-config", "enterprise-config-resource-name", "env", "event-type", "failure-info", "filename", "filter", "finish-time", "generation", "git-file-source", "github", "github-enterprise-config", "gitlab-config", "gitlab-config-resource", 
"gitlab-enterprise-events-config", "host-uri", "id", "ignored-files", "images", "include-build-logs", "included-files", "installation-id", "invert-regex", "location", "log-streaming-option", "log-url", "logging", "logs-bucket", "machine-type", "name", "num-artifacts", "object", "objects", "options", "owner", "path", "paths", "peered-network", "pool", "project-id", "project-key", "project-namespace", "pubsub-config", "pull-request", "push", "queue-ttl", "read-access-token-version", "read-access-token-version-name", "ref", "repo-name", "repo-slug", "repo-source", "repo-type", "repository", "repository-event-config", "repository-type", "requested-verify-option", "resolved-repo-source", "resolved-storage-source", "resolved-storage-source-manifest", "resource-name", "result", "results", "revision", "secret", "secret-env", "secrets", "service", "service-account", "service-account-email", "service-directory-config", "source", "source-provenance", "source-provenance-hash", "source-to-build", "ssl-ca", "start-time", "state", "status", "status-detail", "storage-source", "storage-source-manifest", "subscription", "substitution-option", "substitutions", "tag", "tag-name", "tags", "timeout", "timing", "topic", "trigger-template", "type", "uri", "url", "username", "webhook-config", "webhook-key", "webhook-secret-version", "webhook-secret-version-name", "worker-pool"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5102,6 +5805,17 @@ where let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: Option = None; match self.opt.subcommand() { + ("github-dot-com-webhook", Some(opt)) => { + match opt.subcommand() { + ("receive", Some(opt)) => { + call_result = self._github_dot_com_webhook_receive(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("github-dot-com-webhook".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + 
}, ("locations", Some(opt)) => { match opt.subcommand() { ("regional-webhook", Some(opt)) => { @@ -5173,9 +5887,6 @@ where ("github-enterprise-configs-patch", Some(opt)) => { call_result = self._projects_github_enterprise_configs_patch(opt, dry_run, &mut err).await; }, - ("locations-bitbucket-server-configs-add-bitbucket-server-connected-repository", Some(opt)) => { - call_result = self._projects_locations_bitbucket_server_configs_add_bitbucket_server_connected_repository(opt, dry_run, &mut err).await; - }, ("locations-bitbucket-server-configs-connected-repositories-batch-create", Some(opt)) => { call_result = self._projects_locations_bitbucket_server_configs_connected_repositories_batch_create(opt, dry_run, &mut err).await; }, @@ -5218,6 +5929,30 @@ where ("locations-builds-retry", Some(opt)) => { call_result = self._projects_locations_builds_retry(opt, dry_run, &mut err).await; }, + ("locations-git-lab-configs-connected-repositories-batch-create", Some(opt)) => { + call_result = self._projects_locations_git_lab_configs_connected_repositories_batch_create(opt, dry_run, &mut err).await; + }, + ("locations-git-lab-configs-create", Some(opt)) => { + call_result = self._projects_locations_git_lab_configs_create(opt, dry_run, &mut err).await; + }, + ("locations-git-lab-configs-delete", Some(opt)) => { + call_result = self._projects_locations_git_lab_configs_delete(opt, dry_run, &mut err).await; + }, + ("locations-git-lab-configs-get", Some(opt)) => { + call_result = self._projects_locations_git_lab_configs_get(opt, dry_run, &mut err).await; + }, + ("locations-git-lab-configs-list", Some(opt)) => { + call_result = self._projects_locations_git_lab_configs_list(opt, dry_run, &mut err).await; + }, + ("locations-git-lab-configs-patch", Some(opt)) => { + call_result = self._projects_locations_git_lab_configs_patch(opt, dry_run, &mut err).await; + }, + ("locations-git-lab-configs-remove-git-lab-connected-repository", Some(opt)) => { + call_result = 
self._projects_locations_git_lab_configs_remove_git_lab_connected_repository(opt, dry_run, &mut err).await; + }, + ("locations-git-lab-configs-repos-list", Some(opt)) => { + call_result = self._projects_locations_git_lab_configs_repos_list(opt, dry_run, &mut err).await; + }, ("locations-github-enterprise-configs-create", Some(opt)) => { call_result = self._projects_locations_github_enterprise_configs_create(opt, dry_run, &mut err).await; }, @@ -5375,6 +6110,31 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ + ("github-dot-com-webhook", "methods: 'receive'", vec![ + ("receive", + Some(r##"ReceiveGitHubDotComWebhook is called when the API receives a github.com webhook."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/github-dot-com-webhook_receive", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("locations", "methods: 'regional-webhook'", vec![ ("regional-webhook", Some(r##"ReceiveRegionalWebhook is called when the API receives a regional GitHub webhook."##), @@ -5484,7 +6244,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'builds-approve', 'builds-cancel', 'builds-create', 'builds-get', 'builds-list', 'builds-retry', 'github-enterprise-configs-create', 'github-enterprise-configs-delete', 'github-enterprise-configs-get', 'github-enterprise-configs-list', 'github-enterprise-configs-patch', 'locations-bitbucket-server-configs-add-bitbucket-server-connected-repository', 'locations-bitbucket-server-configs-connected-repositories-batch-create', 'locations-bitbucket-server-configs-create', 
'locations-bitbucket-server-configs-delete', 'locations-bitbucket-server-configs-get', 'locations-bitbucket-server-configs-list', 'locations-bitbucket-server-configs-patch', 'locations-bitbucket-server-configs-remove-bitbucket-server-connected-repository', 'locations-bitbucket-server-configs-repos-list', 'locations-builds-approve', 'locations-builds-cancel', 'locations-builds-create', 'locations-builds-get', 'locations-builds-list', 'locations-builds-retry', 'locations-github-enterprise-configs-create', 'locations-github-enterprise-configs-delete', 'locations-github-enterprise-configs-get', 'locations-github-enterprise-configs-list', 'locations-github-enterprise-configs-patch', 'locations-operations-cancel', 'locations-operations-get', 'locations-triggers-create', 'locations-triggers-delete', 'locations-triggers-get', 'locations-triggers-list', 'locations-triggers-patch', 'locations-triggers-run', 'locations-triggers-webhook', 'locations-worker-pools-create', 'locations-worker-pools-delete', 'locations-worker-pools-get', 'locations-worker-pools-list', 'locations-worker-pools-patch', 'triggers-create', 'triggers-delete', 'triggers-get', 'triggers-list', 'triggers-patch', 'triggers-run' and 'triggers-webhook'", vec![ + ("projects", "methods: 'builds-approve', 'builds-cancel', 'builds-create', 'builds-get', 'builds-list', 'builds-retry', 'github-enterprise-configs-create', 'github-enterprise-configs-delete', 'github-enterprise-configs-get', 'github-enterprise-configs-list', 'github-enterprise-configs-patch', 'locations-bitbucket-server-configs-connected-repositories-batch-create', 'locations-bitbucket-server-configs-create', 'locations-bitbucket-server-configs-delete', 'locations-bitbucket-server-configs-get', 'locations-bitbucket-server-configs-list', 'locations-bitbucket-server-configs-patch', 'locations-bitbucket-server-configs-remove-bitbucket-server-connected-repository', 'locations-bitbucket-server-configs-repos-list', 'locations-builds-approve', 
'locations-builds-cancel', 'locations-builds-create', 'locations-builds-get', 'locations-builds-list', 'locations-builds-retry', 'locations-git-lab-configs-connected-repositories-batch-create', 'locations-git-lab-configs-create', 'locations-git-lab-configs-delete', 'locations-git-lab-configs-get', 'locations-git-lab-configs-list', 'locations-git-lab-configs-patch', 'locations-git-lab-configs-remove-git-lab-connected-repository', 'locations-git-lab-configs-repos-list', 'locations-github-enterprise-configs-create', 'locations-github-enterprise-configs-delete', 'locations-github-enterprise-configs-get', 'locations-github-enterprise-configs-list', 'locations-github-enterprise-configs-patch', 'locations-operations-cancel', 'locations-operations-get', 'locations-triggers-create', 'locations-triggers-delete', 'locations-triggers-get', 'locations-triggers-list', 'locations-triggers-patch', 'locations-triggers-run', 'locations-triggers-webhook', 'locations-worker-pools-create', 'locations-worker-pools-delete', 'locations-worker-pools-get', 'locations-worker-pools-list', 'locations-worker-pools-patch', 'triggers-create', 'triggers-delete', 'triggers-get', 'triggers-list', 'triggers-patch', 'triggers-run' and 'triggers-webhook'", vec![ ("builds-approve", Some(r##"Approves or rejects a pending build. If approved, the returned LRO will be analogous to the LRO returned from a CreateBuild call. If rejected, the returned LRO will be immediately done."##), "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_builds-approve", @@ -5693,7 +6453,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"This field should contain the name of the enterprise config resource. For example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}""##), + Some(r##"This field should contain the name of the enterprise config resource. 
For example: "projects/{$project_id}/locations/{$location_id}/githubEnterpriseConfigs/{$config_id}""##), Some(true), Some(false)), @@ -5715,7 +6475,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"This field should contain the name of the enterprise config resource. For example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}""##), + Some(r##"This field should contain the name of the enterprise config resource. For example: "projects/{$project_id}/locations/{$location_id}/githubEnterpriseConfigs/{$config_id}""##), Some(true), Some(false)), @@ -5759,35 +6519,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Optional. The full resource name for the GitHubEnterpriseConfig For example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}""##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-bitbucket-server-configs-add-bitbucket-server-connected-repository", - Some(r##"Add a Bitbucket Server repository to a given BitbucketServerConfig's connected repositories. This API is experimental."##), - "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-bitbucket-server-configs-add-bitbucket-server-connected-repository", - vec![ - (Some(r##"config"##), - None, - Some(r##"Required. The name of the `BitbucketServerConfig` to add a connected repository. Format: `projects/{project}/locations/{location}/bitbucketServerConfigs/{config}`"##), + Some(r##"Optional. 
The full resource name for the GitHubEnterpriseConfig For example: "projects/{$project_id}/locations/{$location_id}/githubEnterpriseConfigs/{$config_id}""##), Some(true), Some(false)), @@ -5960,7 +6692,7 @@ async fn main() { Some(false)), ]), ("locations-bitbucket-server-configs-remove-bitbucket-server-connected-repository", - Some(r##"Remove a Bitbucket Server repository from an given BitbucketServerConfig’s connected repositories. This API is experimental."##), + Some(r##"Remove a Bitbucket Server repository from a given BitbucketServerConfig's connected repositories. This API is experimental."##), "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-bitbucket-server-configs-remove-bitbucket-server-connected-repository", vec![ (Some(r##"config"##), @@ -6121,7 +6853,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The parent of the collection of `Builds`. Format: `projects/{project}/locations/location`"##), + Some(r##"The parent of the collection of `Builds`. Format: `projects/{project}/locations/{location}`"##), Some(true), Some(false)), @@ -6159,6 +6891,206 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-git-lab-configs-connected-repositories-batch-create", + Some(r##"Batch connecting GitLab repositories to Cloud Build. This API is experimental."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-git-lab-configs-connected-repositories-batch-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"The name of the `GitLabConfig` that adds connected repositories. 
Format: `projects/{project}/locations/{location}/gitLabConfigs/{config}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-git-lab-configs-create", + Some(r##"Creates a new `GitLabConfig`. This API is experimental"##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-git-lab-configs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Name of the parent resource."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-git-lab-configs-delete", + Some(r##"Delete a `GitLabConfig`. This API is experimental"##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-git-lab-configs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The config resource name."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-git-lab-configs-get", + Some(r##"Retrieves a `GitLabConfig`. This API is experimental"##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-git-lab-configs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The config resource name."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-git-lab-configs-list", + Some(r##"List all `GitLabConfigs` for a given project. This API is experimental"##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-git-lab-configs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Name of the parent resource"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-git-lab-configs-patch", + Some(r##"Updates an existing `GitLabConfig`. 
This API is experimental"##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-git-lab-configs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"The resource name for the config."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-git-lab-configs-remove-git-lab-connected-repository", + Some(r##"Remove a GitLab repository from a given GitLabConfig's connected repositories. This API is experimental."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-git-lab-configs-remove-git-lab-connected-repository", + vec![ + (Some(r##"config"##), + None, + Some(r##"Required. The name of the `GitLabConfig` to remove a connected repository. Format: `projects/{project}/locations/{location}/gitLabConfigs/{config}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-git-lab-configs-repos-list", + Some(r##"List all repositories for a given `GitLabConfig`. 
This API is experimental"##), + "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-git-lab-configs-repos-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Name of the parent resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -6199,7 +7131,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"This field should contain the name of the enterprise config resource. For example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}""##), + Some(r##"This field should contain the name of the enterprise config resource. For example: "projects/{$project_id}/locations/{$location_id}/githubEnterpriseConfigs/{$config_id}""##), Some(true), Some(false)), @@ -6221,7 +7153,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"This field should contain the name of the enterprise config resource. For example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}""##), + Some(r##"This field should contain the name of the enterprise config resource. For example: "projects/{$project_id}/locations/{$location_id}/githubEnterpriseConfigs/{$config_id}""##), Some(true), Some(false)), @@ -6265,7 +7197,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Optional. The full resource name for the GitHubEnterpriseConfig For example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}""##), + Some(r##"Optional. 
The full resource name for the GitHubEnterpriseConfig For example: "projects/{$project_id}/locations/{$location_id}/githubEnterpriseConfigs/{$config_id}""##), Some(true), Some(false)), @@ -6460,7 +7392,7 @@ async fn main() { Some(false)), ]), ("locations-triggers-run", - Some(r##"Runs a `BuildTrigger` at a particular source revision."##), + Some(r##"Runs a `BuildTrigger` at a particular source revision. To run a regional or global trigger, use the POST request that includes the location endpoint in the path (ex. v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). The POST request that does not include the location endpoint in the path can only be used when running global triggers."##), "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_locations-triggers-run", vec![ (Some(r##"name"##), @@ -6549,7 +7481,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the `WorkerPool` to delete. Format: `projects/{project}/locations/{workerPool}/workerPools/{workerPool}`."##), + Some(r##"Required. The name of the `WorkerPool` to delete. Format: `projects/{project}/locations/{location}/workerPools/{workerPool}`."##), Some(true), Some(false)), @@ -6778,7 +7710,7 @@ async fn main() { Some(false)), ]), ("triggers-run", - Some(r##"Runs a `BuildTrigger` at a particular source revision."##), + Some(r##"Runs a `BuildTrigger` at a particular source revision. To run a regional or global trigger, use the POST request that includes the location endpoint in the path (ex. v1/projects/{projectId}/locations/{region}/triggers/{triggerId}:run). 
The POST request that does not include the location endpoint in the path can only be used when running global triggers."##), "Details at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli/projects_triggers-run", vec![ (Some(r##"project-id"##), @@ -6851,7 +7783,7 @@ async fn main() { let mut app = App::new("cloudbuild1") .author("Sebastian Thiel ") - .version("4.0.1+20220218") + .version("5.0.2+20230120") .about("Creates and manages builds on Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudbuild1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudbuild1/Cargo.toml b/gen/cloudbuild1/Cargo.toml index b60b98d8fe..e649e31b26 100644 --- a/gen/cloudbuild1/Cargo.toml +++ b/gen/cloudbuild1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudbuild1" -version = "5.0.2-beta-1+20230120" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Build (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudbuild1" homepage = "https://cloud.google.com/cloud-build/docs/" -documentation = "https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120" +documentation = "https://docs.rs/google-cloudbuild1/5.0.2+20230120" license = "MIT" keywords = ["cloudbuild", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudbuild1/README.md b/gen/cloudbuild1/README.md index c9c8087fa7..f0fc7b5371 100644 --- a/gen/cloudbuild1/README.md +++ b/gen/cloudbuild1/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-cloudbuild1` library allows access to all features of the *Google Cloud Build* service. -This documentation was generated from *Cloud Build* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *cloudbuild:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Build* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *cloudbuild:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Build* *v1* API can be found at the [official documentation site](https://cloud.google.com/cloud-build/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/CloudBuild) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/CloudBuild) ... * github dot com webhook - * [*receive*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::GithubDotComWebhookReceiveCall) + * [*receive*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::GithubDotComWebhookReceiveCall) * locations - * [*regional webhook*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::LocationRegionalWebhookCall) -* [operations](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::Operation) - * [*cancel*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::OperationCancelCall) and [*get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::OperationGetCall) + * [*regional webhook*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::LocationRegionalWebhookCall) +* [operations](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::Operation) + * [*cancel*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::OperationCancelCall) and [*get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::OperationGetCall) * projects - * [*builds 
approve*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectBuildApproveCall), [*builds cancel*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectBuildCancelCall), [*builds create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectBuildCreateCall), [*builds get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectBuildGetCall), [*builds list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectBuildListCall), [*builds retry*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectBuildRetryCall), [*github enterprise configs create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigCreateCall), [*github enterprise configs delete*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigDeleteCall), [*github enterprise configs get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigGetCall), [*github enterprise configs list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigListCall), [*github enterprise configs patch*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigPatchCall), [*locations bitbucket server configs connected repositories batch create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigConnectedRepositoryBatchCreateCall), [*locations bitbucket server configs create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigCreateCall), [*locations bitbucket server configs 
delete*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigDeleteCall), [*locations bitbucket server configs get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigGetCall), [*locations bitbucket server configs list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigListCall), [*locations bitbucket server configs patch*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigPatchCall), [*locations bitbucket server configs remove bitbucket server connected repository*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigRemoveBitbucketServerConnectedRepositoryCall), [*locations bitbucket server configs repos list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigRepoListCall), [*locations builds approve*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBuildApproveCall), [*locations builds cancel*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBuildCancelCall), [*locations builds create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBuildCreateCall), [*locations builds get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBuildGetCall), [*locations builds list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBuildListCall), [*locations builds retry*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationBuildRetryCall), [*locations git lab configs connected repositories batch 
create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigConnectedRepositoryBatchCreateCall), [*locations git lab configs create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigCreateCall), [*locations git lab configs delete*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigDeleteCall), [*locations git lab configs get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigGetCall), [*locations git lab configs list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigListCall), [*locations git lab configs patch*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigPatchCall), [*locations git lab configs remove git lab connected repository*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigRemoveGitLabConnectedRepositoryCall), [*locations git lab configs repos list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigRepoListCall), [*locations github enterprise configs create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigCreateCall), [*locations github enterprise configs delete*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigDeleteCall), [*locations github enterprise configs get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigGetCall), [*locations github enterprise configs 
list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigListCall), [*locations github enterprise configs patch*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigPatchCall), [*locations operations cancel*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationOperationGetCall), [*locations triggers create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationTriggerCreateCall), [*locations triggers delete*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationTriggerDeleteCall), [*locations triggers get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationTriggerGetCall), [*locations triggers list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationTriggerListCall), [*locations triggers patch*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationTriggerPatchCall), [*locations triggers run*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationTriggerRunCall), [*locations triggers webhook*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationTriggerWebhookCall), [*locations worker pools create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolCreateCall), [*locations worker pools delete*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolDeleteCall), [*locations worker pools 
get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolGetCall), [*locations worker pools list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolListCall), [*locations worker pools patch*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolPatchCall), [*triggers create*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectTriggerCreateCall), [*triggers delete*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectTriggerDeleteCall), [*triggers get*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectTriggerGetCall), [*triggers list*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectTriggerListCall), [*triggers patch*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectTriggerPatchCall), [*triggers run*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectTriggerRunCall) and [*triggers webhook*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::ProjectTriggerWebhookCall) + * [*builds approve*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectBuildApproveCall), [*builds cancel*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectBuildCancelCall), [*builds create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectBuildCreateCall), [*builds get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectBuildGetCall), [*builds list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectBuildListCall), [*builds retry*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectBuildRetryCall), 
[*github enterprise configs create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigCreateCall), [*github enterprise configs delete*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigDeleteCall), [*github enterprise configs get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigGetCall), [*github enterprise configs list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigListCall), [*github enterprise configs patch*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectGithubEnterpriseConfigPatchCall), [*locations bitbucket server configs connected repositories batch create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigConnectedRepositoryBatchCreateCall), [*locations bitbucket server configs create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigCreateCall), [*locations bitbucket server configs delete*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigDeleteCall), [*locations bitbucket server configs get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigGetCall), [*locations bitbucket server configs list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigListCall), [*locations bitbucket server configs patch*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigPatchCall), [*locations bitbucket server configs remove bitbucket server connected 
repository*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigRemoveBitbucketServerConnectedRepositoryCall), [*locations bitbucket server configs repos list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBitbucketServerConfigRepoListCall), [*locations builds approve*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBuildApproveCall), [*locations builds cancel*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBuildCancelCall), [*locations builds create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBuildCreateCall), [*locations builds get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBuildGetCall), [*locations builds list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBuildListCall), [*locations builds retry*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationBuildRetryCall), [*locations git lab configs connected repositories batch create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigConnectedRepositoryBatchCreateCall), [*locations git lab configs create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigCreateCall), [*locations git lab configs delete*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigDeleteCall), [*locations git lab configs get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigGetCall), [*locations git lab configs list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigListCall), [*locations git lab configs 
patch*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigPatchCall), [*locations git lab configs remove git lab connected repository*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigRemoveGitLabConnectedRepositoryCall), [*locations git lab configs repos list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGitLabConfigRepoListCall), [*locations github enterprise configs create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigCreateCall), [*locations github enterprise configs delete*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigDeleteCall), [*locations github enterprise configs get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigGetCall), [*locations github enterprise configs list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigListCall), [*locations github enterprise configs patch*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationGithubEnterpriseConfigPatchCall), [*locations operations cancel*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationOperationGetCall), [*locations triggers create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationTriggerCreateCall), [*locations triggers delete*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationTriggerDeleteCall), [*locations triggers 
get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationTriggerGetCall), [*locations triggers list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationTriggerListCall), [*locations triggers patch*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationTriggerPatchCall), [*locations triggers run*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationTriggerRunCall), [*locations triggers webhook*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationTriggerWebhookCall), [*locations worker pools create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolCreateCall), [*locations worker pools delete*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolDeleteCall), [*locations worker pools get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolGetCall), [*locations worker pools list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolListCall), [*locations worker pools patch*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectLocationWorkerPoolPatchCall), [*triggers create*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectTriggerCreateCall), [*triggers delete*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectTriggerDeleteCall), [*triggers get*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectTriggerGetCall), [*triggers list*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectTriggerListCall), [*triggers patch*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectTriggerPatchCall), [*triggers 
run*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectTriggerRunCall) and [*triggers webhook*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::ProjectTriggerWebhookCall) Other activities are ... -* [webhook](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/api::MethodWebhookCall) +* [webhook](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/api::MethodWebhookCall) @@ -32,17 +32,17 @@ Other activities are ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/CloudBuild)** +* **[Hub](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/CloudBuild)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::Part)** + * **[Parts](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -162,17 +162,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -182,29 +182,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudbuild1/5.0.2-beta-1+20230120/google_cloudbuild1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudbuild1/5.0.2+20230120/google_cloudbuild1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudbuild1/src/api.rs b/gen/cloudbuild1/src/api.rs index a531a918a6..3c213fea9c 100644 --- a/gen/cloudbuild1/src/api.rs +++ b/gen/cloudbuild1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> CloudBuild { CloudBuild { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudbuild.googleapis.com/".to_string(), _root_url: "https://cloudbuild.googleapis.com/".to_string(), } @@ -150,7 +150,7 @@ impl<'a, S> CloudBuild { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudbuild1/src/client.rs b/gen/cloudbuild1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudbuild1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudbuild1/src/lib.rs b/gen/cloudbuild1/src/lib.rs index 58048316f1..3d898da3da 100644 --- a/gen/cloudbuild1/src/lib.rs +++ b/gen/cloudbuild1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Build* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *cloudbuild:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Build* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *cloudbuild:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Build* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/cloud-build/docs/). diff --git a/gen/cloudchannel1-cli/Cargo.toml b/gen/cloudchannel1-cli/Cargo.toml index 4889052684..777edeecda 100644 --- a/gen/cloudchannel1-cli/Cargo.toml +++ b/gen/cloudchannel1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudchannel1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloudchannel (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudchannel1-cli" @@ -20,13 +20,13 @@ name = "cloudchannel1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudchannel1] path = "../cloudchannel1" -version = "4.0.1+20220303" +version = "5.0.2+20230123" + diff --git a/gen/cloudchannel1-cli/README.md b/gen/cloudchannel1-cli/README.md index 6864c90be5..6aa84d3ecc 100644 --- a/gen/cloudchannel1-cli/README.md +++ b/gen/cloudchannel1-cli/README.md @@ -25,11 +25,16 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloudchannel* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloudchannel* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash cloudchannel1 [options] accounts + channel-partner-links-channel-partner-repricing-configs-create (-r )... [-p ]... [-o ] + channel-partner-links-channel-partner-repricing-configs-delete [-p ]... [-o ] + channel-partner-links-channel-partner-repricing-configs-get [-p ]... [-o ] + channel-partner-links-channel-partner-repricing-configs-list [-p ]... [-o ] + channel-partner-links-channel-partner-repricing-configs-patch (-r )... [-p ]... [-o ] channel-partner-links-create (-r )... [-p ]... [-o ] channel-partner-links-customers-create (-r )... [-p ]... [-o ] channel-partner-links-customers-delete [-p ]... [-o ] @@ -42,6 +47,11 @@ cloudchannel1 [options] channel-partner-links-patch (-r )... [-p ]... [-o ] check-cloud-identity-accounts-exist (-r )... [-p ]... [-o ] customers-create (-r )... [-p ]... [-o ] + customers-customer-repricing-configs-create (-r )... [-p ]... [-o ] + customers-customer-repricing-configs-delete [-p ]... [-o ] + customers-customer-repricing-configs-get [-p ]... [-o ] + customers-customer-repricing-configs-list [-p ]... [-o ] + customers-customer-repricing-configs-patch (-r )... [-p ]... [-o ] customers-delete [-p ]... [-o ] customers-entitlements-activate (-r )... [-p ]... [-o ] customers-entitlements-cancel (-r )... [-p ]... [-o ] @@ -68,6 +78,9 @@ cloudchannel1 [options] list-transferable-skus (-r )... [-p ]... [-o ] offers-list [-p ]... [-o ] register (-r )... [-p ]... [-o ] + report-jobs-fetch-report-results (-r )... [-p ]... [-o ] + reports-list [-p ]... [-o ] + reports-run (-r )... [-p ]... [-o ] unregister (-r )... [-p ]... [-o ] operations cancel (-r )... [-p ]... 
[-o ] diff --git a/gen/cloudchannel1-cli/mkdocs.yml b/gen/cloudchannel1-cli/mkdocs.yml index 713a2393ce..0de756d0fa 100644 --- a/gen/cloudchannel1-cli/mkdocs.yml +++ b/gen/cloudchannel1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloudchannel v4.0.1+20220303 +site_name: Cloudchannel v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-cloudchannel1-cli site_description: A complete library to interact with Cloudchannel (protocol v1) @@ -7,53 +7,69 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudchannel1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_channel-partner-links-create.md', 'Accounts', 'Channel Partner Links Create'] -- ['accounts_channel-partner-links-customers-create.md', 'Accounts', 'Channel Partner Links Customers Create'] -- ['accounts_channel-partner-links-customers-delete.md', 'Accounts', 'Channel Partner Links Customers Delete'] -- ['accounts_channel-partner-links-customers-get.md', 'Accounts', 'Channel Partner Links Customers Get'] -- ['accounts_channel-partner-links-customers-import.md', 'Accounts', 'Channel Partner Links Customers Import'] -- ['accounts_channel-partner-links-customers-list.md', 'Accounts', 'Channel Partner Links Customers List'] -- ['accounts_channel-partner-links-customers-patch.md', 'Accounts', 'Channel Partner Links Customers Patch'] -- ['accounts_channel-partner-links-get.md', 'Accounts', 'Channel Partner Links Get'] -- ['accounts_channel-partner-links-list.md', 'Accounts', 'Channel Partner Links List'] -- ['accounts_channel-partner-links-patch.md', 'Accounts', 'Channel Partner Links Patch'] -- ['accounts_check-cloud-identity-accounts-exist.md', 'Accounts', 'Check Cloud Identity Accounts Exist'] -- ['accounts_customers-create.md', 'Accounts', 'Customers Create'] -- ['accounts_customers-delete.md', 'Accounts', 'Customers Delete'] -- ['accounts_customers-entitlements-activate.md', 'Accounts', 'Customers Entitlements Activate'] -- 
['accounts_customers-entitlements-cancel.md', 'Accounts', 'Customers Entitlements Cancel'] -- ['accounts_customers-entitlements-change-offer.md', 'Accounts', 'Customers Entitlements Change Offer'] -- ['accounts_customers-entitlements-change-parameters.md', 'Accounts', 'Customers Entitlements Change Parameters'] -- ['accounts_customers-entitlements-change-renewal-settings.md', 'Accounts', 'Customers Entitlements Change Renewal Settings'] -- ['accounts_customers-entitlements-create.md', 'Accounts', 'Customers Entitlements Create'] -- ['accounts_customers-entitlements-get.md', 'Accounts', 'Customers Entitlements Get'] -- ['accounts_customers-entitlements-list.md', 'Accounts', 'Customers Entitlements List'] -- ['accounts_customers-entitlements-lookup-offer.md', 'Accounts', 'Customers Entitlements Lookup Offer'] -- ['accounts_customers-entitlements-start-paid-service.md', 'Accounts', 'Customers Entitlements Start Paid Service'] -- ['accounts_customers-entitlements-suspend.md', 'Accounts', 'Customers Entitlements Suspend'] -- ['accounts_customers-get.md', 'Accounts', 'Customers Get'] -- ['accounts_customers-import.md', 'Accounts', 'Customers Import'] -- ['accounts_customers-list.md', 'Accounts', 'Customers List'] -- ['accounts_customers-list-purchasable-offers.md', 'Accounts', 'Customers List Purchasable Offers'] -- ['accounts_customers-list-purchasable-skus.md', 'Accounts', 'Customers List Purchasable Skus'] -- ['accounts_customers-patch.md', 'Accounts', 'Customers Patch'] -- ['accounts_customers-provision-cloud-identity.md', 'Accounts', 'Customers Provision Cloud Identity'] -- ['accounts_customers-transfer-entitlements.md', 'Accounts', 'Customers Transfer Entitlements'] -- ['accounts_customers-transfer-entitlements-to-google.md', 'Accounts', 'Customers Transfer Entitlements To Google'] -- ['accounts_list-subscribers.md', 'Accounts', 'List Subscribers'] -- ['accounts_list-transferable-offers.md', 'Accounts', 'List Transferable Offers'] -- 
['accounts_list-transferable-skus.md', 'Accounts', 'List Transferable Skus'] -- ['accounts_offers-list.md', 'Accounts', 'Offers List'] -- ['accounts_register.md', 'Accounts', 'Register'] -- ['accounts_unregister.md', 'Accounts', 'Unregister'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['products_list.md', 'Products', 'List'] -- ['products_skus-list.md', 'Products', 'Skus List'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Channel Partner Links Channel Partner Repricing Configs Create': 'accounts_channel-partner-links-channel-partner-repricing-configs-create.md' + - 'Channel Partner Links Channel Partner Repricing Configs Delete': 'accounts_channel-partner-links-channel-partner-repricing-configs-delete.md' + - 'Channel Partner Links Channel Partner Repricing Configs Get': 'accounts_channel-partner-links-channel-partner-repricing-configs-get.md' + - 'Channel Partner Links Channel Partner Repricing Configs List': 'accounts_channel-partner-links-channel-partner-repricing-configs-list.md' + - 'Channel Partner Links Channel Partner Repricing Configs Patch': 'accounts_channel-partner-links-channel-partner-repricing-configs-patch.md' + - 'Channel Partner Links Create': 'accounts_channel-partner-links-create.md' + - 'Channel Partner Links Customers Create': 'accounts_channel-partner-links-customers-create.md' + - 'Channel Partner Links Customers Delete': 'accounts_channel-partner-links-customers-delete.md' + - 'Channel Partner Links Customers Get': 'accounts_channel-partner-links-customers-get.md' + - 'Channel Partner Links Customers Import': 'accounts_channel-partner-links-customers-import.md' + - 'Channel Partner Links Customers List': 'accounts_channel-partner-links-customers-list.md' + - 'Channel Partner Links Customers Patch': 'accounts_channel-partner-links-customers-patch.md' + - 'Channel Partner Links Get': 
'accounts_channel-partner-links-get.md' + - 'Channel Partner Links List': 'accounts_channel-partner-links-list.md' + - 'Channel Partner Links Patch': 'accounts_channel-partner-links-patch.md' + - 'Check Cloud Identity Accounts Exist': 'accounts_check-cloud-identity-accounts-exist.md' + - 'Customers Create': 'accounts_customers-create.md' + - 'Customers Customer Repricing Configs Create': 'accounts_customers-customer-repricing-configs-create.md' + - 'Customers Customer Repricing Configs Delete': 'accounts_customers-customer-repricing-configs-delete.md' + - 'Customers Customer Repricing Configs Get': 'accounts_customers-customer-repricing-configs-get.md' + - 'Customers Customer Repricing Configs List': 'accounts_customers-customer-repricing-configs-list.md' + - 'Customers Customer Repricing Configs Patch': 'accounts_customers-customer-repricing-configs-patch.md' + - 'Customers Delete': 'accounts_customers-delete.md' + - 'Customers Entitlements Activate': 'accounts_customers-entitlements-activate.md' + - 'Customers Entitlements Cancel': 'accounts_customers-entitlements-cancel.md' + - 'Customers Entitlements Change Offer': 'accounts_customers-entitlements-change-offer.md' + - 'Customers Entitlements Change Parameters': 'accounts_customers-entitlements-change-parameters.md' + - 'Customers Entitlements Change Renewal Settings': 'accounts_customers-entitlements-change-renewal-settings.md' + - 'Customers Entitlements Create': 'accounts_customers-entitlements-create.md' + - 'Customers Entitlements Get': 'accounts_customers-entitlements-get.md' + - 'Customers Entitlements List': 'accounts_customers-entitlements-list.md' + - 'Customers Entitlements Lookup Offer': 'accounts_customers-entitlements-lookup-offer.md' + - 'Customers Entitlements Start Paid Service': 'accounts_customers-entitlements-start-paid-service.md' + - 'Customers Entitlements Suspend': 'accounts_customers-entitlements-suspend.md' + - 'Customers Get': 'accounts_customers-get.md' + - 'Customers Import': 
'accounts_customers-import.md' + - 'Customers List': 'accounts_customers-list.md' + - 'Customers List Purchasable Offers': 'accounts_customers-list-purchasable-offers.md' + - 'Customers List Purchasable Skus': 'accounts_customers-list-purchasable-skus.md' + - 'Customers Patch': 'accounts_customers-patch.md' + - 'Customers Provision Cloud Identity': 'accounts_customers-provision-cloud-identity.md' + - 'Customers Transfer Entitlements': 'accounts_customers-transfer-entitlements.md' + - 'Customers Transfer Entitlements To Google': 'accounts_customers-transfer-entitlements-to-google.md' + - 'List Subscribers': 'accounts_list-subscribers.md' + - 'List Transferable Offers': 'accounts_list-transferable-offers.md' + - 'List Transferable Skus': 'accounts_list-transferable-skus.md' + - 'Offers List': 'accounts_offers-list.md' + - 'Register': 'accounts_register.md' + - 'Report Jobs Fetch Report Results': 'accounts_report-jobs-fetch-report-results.md' + - 'Reports List': 'accounts_reports-list.md' + - 'Reports Run': 'accounts_reports-run.md' + - 'Unregister': 'accounts_unregister.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Products': + - 'List': 'products_list.md' + - 'Skus List': 'products_skus-list.md' theme: readthedocs diff --git a/gen/cloudchannel1-cli/src/client.rs b/gen/cloudchannel1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudchannel1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use 
std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudchannel1-cli/src/main.rs b/gen/cloudchannel1-cli/src/main.rs index 0dadbf44fc..0c86a12072 100644 --- a/gen/cloudchannel1-cli/src/main.rs +++ b/gen/cloudchannel1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudchannel1::{api, Error, oauth2}; +use google_cloudchannel1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,356 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _accounts_channel_partner_links_channel_partner_repricing_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.adjustment.percentage-adjustment.percentage.value" => Some(("repricingConfig.adjustment.percentageAdjustment.percentage.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.day" => Some(("repricingConfig.effectiveInvoiceMonth.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.month" => Some(("repricingConfig.effectiveInvoiceMonth.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.year" => Some(("repricingConfig.effectiveInvoiceMonth.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.entitlement-granularity.entitlement" => Some(("repricingConfig.entitlementGranularity.entitlement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.rebilling-basis" => Some(("repricingConfig.rebillingBasis", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["adjustment", "day", "effective-invoice-month", "entitlement", "entitlement-granularity", "month", "name", "percentage", "percentage-adjustment", "rebilling-basis", "repricing-config", "update-time", "value", "year"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudChannelV1ChannelPartnerRepricingConfig = json::value::from_value(object).unwrap(); + 
let mut call = self.hub.accounts().channel_partner_links_channel_partner_repricing_configs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_channel_partner_links_channel_partner_repricing_configs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().channel_partner_links_channel_partner_repricing_configs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, 
false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_channel_partner_links_channel_partner_repricing_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().channel_partner_links_channel_partner_repricing_configs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_channel_partner_links_channel_partner_repricing_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().channel_partner_links_channel_partner_repricing_configs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_channel_partner_links_channel_partner_repricing_configs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.adjustment.percentage-adjustment.percentage.value" => Some(("repricingConfig.adjustment.percentageAdjustment.percentage.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.day" => Some(("repricingConfig.effectiveInvoiceMonth.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.month" => Some(("repricingConfig.effectiveInvoiceMonth.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.year" => Some(("repricingConfig.effectiveInvoiceMonth.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.entitlement-granularity.entitlement" => Some(("repricingConfig.entitlementGranularity.entitlement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.rebilling-basis" => Some(("repricingConfig.rebillingBasis", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["adjustment", "day", "effective-invoice-month", "entitlement", "entitlement-granularity", "month", "name", "percentage", "percentage-adjustment", "rebilling-basis", "repricing-config", "update-time", "value", "year"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudChannelV1ChannelPartnerRepricingConfig = json::value::from_value(object).unwrap(); + 
let mut call = self.hub.accounts().channel_partner_links_channel_partner_repricing_configs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_channel_partner_links_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -476,7 +825,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -603,7 +952,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -721,7 +1070,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1077,6 +1426,356 @@ where } } + async fn _accounts_customers_customer_repricing_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.adjustment.percentage-adjustment.percentage.value" => Some(("repricingConfig.adjustment.percentageAdjustment.percentage.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.day" => Some(("repricingConfig.effectiveInvoiceMonth.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.month" => Some(("repricingConfig.effectiveInvoiceMonth.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.year" => Some(("repricingConfig.effectiveInvoiceMonth.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.entitlement-granularity.entitlement" => Some(("repricingConfig.entitlementGranularity.entitlement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.rebilling-basis" => Some(("repricingConfig.rebillingBasis", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["adjustment", "day", "effective-invoice-month", "entitlement", "entitlement-granularity", "month", "name", "percentage", "percentage-adjustment", "rebilling-basis", "repricing-config", "update-time", "value", "year"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudChannelV1CustomerRepricingConfig = json::value::from_value(object).unwrap(); + let mut 
call = self.hub.accounts().customers_customer_repricing_configs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_customers_customer_repricing_configs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().customers_customer_repricing_configs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param 
in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_customers_customer_repricing_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().customers_customer_repricing_configs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } 
)); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_customers_customer_repricing_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().customers_customer_repricing_configs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", 
"page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_customers_customer_repricing_configs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.adjustment.percentage-adjustment.percentage.value" => Some(("repricingConfig.adjustment.percentageAdjustment.percentage.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.day" => Some(("repricingConfig.effectiveInvoiceMonth.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.month" => Some(("repricingConfig.effectiveInvoiceMonth.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.effective-invoice-month.year" => Some(("repricingConfig.effectiveInvoiceMonth.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "repricing-config.entitlement-granularity.entitlement" => Some(("repricingConfig.entitlementGranularity.entitlement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "repricing-config.rebilling-basis" => Some(("repricingConfig.rebillingBasis", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["adjustment", "day", "effective-invoice-month", "entitlement", "entitlement-granularity", "month", "name", "percentage", "percentage-adjustment", "rebilling-basis", "repricing-config", "update-time", "value", "year"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudChannelV1CustomerRepricingConfig = json::value::from_value(object).unwrap(); + let mut 
call = self.hub.accounts().customers_customer_repricing_configs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_customers_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().customers_delete(opt.value_of("name").unwrap_or("")); @@ -1729,7 +2428,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2152,7 +2851,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2214,7 +2913,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -2285,7 +2984,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -2421,7 +3120,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2749,7 +3448,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2988,7 +3687,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, 
"page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -3128,6 +3827,266 @@ where } } + async fn _accounts_report_jobs_fetch_report_results(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["page-size", "page-token"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudChannelV1FetchReportResultsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.accounts().report_jobs_fetch_report_results(request, opt.value_of("report-job").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + 
let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_reports_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().reports_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "language-code" => { + call = call.language_code(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["language-code", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_reports_run(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match 
&temp_cursor.to_string()[..] { + "date-range.invoice-end-date.day" => Some(("dateRange.invoiceEndDate.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.invoice-end-date.month" => Some(("dateRange.invoiceEndDate.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.invoice-end-date.year" => Some(("dateRange.invoiceEndDate.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.invoice-start-date.day" => Some(("dateRange.invoiceStartDate.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.invoice-start-date.month" => Some(("dateRange.invoiceStartDate.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.invoice-start-date.year" => Some(("dateRange.invoiceStartDate.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.day" => Some(("dateRange.usageEndDateTime.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.hours" => Some(("dateRange.usageEndDateTime.hours", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.minutes" => Some(("dateRange.usageEndDateTime.minutes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.month" => Some(("dateRange.usageEndDateTime.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.nanos" => Some(("dateRange.usageEndDateTime.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.seconds" => Some(("dateRange.usageEndDateTime.seconds", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.time-zone.id" => Some(("dateRange.usageEndDateTime.timeZone.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"date-range.usage-end-date-time.time-zone.version" => Some(("dateRange.usageEndDateTime.timeZone.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.utc-offset" => Some(("dateRange.usageEndDateTime.utcOffset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "date-range.usage-end-date-time.year" => Some(("dateRange.usageEndDateTime.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.day" => Some(("dateRange.usageStartDateTime.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.hours" => Some(("dateRange.usageStartDateTime.hours", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.minutes" => Some(("dateRange.usageStartDateTime.minutes", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.month" => Some(("dateRange.usageStartDateTime.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.nanos" => Some(("dateRange.usageStartDateTime.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.seconds" => Some(("dateRange.usageStartDateTime.seconds", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.time-zone.id" => Some(("dateRange.usageStartDateTime.timeZone.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.time-zone.version" => Some(("dateRange.usageStartDateTime.timeZone.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.utc-offset" => Some(("dateRange.usageStartDateTime.utcOffset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "date-range.usage-start-date-time.year" => 
Some(("dateRange.usageStartDateTime.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "language-code" => Some(("languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["date-range", "day", "filter", "hours", "id", "invoice-end-date", "invoice-start-date", "language-code", "minutes", "month", "nanos", "seconds", "time-zone", "usage-end-date-time", "usage-start-date-time", "utc-offset", "version", "year"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudChannelV1RunReportJobRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.accounts().reports_run(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + 
Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_unregister(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3411,7 +4370,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3473,7 +4432,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -3538,7 +4497,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -3600,6 +4559,21 @@ where match self.opt.subcommand() { ("accounts", Some(opt)) => { match opt.subcommand() { + ("channel-partner-links-channel-partner-repricing-configs-create", Some(opt)) => { + call_result = 
self._accounts_channel_partner_links_channel_partner_repricing_configs_create(opt, dry_run, &mut err).await; + }, + ("channel-partner-links-channel-partner-repricing-configs-delete", Some(opt)) => { + call_result = self._accounts_channel_partner_links_channel_partner_repricing_configs_delete(opt, dry_run, &mut err).await; + }, + ("channel-partner-links-channel-partner-repricing-configs-get", Some(opt)) => { + call_result = self._accounts_channel_partner_links_channel_partner_repricing_configs_get(opt, dry_run, &mut err).await; + }, + ("channel-partner-links-channel-partner-repricing-configs-list", Some(opt)) => { + call_result = self._accounts_channel_partner_links_channel_partner_repricing_configs_list(opt, dry_run, &mut err).await; + }, + ("channel-partner-links-channel-partner-repricing-configs-patch", Some(opt)) => { + call_result = self._accounts_channel_partner_links_channel_partner_repricing_configs_patch(opt, dry_run, &mut err).await; + }, ("channel-partner-links-create", Some(opt)) => { call_result = self._accounts_channel_partner_links_create(opt, dry_run, &mut err).await; }, @@ -3636,6 +4610,21 @@ where ("customers-create", Some(opt)) => { call_result = self._accounts_customers_create(opt, dry_run, &mut err).await; }, + ("customers-customer-repricing-configs-create", Some(opt)) => { + call_result = self._accounts_customers_customer_repricing_configs_create(opt, dry_run, &mut err).await; + }, + ("customers-customer-repricing-configs-delete", Some(opt)) => { + call_result = self._accounts_customers_customer_repricing_configs_delete(opt, dry_run, &mut err).await; + }, + ("customers-customer-repricing-configs-get", Some(opt)) => { + call_result = self._accounts_customers_customer_repricing_configs_get(opt, dry_run, &mut err).await; + }, + ("customers-customer-repricing-configs-list", Some(opt)) => { + call_result = self._accounts_customers_customer_repricing_configs_list(opt, dry_run, &mut err).await; + }, + ("customers-customer-repricing-configs-patch", 
Some(opt)) => { + call_result = self._accounts_customers_customer_repricing_configs_patch(opt, dry_run, &mut err).await; + }, ("customers-delete", Some(opt)) => { call_result = self._accounts_customers_delete(opt, dry_run, &mut err).await; }, @@ -3714,6 +4703,15 @@ where ("register", Some(opt)) => { call_result = self._accounts_register(opt, dry_run, &mut err).await; }, + ("report-jobs-fetch-report-results", Some(opt)) => { + call_result = self._accounts_report_jobs_fetch_report_results(opt, dry_run, &mut err).await; + }, + ("reports-list", Some(opt)) => { + call_result = self._accounts_reports_list(opt, dry_run, &mut err).await; + }, + ("reports-run", Some(opt)) => { + call_result = self._accounts_reports_run(opt, dry_run, &mut err).await; + }, ("unregister", Some(opt)) => { call_result = self._accounts_unregister(opt, dry_run, &mut err).await; }, @@ -3830,7 +4828,129 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("accounts", "methods: 'channel-partner-links-create', 'channel-partner-links-customers-create', 'channel-partner-links-customers-delete', 'channel-partner-links-customers-get', 'channel-partner-links-customers-import', 'channel-partner-links-customers-list', 'channel-partner-links-customers-patch', 'channel-partner-links-get', 'channel-partner-links-list', 'channel-partner-links-patch', 'check-cloud-identity-accounts-exist', 'customers-create', 'customers-delete', 'customers-entitlements-activate', 'customers-entitlements-cancel', 'customers-entitlements-change-offer', 'customers-entitlements-change-parameters', 'customers-entitlements-change-renewal-settings', 'customers-entitlements-create', 'customers-entitlements-get', 'customers-entitlements-list', 'customers-entitlements-lookup-offer', 'customers-entitlements-start-paid-service', 'customers-entitlements-suspend', 'customers-get', 'customers-import', 'customers-list', 'customers-list-purchasable-offers', 'customers-list-purchasable-skus', 'customers-patch', 
'customers-provision-cloud-identity', 'customers-transfer-entitlements', 'customers-transfer-entitlements-to-google', 'list-subscribers', 'list-transferable-offers', 'list-transferable-skus', 'offers-list', 'register' and 'unregister'", vec![ + ("accounts", "methods: 'channel-partner-links-channel-partner-repricing-configs-create', 'channel-partner-links-channel-partner-repricing-configs-delete', 'channel-partner-links-channel-partner-repricing-configs-get', 'channel-partner-links-channel-partner-repricing-configs-list', 'channel-partner-links-channel-partner-repricing-configs-patch', 'channel-partner-links-create', 'channel-partner-links-customers-create', 'channel-partner-links-customers-delete', 'channel-partner-links-customers-get', 'channel-partner-links-customers-import', 'channel-partner-links-customers-list', 'channel-partner-links-customers-patch', 'channel-partner-links-get', 'channel-partner-links-list', 'channel-partner-links-patch', 'check-cloud-identity-accounts-exist', 'customers-create', 'customers-customer-repricing-configs-create', 'customers-customer-repricing-configs-delete', 'customers-customer-repricing-configs-get', 'customers-customer-repricing-configs-list', 'customers-customer-repricing-configs-patch', 'customers-delete', 'customers-entitlements-activate', 'customers-entitlements-cancel', 'customers-entitlements-change-offer', 'customers-entitlements-change-parameters', 'customers-entitlements-change-renewal-settings', 'customers-entitlements-create', 'customers-entitlements-get', 'customers-entitlements-list', 'customers-entitlements-lookup-offer', 'customers-entitlements-start-paid-service', 'customers-entitlements-suspend', 'customers-get', 'customers-import', 'customers-list', 'customers-list-purchasable-offers', 'customers-list-purchasable-skus', 'customers-patch', 'customers-provision-cloud-identity', 'customers-transfer-entitlements', 'customers-transfer-entitlements-to-google', 'list-subscribers', 'list-transferable-offers', 
'list-transferable-skus', 'offers-list', 'register', 'report-jobs-fetch-report-results', 'reports-list', 'reports-run' and 'unregister'", vec![ + ("channel-partner-links-channel-partner-repricing-configs-create", + Some(r##"Creates a ChannelPartnerRepricingConfig. Call this method to set modifications for a specific ChannelPartner's bill. You can only create configs if the RepricingConfig.effective_invoice_month is a future month. If needed, you can create a config for the current month, with some restrictions. When creating a config for a future month, make sure there are no existing configs for that RepricingConfig.effective_invoice_month. The following restrictions are for creating configs in the current month. * This functionality is reserved for recovering from an erroneous config, and should not be used for regular business cases. * The new config will not modify exports used with other configs. Changes to the config may be immediate, but may take up to 24 hours. * There is a limit of ten configs for any ChannelPartner or RepricingConfig.effective_invoice_month. * The contained ChannelPartnerRepricingConfig.repricing_config vaule must be different from the value used in the current config for a ChannelPartner. Possible Error Codes: * PERMISSION_DENIED: If the account making the request and the account being queried are different. * INVALID_ARGUMENT: Missing or invalid required parameters in the request. Also displays if the updated config is for the current month or past months. * NOT_FOUND: The ChannelPartnerRepricingConfig specified does not exist or is not associated with the given account. * INTERNAL: Any non-user error related to technical issues in the backend. In this case, contact Cloud Channel support. 
Return Value: If successful, the updated ChannelPartnerRepricingConfig resource, otherwise returns an error."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_channel-partner-links-channel-partner-repricing-configs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the ChannelPartner that will receive the repricing config. Parent uses the format: accounts/{account_id}/channelPartnerLinks/{channel_partner_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("channel-partner-links-channel-partner-repricing-configs-delete", + Some(r##"Deletes the given ChannelPartnerRepricingConfig permanently. You can only delete configs if their RepricingConfig.effective_invoice_month is set to a date after the current month. Possible error codes: * PERMISSION_DENIED: The account making the request does not own this customer. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * FAILED_PRECONDITION: The ChannelPartnerRepricingConfig is active or in the past. * NOT_FOUND: No ChannelPartnerRepricingConfig found for the name in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_channel-partner-links-channel-partner-repricing-configs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the channel partner repricing config rule to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("channel-partner-links-channel-partner-repricing-configs-get", + Some(r##"Gets information about how a Distributor modifies their bill before sending it to a ChannelPartner. Possible Error Codes: * PERMISSION_DENIED: If the account making the request and the account being queried are different. * NOT_FOUND: The ChannelPartnerRepricingConfig was not found. * INTERNAL: Any non-user error related to technical issues in the backend. In this case, contact Cloud Channel support. Return Value: If successful, the ChannelPartnerRepricingConfig resource, otherwise returns an error."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_channel-partner-links-channel-partner-repricing-configs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the ChannelPartnerRepricingConfig Format: accounts/{account_id}/channelPartnerLinks/{channel_partner_id}/channelPartnerRepricingConfigs/{id}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("channel-partner-links-channel-partner-repricing-configs-list", + Some(r##"Lists information about how a Reseller modifies their bill before sending it to a ChannelPartner. Possible Error Codes: * PERMISSION_DENIED: If the account making the request and the account being queried are different. 
* NOT_FOUND: The ChannelPartnerRepricingConfig specified does not exist or is not associated with the given account. * INTERNAL: Any non-user error related to technical issues in the backend. In this case, contact Cloud Channel support. Return Value: If successful, the ChannelPartnerRepricingConfig resources. The data for each resource is displayed in the ascending order of: * channel partner ID * RepricingConfig.effective_invoice_month * ChannelPartnerRepricingConfig.update_time If unsuccessful, returns an error."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_channel-partner-links-channel-partner-repricing-configs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the account's ChannelPartnerLink. Parent uses the format: accounts/{account_id}/channelPartnerLinks/{channel_partner_id}. Supports accounts/{account_id}/channelPartnerLinks/- to retrieve configs for all channel partners."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("channel-partner-links-channel-partner-repricing-configs-patch", + Some(r##"Updates a ChannelPartnerRepricingConfig. Call this method to set modifications for a specific ChannelPartner's bill. This method overwrites the existing CustomerRepricingConfig. You can only update configs if the RepricingConfig.effective_invoice_month is a future month. To make changes to configs for the current month, use CreateChannelPartnerRepricingConfig, taking note of its restrictions. You cannot update the RepricingConfig.effective_invoice_month. When updating a config in the future: * This config must already exist. 
Possible Error Codes: * PERMISSION_DENIED: If the account making the request and the account being queried are different. * INVALID_ARGUMENT: Missing or invalid required parameters in the request. Also displays if the updated config is for the current month or past months. * NOT_FOUND: The ChannelPartnerRepricingConfig specified does not exist or is not associated with the given account. * INTERNAL: Any non-user error related to technical issues in the backend. In this case, contact Cloud Channel support. Return Value: If successful, the updated ChannelPartnerRepricingConfig resource, otherwise returns an error."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_channel-partner-links-channel-partner-repricing-configs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. Resource name of the ChannelPartnerRepricingConfig. Format: accounts/{account_id}/channelPartnerLinks/{channel_partner_id}/channelPartnerRepricingConfigs/{id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("channel-partner-links-create", Some(r##"Initiates a channel partner link between a distributor and a reseller, or between resellers in an n-tier reseller channel. Invited partners need to follow the invite_link_uri provided in the response to accept. After accepting the invitation, a link is set up between the two parties. You must be a distributor to call this method. 
Possible error codes: * PERMISSION_DENIED: The reseller account making the request is different from the reseller account in the API request. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * ALREADY_EXISTS: The ChannelPartnerLink sent in the request already exists. * NOT_FOUND: No Cloud Identity customer exists for provided domain. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The new ChannelPartnerLink resource."##), "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_channel-partner-links-create", @@ -4131,6 +5251,128 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("customers-customer-repricing-configs-create", + Some(r##"Creates a CustomerRepricingConfig. Call this method to set modifications for a specific customer's bill. You can only create configs if the RepricingConfig.effective_invoice_month is a future month. If needed, you can create a config for the current month, with some restrictions. When creating a config for a future month, make sure there are no existing configs for that RepricingConfig.effective_invoice_month. The following restrictions are for creating configs in the current month. * This functionality is reserved for recovering from an erroneous config, and should not be used for regular business cases. * The new config will not modify exports used with other configs. Changes to the config may be immediate, but may take up to 24 hours. * There is a limit of ten configs for any RepricingConfig.EntitlementGranularity.entitlement or RepricingConfig.effective_invoice_month. 
* The contained CustomerRepricingConfig.repricing_config vaule must be different from the value used in the current config for a RepricingConfig.EntitlementGranularity.entitlement. Possible Error Codes: * PERMISSION_DENIED: If the account making the request and the account being queried are different. * INVALID_ARGUMENT: Missing or invalid required parameters in the request. Also displays if the updated config is for the current month or past months. * NOT_FOUND: The CustomerRepricingConfig specified does not exist or is not associated with the given account. * INTERNAL: Any non-user error related to technical issues in the backend. In this case, contact Cloud Channel support. Return Value: If successful, the updated CustomerRepricingConfig resource, otherwise returns an error."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_customers-customer-repricing-configs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the customer that will receive this repricing config. Parent uses the format: accounts/{account_id}/customers/{customer_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("customers-customer-repricing-configs-delete", + Some(r##"Deletes the given CustomerRepricingConfig permanently. You can only delete configs if their RepricingConfig.effective_invoice_month is set to a date after the current month. Possible error codes: * PERMISSION_DENIED: The account making the request does not own this customer. 
* INVALID_ARGUMENT: Required request parameters are missing or invalid. * FAILED_PRECONDITION: The CustomerRepricingConfig is active or in the past. * NOT_FOUND: No CustomerRepricingConfig found for the name in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_customers-customer-repricing-configs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the customer repricing config rule to delete. Format: accounts/{account_id}/customers/{customer_id}/customerRepricingConfigs/{id}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("customers-customer-repricing-configs-get", + Some(r##"Gets information about how a Reseller modifies their bill before sending it to a Customer. Possible Error Codes: * PERMISSION_DENIED: If the account making the request and the account being queried are different. * NOT_FOUND: The CustomerRepricingConfig was not found. * INTERNAL: Any non-user error related to technical issues in the backend. In this case, contact Cloud Channel support. Return Value: If successful, the CustomerRepricingConfig resource, otherwise returns an error."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_customers-customer-repricing-configs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the CustomerRepricingConfig. 
Format: accounts/{account_id}/customers/{customer_id}/customerRepricingConfigs/{id}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("customers-customer-repricing-configs-list", + Some(r##"Lists information about how a Reseller modifies their bill before sending it to a Customer. Possible Error Codes: * PERMISSION_DENIED: If the account making the request and the account being queried are different. * NOT_FOUND: The CustomerRepricingConfig specified does not exist or is not associated with the given account. * INTERNAL: Any non-user error related to technical issues in the backend. In this case, contact Cloud Channel support. Return Value: If successful, the CustomerRepricingConfig resources. The data for each resource is displayed in the ascending order of: * customer ID * RepricingConfig.EntitlementGranularity.entitlement * RepricingConfig.effective_invoice_month * CustomerRepricingConfig.update_time If unsuccessful, returns an error."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_customers-customer-repricing-configs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the customer. Parent uses the format: accounts/{account_id}/customers/{customer_id}. 
Supports accounts/{account_id}/customers/- to retrieve configs for all customers."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("customers-customer-repricing-configs-patch", + Some(r##"Updates a CustomerRepricingConfig. Call this method to set modifications for a specific customer's bill. This method overwrites the existing CustomerRepricingConfig. You can only update configs if the RepricingConfig.effective_invoice_month is a future month. To make changes to configs for the current month, use CreateCustomerRepricingConfig, taking note of its restrictions. You cannot update the RepricingConfig.effective_invoice_month. When updating a config in the future: * This config must already exist. Possible Error Codes: * PERMISSION_DENIED: If the account making the request and the account being queried are different. * INVALID_ARGUMENT: Missing or invalid required parameters in the request. Also displays if the updated config is for the current month or past months. * NOT_FOUND: The CustomerRepricingConfig specified does not exist or is not associated with the given account. * INTERNAL: Any non-user error related to technical issues in the backend. In this case, contact Cloud Channel support. Return Value: If successful, the updated CustomerRepricingConfig resource, otherwise returns an error."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_customers-customer-repricing-configs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. Resource name of the CustomerRepricingConfig. 
Format: accounts/{account_id}/customers/{customer_id}/customerRepricingConfigs/{id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -4700,7 +5942,7 @@ async fn main() { Some(false)), ]), ("list-transferable-offers", - Some(r##"List TransferableOffers of a customer based on Cloud Identity ID or Customer Name in the request. Use this method when a reseller gets the entitlement information of an unowned customer. The reseller should provide the customer's Cloud Identity ID or Customer Name. Possible error codes: * PERMISSION_DENIED: * The customer doesn't belong to the reseller and has no auth token. * The supplied auth token is invalid. * The reseller account making the request is different from the reseller account in the query. * INVALID_ARGUMENT: Required request parameters are missing or invalid. Return value: List of TransferableOffer for the given customer and SKU."##), + Some(r##"List TransferableOffers of a customer based on Cloud Identity ID or Customer Name in the request. Use this method when a reseller gets the entitlement information of an unowned customer. The reseller should provide the customer's Cloud Identity ID or Customer Name. Possible error codes: * PERMISSION_DENIED: * The customer doesn't belong to the reseller and has no auth token. * The customer provided incorrect reseller information when generating auth token. * The reseller account making the request is different from the reseller account in the query. * INVALID_ARGUMENT: Required request parameters are missing or invalid. 
Return value: List of TransferableOffer for the given customer and SKU."##), "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_list-transferable-offers", vec![ (Some(r##"parent"##), @@ -4799,6 +6041,84 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("report-jobs-fetch-report-results", + Some(r##"Retrieves data generated by CloudChannelReportsService.RunReportJob."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_report-jobs-fetch-report-results", + vec![ + (Some(r##"report-job"##), + None, + Some(r##"Required. The report job created by CloudChannelReportsService.RunReportJob. Report_job uses the format: accounts/{account_id}/reportJobs/{report_job_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("reports-list", + Some(r##"Lists the reports that RunReportJob can run. These reports include an ID, a description, and the list of columns that will be in the result."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_reports-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the partner account to list available reports for. 
Parent uses the format: accounts/{account_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("reports-run", + Some(r##"Begins generation of data for a given report. The report identifier is a UID (for example, `613bf59q`). Possible error codes: * PERMISSION_DENIED: The user doesn't have access to this report. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The report identifier was not found. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The ID of a long-running operation. To get the results of the operation, call the GetOperation method of CloudChannelOperationsService. The Operation metadata contains an instance of OperationMetadata. To get the results of report generation, call CloudChannelReportsService.FetchReportResults with the RunReportJobResponse.report_job."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli/accounts_reports-run", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The report's resource name. Specifies the account and report used to generate report data. The report_id identifier is a UID (for example, `613bf59q`). 
Name uses the format: accounts/{account_id}/reports/{report_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -4977,7 +6297,7 @@ async fn main() { let mut app = App::new("cloudchannel1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230123") .about("The Cloud Channel API enables Google Cloud partners to have a single unified resale platform and APIs across all of Google Cloud including GCP, Workspace, Maps and Chrome.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudchannel1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudchannel1/Cargo.toml b/gen/cloudchannel1/Cargo.toml index 7663185cc0..6a199ad235 100644 --- a/gen/cloudchannel1/Cargo.toml +++ b/gen/cloudchannel1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudchannel1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloudchannel (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudchannel1" homepage = "https://cloud.google.com/channel" -documentation = "https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-cloudchannel1/5.0.2+20230123" license = "MIT" keywords = ["cloudchannel", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudchannel1/README.md b/gen/cloudchannel1/README.md index 3fd7c65e42..0b1999121d 100644 --- a/gen/cloudchannel1/README.md +++ b/gen/cloudchannel1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! 
--> The `google-cloudchannel1` library allows access to all features of the *Google Cloudchannel* service. -This documentation was generated from *Cloudchannel* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *cloudchannel:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloudchannel* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *cloudchannel:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloudchannel* *v1* API can be found at the [official documentation site](https://cloud.google.com/channel). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/Cloudchannel) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/Cloudchannel) ... 
* accounts - * [*channel partner links channel partner repricing configs create*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigCreateCall), [*channel partner links channel partner repricing configs delete*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigDeleteCall), [*channel partner links channel partner repricing configs get*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigGetCall), [*channel partner links channel partner repricing configs list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigListCall), [*channel partner links channel partner repricing configs patch*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigPatchCall), [*channel partner links create*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCreateCall), [*channel partner links customers create*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerCreateCall), [*channel partner links customers delete*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerDeleteCall), [*channel partner links customers get*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerGetCall), [*channel partner links customers import*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerImportCall), [*channel partner links customers 
list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerListCall), [*channel partner links customers patch*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerPatchCall), [*channel partner links get*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkGetCall), [*channel partner links list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkListCall), [*channel partner links patch*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkPatchCall), [*check cloud identity accounts exist*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCheckCloudIdentityAccountsExistCall), [*customers create*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerCreateCall), [*customers customer repricing configs create*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigCreateCall), [*customers customer repricing configs delete*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigDeleteCall), [*customers customer repricing configs get*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigGetCall), [*customers customer repricing configs list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigListCall), [*customers customer repricing configs patch*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigPatchCall), [*customers 
delete*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerDeleteCall), [*customers entitlements activate*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementActivateCall), [*customers entitlements cancel*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementCancelCall), [*customers entitlements change offer*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementChangeOfferCall), [*customers entitlements change parameters*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementChangeParameterCall), [*customers entitlements change renewal settings*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementChangeRenewalSettingCall), [*customers entitlements create*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementCreateCall), [*customers entitlements get*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementGetCall), [*customers entitlements list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementListCall), [*customers entitlements lookup offer*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementLookupOfferCall), [*customers entitlements start paid service*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementStartPaidServiceCall), [*customers entitlements suspend*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerEntitlementSuspendCall), [*customers 
get*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerGetCall), [*customers import*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerImportCall), [*customers list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerListCall), [*customers list purchasable offers*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerListPurchasableOfferCall), [*customers list purchasable skus*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerListPurchasableSkuCall), [*customers patch*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerPatchCall), [*customers provision cloud identity*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerProvisionCloudIdentityCall), [*customers transfer entitlements*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerTransferEntitlementCall), [*customers transfer entitlements to google*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountCustomerTransferEntitlementsToGoogleCall), [*list subscribers*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountListSubscriberCall), [*list transferable offers*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountListTransferableOfferCall), [*list transferable skus*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountListTransferableSkuCall), [*offers list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountOfferListCall), 
[*register*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountRegisterCall), [*report jobs fetch report results*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountReportJobFetchReportResultCall), [*reports list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountReportListCall), [*reports run*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountReportRunCall) and [*unregister*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::AccountUnregisterCall) + * [*channel partner links channel partner repricing configs create*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigCreateCall), [*channel partner links channel partner repricing configs delete*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigDeleteCall), [*channel partner links channel partner repricing configs get*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigGetCall), [*channel partner links channel partner repricing configs list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigListCall), [*channel partner links channel partner repricing configs patch*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkChannelPartnerRepricingConfigPatchCall), [*channel partner links create*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCreateCall), [*channel partner links customers 
create*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerCreateCall), [*channel partner links customers delete*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerDeleteCall), [*channel partner links customers get*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerGetCall), [*channel partner links customers import*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerImportCall), [*channel partner links customers list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerListCall), [*channel partner links customers patch*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkCustomerPatchCall), [*channel partner links get*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkGetCall), [*channel partner links list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkListCall), [*channel partner links patch*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountChannelPartnerLinkPatchCall), [*check cloud identity accounts exist*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCheckCloudIdentityAccountsExistCall), [*customers create*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerCreateCall), [*customers customer repricing configs create*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigCreateCall), [*customers customer repricing configs 
delete*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigDeleteCall), [*customers customer repricing configs get*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigGetCall), [*customers customer repricing configs list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigListCall), [*customers customer repricing configs patch*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerCustomerRepricingConfigPatchCall), [*customers delete*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerDeleteCall), [*customers entitlements activate*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementActivateCall), [*customers entitlements cancel*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementCancelCall), [*customers entitlements change offer*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementChangeOfferCall), [*customers entitlements change parameters*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementChangeParameterCall), [*customers entitlements change renewal settings*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementChangeRenewalSettingCall), [*customers entitlements create*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementCreateCall), [*customers entitlements get*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementGetCall), [*customers entitlements 
list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementListCall), [*customers entitlements lookup offer*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementLookupOfferCall), [*customers entitlements start paid service*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementStartPaidServiceCall), [*customers entitlements suspend*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerEntitlementSuspendCall), [*customers get*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerGetCall), [*customers import*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerImportCall), [*customers list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerListCall), [*customers list purchasable offers*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerListPurchasableOfferCall), [*customers list purchasable skus*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerListPurchasableSkuCall), [*customers patch*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerPatchCall), [*customers provision cloud identity*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerProvisionCloudIdentityCall), [*customers transfer entitlements*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerTransferEntitlementCall), [*customers transfer entitlements to google*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountCustomerTransferEntitlementsToGoogleCall), [*list 
subscribers*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountListSubscriberCall), [*list transferable offers*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountListTransferableOfferCall), [*list transferable skus*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountListTransferableSkuCall), [*offers list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountOfferListCall), [*register*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountRegisterCall), [*report jobs fetch report results*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountReportJobFetchReportResultCall), [*reports list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountReportListCall), [*reports run*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountReportRunCall) and [*unregister*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::AccountUnregisterCall) * operations - * [*cancel*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::OperationCancelCall), [*delete*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::OperationDeleteCall), [*get*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::OperationGetCall) and [*list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::OperationListCall) + * [*cancel*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::OperationCancelCall), [*delete*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::OperationDeleteCall), [*get*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::OperationGetCall) and 
[*list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::OperationListCall) * products - * [*list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::ProductListCall) and [*skus list*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/api::ProductSkuListCall) + * [*list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::ProductListCall) and [*skus list*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/api::ProductSkuListCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/Cloudchannel)** +* **[Hub](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/Cloudchannel)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::Part)** + * **[Parts](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -140,17 +140,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -160,29 +160,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudchannel1/5.0.2-beta-1+20230123/google_cloudchannel1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudchannel1/5.0.2+20230123/google_cloudchannel1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudchannel1/src/api.rs b/gen/cloudchannel1/src/api.rs index e89592b217..c7c80b0e5a 100644 --- a/gen/cloudchannel1/src/api.rs +++ b/gen/cloudchannel1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Cloudchannel { Cloudchannel { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudchannel.googleapis.com/".to_string(), _root_url: "https://cloudchannel.googleapis.com/".to_string(), } @@ -146,7 +146,7 @@ impl<'a, S> Cloudchannel { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudchannel1/src/client.rs b/gen/cloudchannel1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudchannel1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudchannel1/src/lib.rs b/gen/cloudchannel1/src/lib.rs index 61a7455b5a..a99e99be79 100644 --- a/gen/cloudchannel1/src/lib.rs +++ b/gen/cloudchannel1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloudchannel* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *cloudchannel:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloudchannel* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *cloudchannel:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloudchannel* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/channel). diff --git a/gen/clouddebugger2-cli/Cargo.toml b/gen/clouddebugger2-cli/Cargo.toml index 01612181fd..4a05799b06 100644 --- a/gen/clouddebugger2-cli/Cargo.toml +++ b/gen/clouddebugger2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-clouddebugger2-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Debugger (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/clouddebugger2-cli" @@ -20,13 +20,13 @@ name = "clouddebugger2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-clouddebugger2] path = "../clouddebugger2" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/clouddebugger2-cli/README.md b/gen/clouddebugger2-cli/README.md index 4825e2aae8..23216e3fdf 100644 --- a/gen/clouddebugger2-cli/README.md +++ b/gen/clouddebugger2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Debugger* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Debugger* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash clouddebugger2 [options] diff --git a/gen/clouddebugger2-cli/mkdocs.yml b/gen/clouddebugger2-cli/mkdocs.yml index 62204cda9a..3cdbbebba1 100644 --- a/gen/clouddebugger2-cli/mkdocs.yml +++ b/gen/clouddebugger2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Debugger v4.0.1+20220225 +site_name: Cloud Debugger v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-clouddebugger2-cli site_description: A complete library to interact with Cloud Debugger (protocol v2) @@ -7,16 +7,18 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/clouddebugger2-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['controller_debuggees-breakpoints-list.md', 'Controller', 'Debuggees Breakpoints List'] -- ['controller_debuggees-breakpoints-update.md', 'Controller', 'Debuggees Breakpoints Update'] -- ['controller_debuggees-register.md', 'Controller', 'Debuggees Register'] -- ['debugger_debuggees-breakpoints-delete.md', 'Debugger', 'Debuggees Breakpoints Delete'] -- ['debugger_debuggees-breakpoints-get.md', 'Debugger', 'Debuggees Breakpoints Get'] -- ['debugger_debuggees-breakpoints-list.md', 'Debugger', 'Debuggees Breakpoints List'] -- ['debugger_debuggees-breakpoints-set.md', 'Debugger', 'Debuggees Breakpoints Set'] -- ['debugger_debuggees-list.md', 'Debugger', 'Debuggees List'] +nav: +- Home: 'index.md' +- 'Controller': + - 'Debuggees Breakpoints List': 'controller_debuggees-breakpoints-list.md' + - 'Debuggees Breakpoints Update': 'controller_debuggees-breakpoints-update.md' + - 'Debuggees Register': 'controller_debuggees-register.md' +- 'Debugger': + - 'Debuggees Breakpoints Delete': 'debugger_debuggees-breakpoints-delete.md' + - 
'Debuggees Breakpoints Get': 'debugger_debuggees-breakpoints-get.md' + - 'Debuggees Breakpoints List': 'debugger_debuggees-breakpoints-list.md' + - 'Debuggees Breakpoints Set': 'debugger_debuggees-breakpoints-set.md' + - 'Debuggees List': 'debugger_debuggees-list.md' theme: readthedocs diff --git a/gen/clouddebugger2-cli/src/client.rs b/gen/clouddebugger2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/clouddebugger2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/clouddebugger2-cli/src/main.rs b/gen/clouddebugger2-cli/src/main.rs index 5457bdabea..e44697ce8d 100644 --- a/gen/clouddebugger2-cli/src/main.rs +++ b/gen/clouddebugger2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_clouddebugger2::{api, Error, oauth2}; +use google_clouddebugger2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.wait_token(value.unwrap_or("")); }, "success-on-timeout" => { - call = call.success_on_timeout(arg_from_str(value.unwrap_or("false"), err, "success-on-timeout", "boolean")); + call = call.success_on_timeout( value.map(|v| arg_from_str(v, err, "success-on-timeout", "boolean")).unwrap_or(false)); }, "agent-id" => { call = call.agent_id(value.unwrap_or("")); @@ -436,13 +435,13 @@ where call = call.wait_token(value.unwrap_or("")); }, "strip-results" => { - call = call.strip_results(arg_from_str(value.unwrap_or("false"), err, "strip-results", "boolean")); + call = call.strip_results( value.map(|v| arg_from_str(v, err, "strip-results", "boolean")).unwrap_or(false)); }, "include-inactive" => { - call = call.include_inactive(arg_from_str(value.unwrap_or("false"), err, "include-inactive", "boolean")); + call = call.include_inactive( value.map(|v| arg_from_str(v, err, "include-inactive", "boolean")).unwrap_or(false)); }, "include-all-users" => { - call = call.include_all_users(arg_from_str(value.unwrap_or("false"), err, "include-all-users", 
"boolean")); + call = call.include_all_users( value.map(|v| arg_from_str(v, err, "include-all-users", "boolean")).unwrap_or(false)); }, "client-version" => { call = call.client_version(value.unwrap_or("")); @@ -618,7 +617,7 @@ where call = call.project(value.unwrap_or("")); }, "include-inactive" => { - call = call.include_inactive(arg_from_str(value.unwrap_or("false"), err, "include-inactive", "boolean")); + call = call.include_inactive( value.map(|v| arg_from_str(v, err, "include-inactive", "boolean")).unwrap_or(false)); }, "client-version" => { call = call.client_version(value.unwrap_or("")); @@ -998,7 +997,7 @@ async fn main() { let mut app = App::new("clouddebugger2") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("Examines the call stack and variables of a running application without stopping or slowing it down. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_clouddebugger2_cli") .arg(Arg::with_name("url") diff --git a/gen/clouddebugger2/Cargo.toml b/gen/clouddebugger2/Cargo.toml index 8a935b88d5..783c6a3d2c 100644 --- a/gen/clouddebugger2/Cargo.toml +++ b/gen/clouddebugger2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-clouddebugger2" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Debugger (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/clouddebugger2" homepage = "https://cloud.google.com/debugger" -documentation = "https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-clouddebugger2/5.0.2+20230113" license = "MIT" keywords = ["clouddebugger", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/clouddebugger2/README.md b/gen/clouddebugger2/README.md index 8b21f6a4b2..d91c0560d7 100644 --- a/gen/clouddebugger2/README.md +++ 
b/gen/clouddebugger2/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-clouddebugger2` library allows access to all features of the *Google Cloud Debugger* service. -This documentation was generated from *Cloud Debugger* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *clouddebugger:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Debugger* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *clouddebugger:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Debugger* *v2* API can be found at the [official documentation site](https://cloud.google.com/debugger). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/CloudDebugger) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/CloudDebugger) ... 
* controller - * [*debuggees breakpoints list*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/api::ControllerDebuggeeBreakpointListCall), [*debuggees breakpoints update*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/api::ControllerDebuggeeBreakpointUpdateCall) and [*debuggees register*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/api::ControllerDebuggeeRegisterCall) + * [*debuggees breakpoints list*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/api::ControllerDebuggeeBreakpointListCall), [*debuggees breakpoints update*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/api::ControllerDebuggeeBreakpointUpdateCall) and [*debuggees register*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/api::ControllerDebuggeeRegisterCall) * debugger - * [*debuggees breakpoints delete*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/api::DebuggerDebuggeeBreakpointDeleteCall), [*debuggees breakpoints get*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/api::DebuggerDebuggeeBreakpointGetCall), [*debuggees breakpoints list*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/api::DebuggerDebuggeeBreakpointListCall), [*debuggees breakpoints set*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/api::DebuggerDebuggeeBreakpointSetCall) and [*debuggees list*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/api::DebuggerDebuggeeListCall) + * [*debuggees breakpoints delete*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/api::DebuggerDebuggeeBreakpointDeleteCall), [*debuggees breakpoints get*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/api::DebuggerDebuggeeBreakpointGetCall), 
[*debuggees breakpoints list*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/api::DebuggerDebuggeeBreakpointListCall), [*debuggees breakpoints set*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/api::DebuggerDebuggeeBreakpointSetCall) and [*debuggees list*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/api::DebuggerDebuggeeListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/CloudDebugger)** +* **[Hub](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/CloudDebugger)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::CallBuilder) -* **[Resources](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::CallBuilder) +* **[Resources](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::Part)** + * **[Parts](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::Part)** * a 
collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::Delegate) to the -[Method Builder](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::Delegate) to the +[Method Builder](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::RequestValue) and -[decodable](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::RequestValue) and +[decodable](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-clouddebugger2/5.0.2-beta-1+20230113/google_clouddebugger2/client::RequestValue) are moved +* [request values](https://docs.rs/google-clouddebugger2/5.0.2+20230113/google_clouddebugger2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/clouddebugger2/src/api.rs b/gen/clouddebugger2/src/api.rs index dd7cf9375a..c2bd2d556e 100644 --- a/gen/clouddebugger2/src/api.rs +++ b/gen/clouddebugger2/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> CloudDebugger { CloudDebugger { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://clouddebugger.googleapis.com/".to_string(), _root_url: "https://clouddebugger.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> CloudDebugger { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/clouddebugger2/src/client.rs b/gen/clouddebugger2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/clouddebugger2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/clouddebugger2/src/lib.rs b/gen/clouddebugger2/src/lib.rs index adb56b4fcd..6cbb4d2178 100644 --- a/gen/clouddebugger2/src/lib.rs +++ b/gen/clouddebugger2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Debugger* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *clouddebugger:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Debugger* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *clouddebugger:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Debugger* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/debugger). diff --git a/gen/clouddeploy1-cli/Cargo.toml b/gen/clouddeploy1-cli/Cargo.toml index a2907673ee..70fb5bdca7 100644 --- a/gen/clouddeploy1-cli/Cargo.toml +++ b/gen/clouddeploy1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-clouddeploy1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Deploy (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/clouddeploy1-cli" @@ -20,13 +20,13 @@ name = "clouddeploy1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-clouddeploy1] path = "../clouddeploy1" -version = "4.0.1+20220223" +version = "5.0.2+20230105" + diff --git a/gen/clouddeploy1-cli/README.md b/gen/clouddeploy1-cli/README.md index ed39046d97..e886f3e06e 100644 --- a/gen/clouddeploy1-cli/README.md +++ b/gen/clouddeploy1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Deploy* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Deploy* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash clouddeploy1 [options] @@ -36,13 +36,17 @@ clouddeploy1 [options] locations-delivery-pipelines-get-iam-policy [-p ]... [-o ] locations-delivery-pipelines-list [-p ]... [-o ] locations-delivery-pipelines-patch (-r )... [-p ]... [-o ] + locations-delivery-pipelines-releases-abandon (-r )... [-p ]... [-o ] locations-delivery-pipelines-releases-create (-r )... [-p ]... [-o ] locations-delivery-pipelines-releases-get [-p ]... [-o ] locations-delivery-pipelines-releases-list [-p ]... [-o ] locations-delivery-pipelines-releases-rollouts-approve (-r )... [-p ]... [-o ] locations-delivery-pipelines-releases-rollouts-create (-r )... [-p ]... [-o ] locations-delivery-pipelines-releases-rollouts-get [-p ]... [-o ] + locations-delivery-pipelines-releases-rollouts-job-runs-get [-p ]... [-o ] + locations-delivery-pipelines-releases-rollouts-job-runs-list [-p ]... [-o ] locations-delivery-pipelines-releases-rollouts-list [-p ]... [-o ] + locations-delivery-pipelines-releases-rollouts-retry-job (-r )... [-p ]... [-o ] locations-delivery-pipelines-set-iam-policy (-r )... [-p ]... [-o ] locations-delivery-pipelines-test-iam-permissions (-r )... [-p ]... [-o ] locations-get [-p ]... 
[-o ] diff --git a/gen/clouddeploy1-cli/mkdocs.yml b/gen/clouddeploy1-cli/mkdocs.yml index 4f466b6b3a..8a6f47b4af 100644 --- a/gen/clouddeploy1-cli/mkdocs.yml +++ b/gen/clouddeploy1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Deploy v4.0.1+20220223 +site_name: Cloud Deploy v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-clouddeploy1-cli site_description: A complete library to interact with Cloud Deploy (protocol v1) @@ -7,38 +7,43 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/clouddeploy1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-delivery-pipelines-create.md', 'Projects', 'Locations Delivery Pipelines Create'] -- ['projects_locations-delivery-pipelines-delete.md', 'Projects', 'Locations Delivery Pipelines Delete'] -- ['projects_locations-delivery-pipelines-get.md', 'Projects', 'Locations Delivery Pipelines Get'] -- ['projects_locations-delivery-pipelines-get-iam-policy.md', 'Projects', 'Locations Delivery Pipelines Get Iam Policy'] -- ['projects_locations-delivery-pipelines-list.md', 'Projects', 'Locations Delivery Pipelines List'] -- ['projects_locations-delivery-pipelines-patch.md', 'Projects', 'Locations Delivery Pipelines Patch'] -- ['projects_locations-delivery-pipelines-releases-create.md', 'Projects', 'Locations Delivery Pipelines Releases Create'] -- ['projects_locations-delivery-pipelines-releases-get.md', 'Projects', 'Locations Delivery Pipelines Releases Get'] -- ['projects_locations-delivery-pipelines-releases-list.md', 'Projects', 'Locations Delivery Pipelines Releases List'] -- ['projects_locations-delivery-pipelines-releases-rollouts-approve.md', 'Projects', 'Locations Delivery Pipelines Releases Rollouts Approve'] -- ['projects_locations-delivery-pipelines-releases-rollouts-create.md', 'Projects', 'Locations Delivery Pipelines Releases Rollouts Create'] -- ['projects_locations-delivery-pipelines-releases-rollouts-get.md', 'Projects', 'Locations 
Delivery Pipelines Releases Rollouts Get'] -- ['projects_locations-delivery-pipelines-releases-rollouts-list.md', 'Projects', 'Locations Delivery Pipelines Releases Rollouts List'] -- ['projects_locations-delivery-pipelines-set-iam-policy.md', 'Projects', 'Locations Delivery Pipelines Set Iam Policy'] -- ['projects_locations-delivery-pipelines-test-iam-permissions.md', 'Projects', 'Locations Delivery Pipelines Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-get-config.md', 'Projects', 'Locations Get Config'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-targets-create.md', 'Projects', 'Locations Targets Create'] -- ['projects_locations-targets-delete.md', 'Projects', 'Locations Targets Delete'] -- ['projects_locations-targets-get.md', 'Projects', 'Locations Targets Get'] -- ['projects_locations-targets-get-iam-policy.md', 'Projects', 'Locations Targets Get Iam Policy'] -- ['projects_locations-targets-list.md', 'Projects', 'Locations Targets List'] -- ['projects_locations-targets-patch.md', 'Projects', 'Locations Targets Patch'] -- ['projects_locations-targets-set-iam-policy.md', 'Projects', 'Locations Targets Set Iam Policy'] -- ['projects_locations-targets-test-iam-permissions.md', 'Projects', 'Locations Targets Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Delivery Pipelines Create': 'projects_locations-delivery-pipelines-create.md' + - 'Locations Delivery Pipelines Delete': 'projects_locations-delivery-pipelines-delete.md' + - 'Locations Delivery Pipelines Get': 
'projects_locations-delivery-pipelines-get.md' + - 'Locations Delivery Pipelines Get Iam Policy': 'projects_locations-delivery-pipelines-get-iam-policy.md' + - 'Locations Delivery Pipelines List': 'projects_locations-delivery-pipelines-list.md' + - 'Locations Delivery Pipelines Patch': 'projects_locations-delivery-pipelines-patch.md' + - 'Locations Delivery Pipelines Releases Abandon': 'projects_locations-delivery-pipelines-releases-abandon.md' + - 'Locations Delivery Pipelines Releases Create': 'projects_locations-delivery-pipelines-releases-create.md' + - 'Locations Delivery Pipelines Releases Get': 'projects_locations-delivery-pipelines-releases-get.md' + - 'Locations Delivery Pipelines Releases List': 'projects_locations-delivery-pipelines-releases-list.md' + - 'Locations Delivery Pipelines Releases Rollouts Approve': 'projects_locations-delivery-pipelines-releases-rollouts-approve.md' + - 'Locations Delivery Pipelines Releases Rollouts Create': 'projects_locations-delivery-pipelines-releases-rollouts-create.md' + - 'Locations Delivery Pipelines Releases Rollouts Get': 'projects_locations-delivery-pipelines-releases-rollouts-get.md' + - 'Locations Delivery Pipelines Releases Rollouts Job Runs Get': 'projects_locations-delivery-pipelines-releases-rollouts-job-runs-get.md' + - 'Locations Delivery Pipelines Releases Rollouts Job Runs List': 'projects_locations-delivery-pipelines-releases-rollouts-job-runs-list.md' + - 'Locations Delivery Pipelines Releases Rollouts List': 'projects_locations-delivery-pipelines-releases-rollouts-list.md' + - 'Locations Delivery Pipelines Releases Rollouts Retry Job': 'projects_locations-delivery-pipelines-releases-rollouts-retry-job.md' + - 'Locations Delivery Pipelines Set Iam Policy': 'projects_locations-delivery-pipelines-set-iam-policy.md' + - 'Locations Delivery Pipelines Test Iam Permissions': 'projects_locations-delivery-pipelines-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Get 
Config': 'projects_locations-get-config.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Targets Create': 'projects_locations-targets-create.md' + - 'Locations Targets Delete': 'projects_locations-targets-delete.md' + - 'Locations Targets Get': 'projects_locations-targets-get.md' + - 'Locations Targets Get Iam Policy': 'projects_locations-targets-get-iam-policy.md' + - 'Locations Targets List': 'projects_locations-targets-list.md' + - 'Locations Targets Patch': 'projects_locations-targets-patch.md' + - 'Locations Targets Set Iam Policy': 'projects_locations-targets-set-iam-policy.md' + - 'Locations Targets Test Iam Permissions': 'projects_locations-targets-test-iam-permissions.md' theme: readthedocs diff --git a/gen/clouddeploy1-cli/src/client.rs b/gen/clouddeploy1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/clouddeploy1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: 
JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/clouddeploy1-cli/src/main.rs b/gen/clouddeploy1-cli/src/main.rs index fb0805d283..2318f3b06b 100644 --- a/gen/clouddeploy1-cli/src/main.rs +++ b/gen/clouddeploy1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_clouddeploy1::{api, Error, oauth2}; +use google_clouddeploy1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -80,15 +79,18 @@ where "condition.targets-present-condition.missing-targets" => Some(("condition.targetsPresentCondition.missingTargets", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "condition.targets-present-condition.status" => Some(("condition.targetsPresentCondition.status", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "condition.targets-present-condition.update-time" => Some(("condition.targetsPresentCondition.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "condition.targets-type-condition.error-details" => Some(("condition.targetsTypeCondition.errorDetails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "condition.targets-type-condition.status" => Some(("condition.targetsTypeCondition.status", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "suspended" => Some(("suspended", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "condition", "create-time", "description", "etag", "labels", "missing-targets", "name", "pipeline-ready-condition", "status", "targets-present-condition", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "condition", "create-time", "description", "error-details", "etag", "labels", "missing-targets", "name", "pipeline-ready-condition", "status", "suspended", "targets-present-condition", "targets-type-condition", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -103,7 +105,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -165,19 +167,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", 
"boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, "etag" => { call = call.etag(value.unwrap_or("")); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -285,7 +287,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -344,7 +346,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -428,15 +430,18 @@ where "condition.targets-present-condition.missing-targets" => Some(("condition.targetsPresentCondition.missingTargets", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "condition.targets-present-condition.status" => Some(("condition.targetsPresentCondition.status", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "condition.targets-present-condition.update-time" => Some(("condition.targetsPresentCondition.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"condition.targets-type-condition.error-details" => Some(("condition.targetsTypeCondition.errorDetails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "condition.targets-type-condition.status" => Some(("condition.targetsTypeCondition.status", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "suspended" => Some(("suspended", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "condition", "create-time", "description", "etag", "labels", "missing-targets", "name", "pipeline-ready-condition", "status", "targets-present-condition", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "condition", "create-time", "description", "error-details", "etag", "labels", "missing-targets", "name", "pipeline-ready-condition", "status", "suspended", "targets-present-condition", "targets-type-condition", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -451,16 +456,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = 
call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -509,6 +514,90 @@ where } } + async fn _projects_locations_delivery_pipelines_releases_abandon(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::AbandonReleaseRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_delivery_pipelines_releases_abandon(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + 
} + async fn _projects_locations_delivery_pipelines_releases_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -532,6 +621,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "abandoned" => Some(("abandoned", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delivery-pipeline-snapshot.annotations" => Some(("deliveryPipelineSnapshot.annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -540,11 +630,14 @@ where "delivery-pipeline-snapshot.condition.targets-present-condition.missing-targets" => Some(("deliveryPipelineSnapshot.condition.targetsPresentCondition.missingTargets", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "delivery-pipeline-snapshot.condition.targets-present-condition.status" => Some(("deliveryPipelineSnapshot.condition.targetsPresentCondition.status", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "delivery-pipeline-snapshot.condition.targets-present-condition.update-time" => Some(("deliveryPipelineSnapshot.condition.targetsPresentCondition.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "delivery-pipeline-snapshot.condition.targets-type-condition.error-details" => Some(("deliveryPipelineSnapshot.condition.targetsTypeCondition.errorDetails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "delivery-pipeline-snapshot.condition.targets-type-condition.status" => Some(("deliveryPipelineSnapshot.condition.targetsTypeCondition.status", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "delivery-pipeline-snapshot.create-time" => 
Some(("deliveryPipelineSnapshot.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delivery-pipeline-snapshot.description" => Some(("deliveryPipelineSnapshot.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delivery-pipeline-snapshot.etag" => Some(("deliveryPipelineSnapshot.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delivery-pipeline-snapshot.labels" => Some(("deliveryPipelineSnapshot.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "delivery-pipeline-snapshot.name" => Some(("deliveryPipelineSnapshot.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "delivery-pipeline-snapshot.suspended" => Some(("deliveryPipelineSnapshot.suspended", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "delivery-pipeline-snapshot.uid" => Some(("deliveryPipelineSnapshot.uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delivery-pipeline-snapshot.update-time" => Some(("deliveryPipelineSnapshot.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -559,7 +652,7 @@ where "skaffold-version" => Some(("skaffoldVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "condition", "create-time", "delivery-pipeline-snapshot", "description", "etag", "labels", "missing-targets", "name", "pipeline-ready-condition", "render-end-time", "render-start-time", "render-state", "skaffold-config-path", "skaffold-config-uri", "skaffold-version", "status", "targets-present-condition", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoned", 
"annotations", "condition", "create-time", "delivery-pipeline-snapshot", "description", "error-details", "etag", "labels", "missing-targets", "name", "pipeline-ready-condition", "render-end-time", "render-start-time", "render-state", "skaffold-config-path", "skaffold-config-uri", "skaffold-version", "status", "suspended", "targets-present-condition", "targets-type-condition", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -574,7 +667,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -691,7 +784,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -867,12 +960,15 @@ where "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "failure-reason" => Some(("failureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "metadata.cloud-run.revision" => Some(("metadata.cloudRun.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "metadata.cloud-run.service" => Some(("metadata.cloudRun.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "metadata.cloud-run.service-urls" => Some(("metadata.cloudRun.serviceUrls", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "target-id" => Some(("targetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "approval-state", "approve-time", "create-time", "deploy-end-time", "deploy-failure-cause", "deploy-start-time", "deploying-build", "description", "enqueue-time", "etag", "failure-reason", "labels", "name", "state", "target-id", "uid"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "approval-state", "approve-time", "cloud-run", "create-time", "deploy-end-time", "deploy-failure-cause", "deploy-start-time", "deploying-build", "description", "enqueue-time", "etag", "failure-reason", "labels", "metadata", "name", "revision", "service", "service-urls", "state", "target-id", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -887,7 +983,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "rollout-id" => { call = call.rollout_id(value.unwrap_or("")); @@ -994,9 +1090,61 @@ where } } - async fn _projects_locations_delivery_pipelines_releases_rollouts_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _projects_locations_delivery_pipelines_releases_rollouts_job_runs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { - let mut call = 
self.hub.projects().locations_delivery_pipelines_releases_rollouts_list(opt.value_of("parent").unwrap_or("")); + let mut call = self.hub.projects().locations_delivery_pipelines_releases_rollouts_job_runs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_delivery_pipelines_releases_rollouts_job_runs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_delivery_pipelines_releases_rollouts_job_runs_list(opt.value_of("parent").unwrap_or("")); for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -1004,7 +1152,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1059,6 +1207,157 @@ where } } + async fn _projects_locations_delivery_pipelines_releases_rollouts_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_delivery_pipelines_releases_rollouts_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = 
call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_delivery_pipelines_releases_rollouts_retry_job(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "job-id" => Some(("jobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "phase-id" => Some(("phaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["job-id", "phase-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RetryJobRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_delivery_pipelines_releases_rollouts_retry_job(request, opt.value_of("rollout").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { 
+ let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_delivery_pipelines_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1345,7 +1644,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1595,7 +1894,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1680,11 +1979,12 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "require-approval" => Some(("requireApproval", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "run.location" => Some(("run.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "target-id" => Some(("targetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "anthos-cluster", "cluster", "create-time", "description", "etag", "gke", "internal-ip", 
"labels", "membership", "name", "require-approval", "target-id", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "anthos-cluster", "cluster", "create-time", "description", "etag", "gke", "internal-ip", "labels", "location", "membership", "name", "require-approval", "run", "target-id", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1699,7 +1999,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "target-id" => { call = call.target_id(value.unwrap_or("")); @@ -1761,7 +2061,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -1770,7 +2070,7 @@ where call = call.etag(value.unwrap_or("")); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1878,7 +2178,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1937,7 +2237,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2025,11 +2325,12 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "require-approval" => Some(("requireApproval", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "run.location" => Some(("run.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "target-id" => Some(("targetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "anthos-cluster", "cluster", "create-time", "description", "etag", "gke", "internal-ip", "labels", "membership", "name", "require-approval", "target-id", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "anthos-cluster", "cluster", "create-time", "description", "etag", "gke", "internal-ip", "labels", "location", "membership", "name", "require-approval", "run", "target-id", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2044,16 +2345,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, 
"validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2299,6 +2600,9 @@ where ("locations-delivery-pipelines-patch", Some(opt)) => { call_result = self._projects_locations_delivery_pipelines_patch(opt, dry_run, &mut err).await; }, + ("locations-delivery-pipelines-releases-abandon", Some(opt)) => { + call_result = self._projects_locations_delivery_pipelines_releases_abandon(opt, dry_run, &mut err).await; + }, ("locations-delivery-pipelines-releases-create", Some(opt)) => { call_result = self._projects_locations_delivery_pipelines_releases_create(opt, dry_run, &mut err).await; }, @@ -2317,9 +2621,18 @@ where ("locations-delivery-pipelines-releases-rollouts-get", Some(opt)) => { call_result = self._projects_locations_delivery_pipelines_releases_rollouts_get(opt, dry_run, &mut err).await; }, + ("locations-delivery-pipelines-releases-rollouts-job-runs-get", Some(opt)) => { + call_result = self._projects_locations_delivery_pipelines_releases_rollouts_job_runs_get(opt, dry_run, &mut err).await; + }, + ("locations-delivery-pipelines-releases-rollouts-job-runs-list", Some(opt)) => { + call_result = self._projects_locations_delivery_pipelines_releases_rollouts_job_runs_list(opt, dry_run, &mut err).await; + }, ("locations-delivery-pipelines-releases-rollouts-list", Some(opt)) => { call_result = 
self._projects_locations_delivery_pipelines_releases_rollouts_list(opt, dry_run, &mut err).await; }, + ("locations-delivery-pipelines-releases-rollouts-retry-job", Some(opt)) => { + call_result = self._projects_locations_delivery_pipelines_releases_rollouts_retry_job(opt, dry_run, &mut err).await; + }, ("locations-delivery-pipelines-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_delivery_pipelines_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -2450,7 +2763,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-delivery-pipelines-create', 'locations-delivery-pipelines-delete', 'locations-delivery-pipelines-get', 'locations-delivery-pipelines-get-iam-policy', 'locations-delivery-pipelines-list', 'locations-delivery-pipelines-patch', 'locations-delivery-pipelines-releases-create', 'locations-delivery-pipelines-releases-get', 'locations-delivery-pipelines-releases-list', 'locations-delivery-pipelines-releases-rollouts-approve', 'locations-delivery-pipelines-releases-rollouts-create', 'locations-delivery-pipelines-releases-rollouts-get', 'locations-delivery-pipelines-releases-rollouts-list', 'locations-delivery-pipelines-set-iam-policy', 'locations-delivery-pipelines-test-iam-permissions', 'locations-get', 'locations-get-config', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-targets-create', 'locations-targets-delete', 'locations-targets-get', 'locations-targets-get-iam-policy', 'locations-targets-list', 'locations-targets-patch', 'locations-targets-set-iam-policy' and 'locations-targets-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-delivery-pipelines-create', 'locations-delivery-pipelines-delete', 'locations-delivery-pipelines-get', 'locations-delivery-pipelines-get-iam-policy', 'locations-delivery-pipelines-list', 'locations-delivery-pipelines-patch', 
'locations-delivery-pipelines-releases-abandon', 'locations-delivery-pipelines-releases-create', 'locations-delivery-pipelines-releases-get', 'locations-delivery-pipelines-releases-list', 'locations-delivery-pipelines-releases-rollouts-approve', 'locations-delivery-pipelines-releases-rollouts-create', 'locations-delivery-pipelines-releases-rollouts-get', 'locations-delivery-pipelines-releases-rollouts-job-runs-get', 'locations-delivery-pipelines-releases-rollouts-job-runs-list', 'locations-delivery-pipelines-releases-rollouts-list', 'locations-delivery-pipelines-releases-rollouts-retry-job', 'locations-delivery-pipelines-set-iam-policy', 'locations-delivery-pipelines-test-iam-permissions', 'locations-get', 'locations-get-config', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-targets-create', 'locations-targets-delete', 'locations-targets-get', 'locations-targets-get-iam-policy', 'locations-targets-list', 'locations-targets-patch', 'locations-targets-set-iam-policy' and 'locations-targets-test-iam-permissions'", vec![ ("locations-delivery-pipelines-create", Some(r##"Creates a new DeliveryPipeline in a given project and location."##), "Details at http://byron.github.io/google-apis-rs/google_clouddeploy1_cli/projects_locations-delivery-pipelines-create", @@ -2529,7 +2842,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2589,6 +2902,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-delivery-pipelines-releases-abandon", + Some(r##"Abandons a Release in the Delivery Pipeline."##), + "Details at http://byron.github.io/google-apis-rs/google_clouddeploy1_cli/projects_locations-delivery-pipelines-releases-abandon", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the Release. Format is projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/ releases/{release}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2739,6 +3080,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-delivery-pipelines-releases-rollouts-job-runs-get", + Some(r##"Gets details of a single JobRun."##), + "Details at http://byron.github.io/google-apis-rs/google_clouddeploy1_cli/projects_locations-delivery-pipelines-releases-rollouts-job-runs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the `JobRun`. 
Format must be projects/{project_id}/locations/{location_name}/deliveryPipelines/{pipeline_name}/releases/{release_name}/rollouts/{rollout_name}/jobRuns/{job_run_name}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-delivery-pipelines-releases-rollouts-job-runs-list", + Some(r##"Lists JobRuns in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_clouddeploy1_cli/projects_locations-delivery-pipelines-releases-rollouts-job-runs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The `Rollout` which owns this collection of `JobRun` objects."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2761,6 +3146,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-delivery-pipelines-releases-rollouts-retry-job", + Some(r##"Retries the specified Job in a Rollout."##), + "Details at http://byron.github.io/google-apis-rs/google_clouddeploy1_cli/projects_locations-delivery-pipelines-releases-rollouts-retry-job", + vec![ + (Some(r##"rollout"##), + None, + Some(r##"Required. Name of the Rollout. 
Format is projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/ releases/{release}/rollouts/{rollout}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2773,7 +3186,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2801,7 +3214,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3061,7 +3474,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3133,7 +3546,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3161,7 +3574,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3189,7 +3602,7 @@ async fn main() { let mut app = App::new("clouddeploy1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20230105") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_clouddeploy1_cli") .arg(Arg::with_name("url") diff --git a/gen/clouddeploy1/Cargo.toml b/gen/clouddeploy1/Cargo.toml index 06d596f150..d1951039c2 100644 --- a/gen/clouddeploy1/Cargo.toml +++ b/gen/clouddeploy1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-clouddeploy1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Deploy (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/clouddeploy1" homepage = "https://cloud.google.com/deploy/" -documentation = "https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105" 
+documentation = "https://docs.rs/google-clouddeploy1/5.0.2+20230105" license = "MIT" keywords = ["clouddeploy", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/clouddeploy1/README.md b/gen/clouddeploy1/README.md index f7da747395..23081acce0 100644 --- a/gen/clouddeploy1/README.md +++ b/gen/clouddeploy1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-clouddeploy1` library allows access to all features of the *Google Cloud Deploy* service. -This documentation was generated from *Cloud Deploy* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *clouddeploy:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Deploy* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *clouddeploy:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Deploy* *v1* API can be found at the [official documentation site](https://cloud.google.com/deploy/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/CloudDeploy) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/CloudDeploy) ... 
* projects - * [*locations delivery pipelines create*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineCreateCall), [*locations delivery pipelines delete*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineDeleteCall), [*locations delivery pipelines get*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineGetCall), [*locations delivery pipelines get iam policy*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineGetIamPolicyCall), [*locations delivery pipelines list*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineListCall), [*locations delivery pipelines patch*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelinePatchCall), [*locations delivery pipelines releases abandon*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseAbandonCall), [*locations delivery pipelines releases create*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseCreateCall), [*locations delivery pipelines releases get*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseGetCall), [*locations delivery pipelines releases list*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseListCall), [*locations delivery pipelines releases rollouts approve*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutApproveCall), [*locations delivery pipelines releases rollouts 
create*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutCreateCall), [*locations delivery pipelines releases rollouts get*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutGetCall), [*locations delivery pipelines releases rollouts job runs get*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutJobRunGetCall), [*locations delivery pipelines releases rollouts job runs list*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutJobRunListCall), [*locations delivery pipelines releases rollouts list*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutListCall), [*locations delivery pipelines releases rollouts retry job*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutRetryJobCall), [*locations delivery pipelines set iam policy*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineSetIamPolicyCall), [*locations delivery pipelines test iam permissions*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineTestIamPermissionCall), [*locations get*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationGetCall), [*locations get config*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationGetConfigCall), [*locations list*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationListCall), [*locations operations 
cancel*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationOperationListCall), [*locations targets create*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationTargetCreateCall), [*locations targets delete*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationTargetDeleteCall), [*locations targets get*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationTargetGetCall), [*locations targets get iam policy*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationTargetGetIamPolicyCall), [*locations targets list*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationTargetListCall), [*locations targets patch*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationTargetPatchCall), [*locations targets set iam policy*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationTargetSetIamPolicyCall) and [*locations targets test iam permissions*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/api::ProjectLocationTargetTestIamPermissionCall) + * [*locations delivery pipelines create*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineCreateCall), [*locations delivery pipelines 
delete*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineDeleteCall), [*locations delivery pipelines get*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineGetCall), [*locations delivery pipelines get iam policy*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineGetIamPolicyCall), [*locations delivery pipelines list*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineListCall), [*locations delivery pipelines patch*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelinePatchCall), [*locations delivery pipelines releases abandon*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseAbandonCall), [*locations delivery pipelines releases create*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseCreateCall), [*locations delivery pipelines releases get*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseGetCall), [*locations delivery pipelines releases list*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseListCall), [*locations delivery pipelines releases rollouts approve*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutApproveCall), [*locations delivery pipelines releases rollouts create*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutCreateCall), [*locations delivery pipelines releases rollouts 
get*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutGetCall), [*locations delivery pipelines releases rollouts job runs get*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutJobRunGetCall), [*locations delivery pipelines releases rollouts job runs list*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutJobRunListCall), [*locations delivery pipelines releases rollouts list*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutListCall), [*locations delivery pipelines releases rollouts retry job*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineReleaseRolloutRetryJobCall), [*locations delivery pipelines set iam policy*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineSetIamPolicyCall), [*locations delivery pipelines test iam permissions*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationDeliveryPipelineTestIamPermissionCall), [*locations get*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationGetCall), [*locations get config*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationGetConfigCall), [*locations list*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationOperationDeleteCall), [*locations operations 
get*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationOperationListCall), [*locations targets create*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationTargetCreateCall), [*locations targets delete*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationTargetDeleteCall), [*locations targets get*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationTargetGetCall), [*locations targets get iam policy*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationTargetGetIamPolicyCall), [*locations targets list*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationTargetListCall), [*locations targets patch*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationTargetPatchCall), [*locations targets set iam policy*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationTargetSetIamPolicyCall) and [*locations targets test iam permissions*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/api::ProjectLocationTargetTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/CloudDeploy)** +* **[Hub](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/CloudDeploy)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::MethodsBuilder) which in turn - allow 
access to individual [*Call Builders*](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::CallBuilder) -* **[Resources](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::CallBuilder) +* **[Resources](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::Part)** + * **[Parts](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::Delegate) to the -[Method Builder](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::Delegate) to the +[Method Builder](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::RequestValue) and -[decodable](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::RequestValue) and +[decodable](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-clouddeploy1/5.0.2-beta-1+20230105/google_clouddeploy1/client::RequestValue) are moved +* [request values](https://docs.rs/google-clouddeploy1/5.0.2+20230105/google_clouddeploy1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/clouddeploy1/src/api.rs b/gen/clouddeploy1/src/api.rs index 9f8ec15164..0db03d2b02 100644 --- a/gen/clouddeploy1/src/api.rs +++ b/gen/clouddeploy1/src/api.rs @@ -124,7 +124,7 @@ impl<'a, S> CloudDeploy { CloudDeploy { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://clouddeploy.googleapis.com/".to_string(), _root_url: "https://clouddeploy.googleapis.com/".to_string(), } @@ -135,7 +135,7 @@ impl<'a, S> CloudDeploy { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/clouddeploy1/src/client.rs b/gen/clouddeploy1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/clouddeploy1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/clouddeploy1/src/lib.rs b/gen/clouddeploy1/src/lib.rs index 6b56f6ecf9..57c09ef0ae 100644 --- a/gen/clouddeploy1/src/lib.rs +++ b/gen/clouddeploy1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Deploy* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *clouddeploy:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Deploy* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *clouddeploy:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Deploy* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/deploy/). diff --git a/gen/clouderrorreporting1_beta1-cli/Cargo.toml b/gen/clouderrorreporting1_beta1-cli/Cargo.toml index 70257fa40c..1d173bdfbb 100644 --- a/gen/clouderrorreporting1_beta1-cli/Cargo.toml +++ b/gen/clouderrorreporting1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-clouderrorreporting1_beta1-cli" -version = "4.0.1+20220302" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Clouderrorreporting (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/clouderrorreporting1_beta1-cli" @@ -20,13 +20,13 @@ name = "clouderrorreporting1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-clouderrorreporting1_beta1] path = "../clouderrorreporting1_beta1" -version = "4.0.1+20220302" +version = "5.0.2+20230112" + diff --git a/gen/clouderrorreporting1_beta1-cli/README.md b/gen/clouderrorreporting1_beta1-cli/README.md index 
43e2c12673..a548c5577b 100644 --- a/gen/clouderrorreporting1_beta1-cli/README.md +++ b/gen/clouderrorreporting1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Clouderrorreporting* API at revision *20220302*. The CLI is at version *4.0.1*. +This documentation was generated from the *Clouderrorreporting* API at revision *20230112*. The CLI is at version *5.0.2*. ```bash clouderrorreporting1-beta1 [options] diff --git a/gen/clouderrorreporting1_beta1-cli/mkdocs.yml b/gen/clouderrorreporting1_beta1-cli/mkdocs.yml index 8b64aee3cd..42054b7309 100644 --- a/gen/clouderrorreporting1_beta1-cli/mkdocs.yml +++ b/gen/clouderrorreporting1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Clouderrorreporting v4.0.1+20220302 +site_name: Clouderrorreporting v5.0.2+20230112 site_url: http://byron.github.io/google-apis-rs/google-clouderrorreporting1_beta1-cli site_description: A complete library to interact with Clouderrorreporting (protocol v1beta1) @@ -7,14 +7,15 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/clouderrorreport docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_delete-events.md', 'Projects', 'Delete Events'] -- ['projects_events-list.md', 'Projects', 'Events List'] -- ['projects_events-report.md', 'Projects', 'Events Report'] -- ['projects_group-stats-list.md', 'Projects', 'Group Stats List'] -- ['projects_groups-get.md', 'Projects', 'Groups Get'] -- ['projects_groups-update.md', 'Projects', 'Groups Update'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Delete Events': 'projects_delete-events.md' + - 'Events List': 'projects_events-list.md' + - 'Events Report': 'projects_events-report.md' + - 'Group Stats List': 'projects_group-stats-list.md' + - 'Groups Get': 'projects_groups-get.md' + - 'Groups Update': 'projects_groups-update.md' theme: readthedocs diff --git 
a/gen/clouderrorreporting1_beta1-cli/src/client.rs b/gen/clouderrorreporting1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/clouderrorreporting1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/clouderrorreporting1_beta1-cli/src/main.rs b/gen/clouderrorreporting1_beta1-cli/src/main.rs index 7c5c299b25..1f206e52ca 100644 --- a/gen/clouderrorreporting1_beta1-cli/src/main.rs +++ b/gen/clouderrorreporting1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_clouderrorreporting1_beta1::{api, Error, oauth2}; +use google_clouderrorreporting1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -125,7 +124,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "group-id" => { call = call.group_id(value.unwrap_or("")); @@ -283,7 +282,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "timed-count-duration" => { - call = call.timed_count_duration(value.unwrap_or("")); + call = call.timed_count_duration( value.map(|v| arg_from_str(v, err, "timed-count-duration", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, "time-range-period" => { call = call.time_range_period(value.unwrap_or("")); @@ -301,7 +300,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, "order" => { call = call.order(value.unwrap_or("")); @@ -310,7 +309,7 @@ where call = call.add_group_id(value.unwrap_or("")); }, "alignment-time" => { - call = call.alignment_time(value.unwrap_or("")); + call = call.alignment_time( value.map(|v| arg_from_str(v, err, "alignment-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "alignment" => { call = call.alignment(value.unwrap_or("")); @@ -684,7 +683,7 @@ async fn main() { vec![ (Some(r##"project-name"##), None, - Some(r##"Required. The resource name of the Google Cloud Platform project. Written as `projects/{projectID}` or `projects/{projectNumber}`, where `{projectID}` and `{projectNumber}` can be found in the [Google Cloud Console](https://support.google.com/cloud/answer/6158840). Examples: `projects/my-project-123`, `projects/5551234`."##), + Some(r##"Required. The resource name of the Google Cloud Platform project. Written as `projects/{projectID}` or `projects/{projectNumber}`, where `{projectID}` and `{projectNumber}` can be found in the [Google Cloud console](https://support.google.com/cloud/answer/6158840). Examples: `projects/my-project-123`, `projects/5551234`."##), Some(true), Some(false)), @@ -706,7 +705,7 @@ async fn main() { vec![ (Some(r##"group-name"##), None, - Some(r##"Required. The group resource name. Written as `projects/{projectID}/groups/{group_name}`. Call [`groupStats.list`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/projects.groupStats/list) to return a list of groups belonging to this project. Example: `projects/my-project-123/groups/my-group`"##), + Some(r##"Required. The group resource name. Written as `projects/{projectID}/groups/{group_name}`. Call groupStats.list to return a list of groups belonging to this project. 
Example: `projects/my-project-123/groups/my-group`"##), Some(true), Some(false)), @@ -756,7 +755,7 @@ async fn main() { let mut app = App::new("clouderrorreporting1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220302") + .version("5.0.2+20230112") .about("Groups and counts similar errors from cloud services and applications, reports new errors, and provides access to error groups and their associated errors. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_clouderrorreporting1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/clouderrorreporting1_beta1/Cargo.toml b/gen/clouderrorreporting1_beta1/Cargo.toml index 8d040487f1..87bc538957 100644 --- a/gen/clouderrorreporting1_beta1/Cargo.toml +++ b/gen/clouderrorreporting1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-clouderrorreporting1_beta1" -version = "5.0.2-beta-1+20230112" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Clouderrorreporting (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/clouderrorreporting1_beta1" homepage = "https://cloud.google.com/error-reporting/" -documentation = "https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112" +documentation = "https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112" license = "MIT" keywords = ["clouderrorreporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/clouderrorreporting1_beta1/README.md b/gen/clouderrorreporting1_beta1/README.md index a49ec52070..1ebc309188 100644 --- a/gen/clouderrorreporting1_beta1/README.md +++ b/gen/clouderrorreporting1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-clouderrorreporting1_beta1` library allows access to all features of the *Google Clouderrorreporting* service. 
-This documentation was generated from *Clouderrorreporting* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *clouderrorreporting:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Clouderrorreporting* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *clouderrorreporting:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Clouderrorreporting* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/error-reporting/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/Clouderrorreporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/Clouderrorreporting) ... 
* projects - * [*delete events*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/api::ProjectDeleteEventCall), [*events list*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/api::ProjectEventListCall), [*events report*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/api::ProjectEventReportCall), [*group stats list*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/api::ProjectGroupStatListCall), [*groups get*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/api::ProjectGroupGetCall) and [*groups update*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/api::ProjectGroupUpdateCall) + * [*delete events*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/api::ProjectDeleteEventCall), [*events list*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/api::ProjectEventListCall), [*events report*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/api::ProjectEventReportCall), [*group stats list*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/api::ProjectGroupStatListCall), [*groups get*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/api::ProjectGroupGetCall) and [*groups update*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/api::ProjectGroupUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/Clouderrorreporting)** +* **[Hub](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/Clouderrorreporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2-beta-1+20230112/google_clouderrorreporting1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-clouderrorreporting1_beta1/5.0.2+20230112/google_clouderrorreporting1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/clouderrorreporting1_beta1/src/api.rs b/gen/clouderrorreporting1_beta1/src/api.rs index c9e8fe4a8b..ddc72abdb8 100644 --- a/gen/clouderrorreporting1_beta1/src/api.rs +++ b/gen/clouderrorreporting1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Clouderrorreporting { Clouderrorreporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://clouderrorreporting.googleapis.com/".to_string(), _root_url: "https://clouderrorreporting.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Clouderrorreporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/clouderrorreporting1_beta1/src/client.rs b/gen/clouderrorreporting1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/clouderrorreporting1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/clouderrorreporting1_beta1/src/lib.rs b/gen/clouderrorreporting1_beta1/src/lib.rs index a1d79b8f85..ea3c48ece6 100644 --- a/gen/clouderrorreporting1_beta1/src/lib.rs +++ b/gen/clouderrorreporting1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Clouderrorreporting* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *clouderrorreporting:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Clouderrorreporting* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *clouderrorreporting:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Clouderrorreporting* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/error-reporting/). diff --git a/gen/cloudfunctions1-cli/Cargo.toml b/gen/cloudfunctions1-cli/Cargo.toml index bd593ffedf..7c007a13a9 100644 --- a/gen/cloudfunctions1-cli/Cargo.toml +++ b/gen/cloudfunctions1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudfunctions1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Functions (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudfunctions1-cli" @@ -20,13 +20,13 @@ name = "cloudfunctions1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudfunctions1] path = "../cloudfunctions1" -version = "4.0.1+20220224" +version = "5.0.2+20230119" + diff --git a/gen/cloudfunctions1-cli/README.md b/gen/cloudfunctions1-cli/README.md index af3ab0b6b9..24d8ed82ed 100644 --- a/gen/cloudfunctions1-cli/README.md +++ 
b/gen/cloudfunctions1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Functions* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Functions* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash cloudfunctions1 [options] diff --git a/gen/cloudfunctions1-cli/mkdocs.yml b/gen/cloudfunctions1-cli/mkdocs.yml index 0dc3757310..76c967c85a 100644 --- a/gen/cloudfunctions1-cli/mkdocs.yml +++ b/gen/cloudfunctions1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Functions v4.0.1+20220224 +site_name: Cloud Functions v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-cloudfunctions1-cli site_description: A complete library to interact with Cloud Functions (protocol v1) @@ -7,22 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudfunctions1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['projects_locations-functions-call.md', 'Projects', 'Locations Functions Call'] -- ['projects_locations-functions-create.md', 'Projects', 'Locations Functions Create'] -- ['projects_locations-functions-delete.md', 'Projects', 'Locations Functions Delete'] -- ['projects_locations-functions-generate-download-url.md', 'Projects', 'Locations Functions Generate Download Url'] -- ['projects_locations-functions-generate-upload-url.md', 'Projects', 'Locations Functions Generate Upload Url'] -- ['projects_locations-functions-get.md', 'Projects', 'Locations Functions Get'] -- ['projects_locations-functions-get-iam-policy.md', 'Projects', 'Locations Functions Get Iam Policy'] -- ['projects_locations-functions-list.md', 'Projects', 'Locations Functions List'] -- ['projects_locations-functions-patch.md', 'Projects', 'Locations Functions Patch'] -- 
['projects_locations-functions-set-iam-policy.md', 'Projects', 'Locations Functions Set Iam Policy'] -- ['projects_locations-functions-test-iam-permissions.md', 'Projects', 'Locations Functions Test Iam Permissions'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Projects': + - 'Locations Functions Call': 'projects_locations-functions-call.md' + - 'Locations Functions Create': 'projects_locations-functions-create.md' + - 'Locations Functions Delete': 'projects_locations-functions-delete.md' + - 'Locations Functions Generate Download Url': 'projects_locations-functions-generate-download-url.md' + - 'Locations Functions Generate Upload Url': 'projects_locations-functions-generate-upload-url.md' + - 'Locations Functions Get': 'projects_locations-functions-get.md' + - 'Locations Functions Get Iam Policy': 'projects_locations-functions-get-iam-policy.md' + - 'Locations Functions List': 'projects_locations-functions-list.md' + - 'Locations Functions Patch': 'projects_locations-functions-patch.md' + - 'Locations Functions Set Iam Policy': 'projects_locations-functions-set-iam-policy.md' + - 'Locations Functions Test Iam Permissions': 'projects_locations-functions-test-iam-permissions.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/cloudfunctions1-cli/src/client.rs b/gen/cloudfunctions1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudfunctions1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, 
PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudfunctions1-cli/src/main.rs b/gen/cloudfunctions1-cli/src/main.rs index e2a21ec147..5db9c4d6ed 100644 --- a/gen/cloudfunctions1-cli/src/main.rs +++ b/gen/cloudfunctions1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudfunctions1::{api, Error, oauth2}; +use google_cloudfunctions1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -532,8 +531,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["kms-key-name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -652,7 +652,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -711,7 +711,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -834,7 +834,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1065,7 +1065,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1457,7 +1457,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1529,7 +1529,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1557,7 +1557,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1607,7 +1607,7 @@ async fn main() { let mut app = App::new("cloudfunctions1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230119") .about("Manages lightweight user-provided functions executed in response to events.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudfunctions1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudfunctions1/Cargo.toml b/gen/cloudfunctions1/Cargo.toml index eed95178a1..87b14e19d2 100644 --- a/gen/cloudfunctions1/Cargo.toml +++ b/gen/cloudfunctions1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudfunctions1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Functions (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudfunctions1" homepage = "https://cloud.google.com/functions" -documentation = "https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-cloudfunctions1/5.0.2+20230119" license = "MIT" keywords = ["cloudfunctions", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudfunctions1/README.md b/gen/cloudfunctions1/README.md index eb237f203b..52d3259214 100644 --- a/gen/cloudfunctions1/README.md +++ b/gen/cloudfunctions1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-cloudfunctions1` library allows access to all features of the *Google Cloud Functions* service. -This documentation was generated from *Cloud Functions* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *cloudfunctions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Functions* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *cloudfunctions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Functions* *v1* API can be found at the [official documentation site](https://cloud.google.com/functions). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/CloudFunctions) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/CloudFunctions) ... -* [operations](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::Operation) - * [*get*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::OperationGetCall) and [*list*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::OperationListCall) +* [operations](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::Operation) + * [*get*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::OperationGetCall) and [*list*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::OperationListCall) * projects - * [*locations functions call*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionCallCall), [*locations functions create*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionCreateCall), [*locations functions delete*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionDeleteCall), [*locations functions generate download 
url*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionGenerateDownloadUrlCall), [*locations functions generate upload url*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionGenerateUploadUrlCall), [*locations functions get*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionGetCall), [*locations functions get iam policy*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionGetIamPolicyCall), [*locations functions list*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionListCall), [*locations functions patch*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionPatchCall), [*locations functions set iam policy*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionSetIamPolicyCall), [*locations functions test iam permissions*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationFunctionTestIamPermissionCall) and [*locations list*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/api::ProjectLocationListCall) + * [*locations functions call*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionCallCall), [*locations functions create*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionCreateCall), [*locations functions delete*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionDeleteCall), [*locations functions generate download 
url*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionGenerateDownloadUrlCall), [*locations functions generate upload url*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionGenerateUploadUrlCall), [*locations functions get*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionGetCall), [*locations functions get iam policy*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionGetIamPolicyCall), [*locations functions list*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionListCall), [*locations functions patch*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionPatchCall), [*locations functions set iam policy*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionSetIamPolicyCall), [*locations functions test iam permissions*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationFunctionTestIamPermissionCall) and [*locations list*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/api::ProjectLocationListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/CloudFunctions)** +* **[Hub](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/CloudFunctions)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::MethodsBuilder) which in turn - allow access to 
individual [*Call Builders*](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::Part)** + * **[Parts](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudfunctions1/5.0.2-beta-1+20230119/google_cloudfunctions1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudfunctions1/5.0.2+20230119/google_cloudfunctions1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudfunctions1/src/api.rs b/gen/cloudfunctions1/src/api.rs index 7a05e538e7..21a5732d82 100644 --- a/gen/cloudfunctions1/src/api.rs +++ b/gen/cloudfunctions1/src/api.rs @@ -123,7 +123,7 @@ impl<'a, S> CloudFunctions { CloudFunctions { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudfunctions.googleapis.com/".to_string(), _root_url: "https://cloudfunctions.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudFunctions { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudfunctions1/src/client.rs b/gen/cloudfunctions1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudfunctions1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudfunctions1/src/lib.rs b/gen/cloudfunctions1/src/lib.rs index 16c33bc03c..f64f44bed0 100644 --- a/gen/cloudfunctions1/src/lib.rs +++ b/gen/cloudfunctions1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Functions* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *cloudfunctions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Functions* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *cloudfunctions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Functions* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/functions). diff --git a/gen/cloudidentity1-cli/Cargo.toml b/gen/cloudidentity1-cli/Cargo.toml index 1efe6dfd3c..1fb2396461 100644 --- a/gen/cloudidentity1-cli/Cargo.toml +++ b/gen/cloudidentity1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudidentity1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Identity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudidentity1-cli" @@ -20,13 +20,13 @@ name = "cloudidentity1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudidentity1] path = "../cloudidentity1" -version = "4.0.1+20220301" +version = "5.0.2+20230117" + diff --git a/gen/cloudidentity1-cli/README.md b/gen/cloudidentity1-cli/README.md index 6e3fc88217..79eacf9e71 100644 --- a/gen/cloudidentity1-cli/README.md +++ b/gen/cloudidentity1-cli/README.md @@ -25,10 +25,16 @@ Find the source 
code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Identity* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Identity* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash cloudidentity1 [options] + customers + userinvitations-cancel (-r )... [-p ]... [-o ] + userinvitations-get [-p ]... [-o ] + userinvitations-is-invitable-user [-p ]... [-o ] + userinvitations-list [-p ]... [-o ] + userinvitations-send (-r )... [-p ]... [-o ] devices cancel-wipe (-r )... [-p ]... [-o ] create (-r )... [-p ]... [-o ] @@ -67,6 +73,22 @@ cloudidentity1 [options] patch (-r )... [-p ]... [-o ] search [-p ]... [-o ] update-security-settings (-r )... [-p ]... [-o ] + inbound-saml-sso-profiles + create (-r )... [-p ]... [-o ] + delete [-p ]... [-o ] + get [-p ]... [-o ] + idp-credentials-add (-r )... [-p ]... [-o ] + idp-credentials-delete [-p ]... [-o ] + idp-credentials-get [-p ]... [-o ] + idp-credentials-list [-p ]... [-o ] + list [-p ]... [-o ] + patch (-r )... [-p ]... [-o ] + inbound-sso-assignments + create (-r )... [-p ]... [-o ] + delete [-p ]... [-o ] + get [-p ]... [-o ] + list [-p ]... [-o ] + patch (-r )... [-p ]... 
[-o ] cloudidentity1 --help Configuration: diff --git a/gen/cloudidentity1-cli/mkdocs.yml b/gen/cloudidentity1-cli/mkdocs.yml index c39f50b743..1b2b439b4a 100644 --- a/gen/cloudidentity1-cli/mkdocs.yml +++ b/gen/cloudidentity1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Identity v4.0.1+20220301 +site_name: Cloud Identity v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-cloudidentity1-cli site_description: A complete library to interact with Cloud Identity (protocol v1) @@ -7,44 +7,68 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudidentity1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['devices_cancel-wipe.md', 'Devices', 'Cancel Wipe'] -- ['devices_create.md', 'Devices', 'Create'] -- ['devices_delete.md', 'Devices', 'Delete'] -- ['devices_device-users-approve.md', 'Devices', 'Device Users Approve'] -- ['devices_device-users-block.md', 'Devices', 'Device Users Block'] -- ['devices_device-users-cancel-wipe.md', 'Devices', 'Device Users Cancel Wipe'] -- ['devices_device-users-client-states-get.md', 'Devices', 'Device Users Client States Get'] -- ['devices_device-users-client-states-list.md', 'Devices', 'Device Users Client States List'] -- ['devices_device-users-client-states-patch.md', 'Devices', 'Device Users Client States Patch'] -- ['devices_device-users-delete.md', 'Devices', 'Device Users Delete'] -- ['devices_device-users-get.md', 'Devices', 'Device Users Get'] -- ['devices_device-users-list.md', 'Devices', 'Device Users List'] -- ['devices_device-users-lookup.md', 'Devices', 'Device Users Lookup'] -- ['devices_device-users-wipe.md', 'Devices', 'Device Users Wipe'] -- ['devices_get.md', 'Devices', 'Get'] -- ['devices_list.md', 'Devices', 'List'] -- ['devices_wipe.md', 'Devices', 'Wipe'] -- ['groups_create.md', 'Groups', 'Create'] -- ['groups_delete.md', 'Groups', 'Delete'] -- ['groups_get.md', 'Groups', 'Get'] -- ['groups_get-security-settings.md', 'Groups', 'Get Security Settings'] 
-- ['groups_list.md', 'Groups', 'List'] -- ['groups_lookup.md', 'Groups', 'Lookup'] -- ['groups_memberships-check-transitive-membership.md', 'Groups', 'Memberships Check Transitive Membership'] -- ['groups_memberships-create.md', 'Groups', 'Memberships Create'] -- ['groups_memberships-delete.md', 'Groups', 'Memberships Delete'] -- ['groups_memberships-get.md', 'Groups', 'Memberships Get'] -- ['groups_memberships-get-membership-graph.md', 'Groups', 'Memberships Get Membership Graph'] -- ['groups_memberships-list.md', 'Groups', 'Memberships List'] -- ['groups_memberships-lookup.md', 'Groups', 'Memberships Lookup'] -- ['groups_memberships-modify-membership-roles.md', 'Groups', 'Memberships Modify Membership Roles'] -- ['groups_memberships-search-transitive-groups.md', 'Groups', 'Memberships Search Transitive Groups'] -- ['groups_memberships-search-transitive-memberships.md', 'Groups', 'Memberships Search Transitive Memberships'] -- ['groups_patch.md', 'Groups', 'Patch'] -- ['groups_search.md', 'Groups', 'Search'] -- ['groups_update-security-settings.md', 'Groups', 'Update Security Settings'] +nav: +- Home: 'index.md' +- 'Customers': + - 'Userinvitations Cancel': 'customers_userinvitations-cancel.md' + - 'Userinvitations Get': 'customers_userinvitations-get.md' + - 'Userinvitations Is Invitable User': 'customers_userinvitations-is-invitable-user.md' + - 'Userinvitations List': 'customers_userinvitations-list.md' + - 'Userinvitations Send': 'customers_userinvitations-send.md' +- 'Devices': + - 'Cancel Wipe': 'devices_cancel-wipe.md' + - 'Create': 'devices_create.md' + - 'Delete': 'devices_delete.md' + - 'Device Users Approve': 'devices_device-users-approve.md' + - 'Device Users Block': 'devices_device-users-block.md' + - 'Device Users Cancel Wipe': 'devices_device-users-cancel-wipe.md' + - 'Device Users Client States Get': 'devices_device-users-client-states-get.md' + - 'Device Users Client States List': 'devices_device-users-client-states-list.md' + - 'Device Users 
Client States Patch': 'devices_device-users-client-states-patch.md' + - 'Device Users Delete': 'devices_device-users-delete.md' + - 'Device Users Get': 'devices_device-users-get.md' + - 'Device Users List': 'devices_device-users-list.md' + - 'Device Users Lookup': 'devices_device-users-lookup.md' + - 'Device Users Wipe': 'devices_device-users-wipe.md' + - 'Get': 'devices_get.md' + - 'List': 'devices_list.md' + - 'Wipe': 'devices_wipe.md' +- 'Groups': + - 'Create': 'groups_create.md' + - 'Delete': 'groups_delete.md' + - 'Get': 'groups_get.md' + - 'Get Security Settings': 'groups_get-security-settings.md' + - 'List': 'groups_list.md' + - 'Lookup': 'groups_lookup.md' + - 'Memberships Check Transitive Membership': 'groups_memberships-check-transitive-membership.md' + - 'Memberships Create': 'groups_memberships-create.md' + - 'Memberships Delete': 'groups_memberships-delete.md' + - 'Memberships Get': 'groups_memberships-get.md' + - 'Memberships Get Membership Graph': 'groups_memberships-get-membership-graph.md' + - 'Memberships List': 'groups_memberships-list.md' + - 'Memberships Lookup': 'groups_memberships-lookup.md' + - 'Memberships Modify Membership Roles': 'groups_memberships-modify-membership-roles.md' + - 'Memberships Search Transitive Groups': 'groups_memberships-search-transitive-groups.md' + - 'Memberships Search Transitive Memberships': 'groups_memberships-search-transitive-memberships.md' + - 'Patch': 'groups_patch.md' + - 'Search': 'groups_search.md' + - 'Update Security Settings': 'groups_update-security-settings.md' +- 'Inbound Saml Sso Profiles': + - 'Create': 'inbound-saml-sso-profiles_create.md' + - 'Delete': 'inbound-saml-sso-profiles_delete.md' + - 'Get': 'inbound-saml-sso-profiles_get.md' + - 'Idp Credentials Add': 'inbound-saml-sso-profiles_idp-credentials-add.md' + - 'Idp Credentials Delete': 'inbound-saml-sso-profiles_idp-credentials-delete.md' + - 'Idp Credentials Get': 'inbound-saml-sso-profiles_idp-credentials-get.md' + - 'Idp Credentials 
List': 'inbound-saml-sso-profiles_idp-credentials-list.md' + - 'List': 'inbound-saml-sso-profiles_list.md' + - 'Patch': 'inbound-saml-sso-profiles_patch.md' +- 'Inbound Sso Assignments': + - 'Create': 'inbound-sso-assignments_create.md' + - 'Delete': 'inbound-sso-assignments_delete.md' + - 'Get': 'inbound-sso-assignments_get.md' + - 'List': 'inbound-sso-assignments_list.md' + - 'Patch': 'inbound-sso-assignments_patch.md' theme: readthedocs diff --git a/gen/cloudidentity1-cli/src/client.rs b/gen/cloudidentity1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudidentity1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudidentity1-cli/src/main.rs b/gen/cloudidentity1-cli/src/main.rs index 029e50dc17..5a359496ed 100644 --- a/gen/cloudidentity1-cli/src/main.rs +++ b/gen/cloudidentity1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudidentity1::{api, Error, oauth2}; +use google_cloudidentity1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,328 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _customers_userinvitations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CancelUserInvitationRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().userinvitations_cancel(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_userinvitations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), 
DoitError> { + let mut call = self.hub.customers().userinvitations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_userinvitations_is_invitable_user(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.customers().userinvitations_is_invitable_user(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_userinvitations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.customers().userinvitations_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _customers_userinvitations_send(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SendUserInvitationRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.customers().userinvitations_send(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _devices_cancel_wipe(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -170,6 
+491,7 @@ where "build-number" => Some(("buildNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compromised-state" => Some(("compromisedState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-id" => Some(("deviceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-type" => Some(("deviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "enabled-developer-options" => Some(("enabledDeveloperOptions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enabled-usb-debugging" => Some(("enabledUsbDebugging", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -191,7 +513,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "wifi-mac-addresses" => Some(("wifiMacAddresses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["android-specific-attributes", "asset-tag", "baseband-version", "bootloader-version", "brand", "build-number", "compromised-state", "create-time", "device-type", "enabled-developer-options", "enabled-unknown-sources", "enabled-usb-debugging", "encryption-state", "imei", "kernel-version", "last-sync-time", "management-state", "manufacturer", "meid", "model", "name", "network-operator", "os-version", "other-accounts", "owner-profile-account", "owner-type", "ownership-privilege", "release-version", "security-patch-time", "serial-number", "supports-work-profile", "wifi-mac-addresses"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["android-specific-attributes", "asset-tag", "baseband-version", "bootloader-version", "brand", "build-number", "compromised-state", "create-time", "device-id", "device-type", "enabled-developer-options", 
"enabled-unknown-sources", "enabled-usb-debugging", "encryption-state", "imei", "kernel-version", "last-sync-time", "management-state", "manufacturer", "meid", "model", "name", "network-operator", "os-version", "other-accounts", "owner-profile-account", "owner-type", "ownership-privilege", "release-version", "security-patch-time", "serial-number", "supports-work-profile", "wifi-mac-addresses"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -737,7 +1059,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "customer" => { call = call.customer(value.unwrap_or("")); @@ -911,7 +1233,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -985,7 +1307,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "android-id" => { call = call.android_id(value.unwrap_or("")); @@ -1191,7 +1513,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1545,7 +1867,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); 
match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1610,7 +1932,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2037,7 +2359,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2243,7 +2565,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2302,7 +2624,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2401,7 +2723,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2466,7 +2788,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", 
"integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2557,7 +2879,965 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); 
+ + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "customer" => Some(("customer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "idp-config.change-password-uri" => Some(("idpConfig.changePasswordUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "idp-config.entity-id" => Some(("idpConfig.entityId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "idp-config.logout-redirect-uri" => Some(("idpConfig.logoutRedirectUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "idp-config.single-sign-on-service-uri" => Some(("idpConfig.singleSignOnServiceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sp-config.assertion-consumer-service-uri" => Some(("spConfig.assertionConsumerServiceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sp-config.entity-id" => Some(("spConfig.entityId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["assertion-consumer-service-uri", "change-password-uri", "customer", "display-name", "entity-id", "idp-config", "logout-redirect-uri", "name", "single-sign-on-service-uri", "sp-config"]); + 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InboundSamlSsoProfile = json::value::from_value(object).unwrap(); + let mut call = self.hub.inbound_saml_sso_profiles().create(request); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> 
Result<(), DoitError> { + let mut call = self.hub.inbound_saml_sso_profiles().delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.inbound_saml_sso_profiles().get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found 
= true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_idp_credentials_add(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "pem-data" => Some(("pemData", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["pem-data"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::AddIdpCredentialRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.inbound_saml_sso_profiles().idp_credentials_add(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); 
+ json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_idp_credentials_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.inbound_saml_sso_profiles().idp_credentials_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_idp_credentials_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.inbound_saml_sso_profiles().idp_credentials_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_idp_credentials_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.inbound_saml_sso_profiles().idp_credentials_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + 
"page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.inbound_saml_sso_profiles().list(); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + 
call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_saml_sso_profiles_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if 
err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "customer" => Some(("customer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "idp-config.change-password-uri" => Some(("idpConfig.changePasswordUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "idp-config.entity-id" => Some(("idpConfig.entityId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "idp-config.logout-redirect-uri" => Some(("idpConfig.logoutRedirectUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "idp-config.single-sign-on-service-uri" => Some(("idpConfig.singleSignOnServiceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sp-config.assertion-consumer-service-uri" => Some(("spConfig.assertionConsumerServiceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sp-config.entity-id" => Some(("spConfig.entityId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["assertion-consumer-service-uri", "change-password-uri", "customer", "display-name", "entity-id", "idp-config", "logout-redirect-uri", "name", "single-sign-on-service-uri", "sp-config"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InboundSamlSsoProfile = 
json::value::from_value(object).unwrap(); + let mut call = self.hub.inbound_saml_sso_profiles().patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_sso_assignments_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = 
json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "customer" => Some(("customer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rank" => Some(("rank", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "saml-sso-info.inbound-saml-sso-profile" => Some(("samlSsoInfo.inboundSamlSsoProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sign-in-behavior.redirect-condition" => Some(("signInBehavior.redirectCondition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sso-mode" => Some(("ssoMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-group" => Some(("targetGroup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-org-unit" => Some(("targetOrgUnit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["customer", "inbound-saml-sso-profile", "name", "rank", "redirect-condition", "saml-sso-info", "sign-in-behavior", "sso-mode", "target-group", "target-org-unit"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + 
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InboundSsoAssignment = json::value::from_value(object).unwrap(); + let mut call = self.hub.inbound_sso_assignments().create(request); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_sso_assignments_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.inbound_sso_assignments().delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + 
let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_sso_assignments_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.inbound_sso_assignments().get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + 
{let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_sso_assignments_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.inbound_sso_assignments().list(); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } 
)); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inbound_sso_assignments_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "customer" => Some(("customer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rank" => Some(("rank", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "saml-sso-info.inbound-saml-sso-profile" => Some(("samlSsoInfo.inboundSamlSsoProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sign-in-behavior.redirect-condition" => Some(("signInBehavior.redirectCondition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sso-mode" => Some(("ssoMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-group" => Some(("targetGroup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-org-unit" => Some(("targetOrgUnit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["customer", "inbound-saml-sso-profile", "name", "rank", "redirect-condition", "saml-sso-info", "sign-in-behavior", "sso-mode", "target-group", "target-org-unit"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InboundSsoAssignment = json::value::from_value(object).unwrap(); + let mut call = self.hub.inbound_sso_assignments().patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut 
found = false; @@ -2611,6 +3891,29 @@ where let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: Option = None; match self.opt.subcommand() { + ("customers", Some(opt)) => { + match opt.subcommand() { + ("userinvitations-cancel", Some(opt)) => { + call_result = self._customers_userinvitations_cancel(opt, dry_run, &mut err).await; + }, + ("userinvitations-get", Some(opt)) => { + call_result = self._customers_userinvitations_get(opt, dry_run, &mut err).await; + }, + ("userinvitations-is-invitable-user", Some(opt)) => { + call_result = self._customers_userinvitations_is_invitable_user(opt, dry_run, &mut err).await; + }, + ("userinvitations-list", Some(opt)) => { + call_result = self._customers_userinvitations_list(opt, dry_run, &mut err).await; + }, + ("userinvitations-send", Some(opt)) => { + call_result = self._customers_userinvitations_send(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("customers".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("devices", Some(opt)) => { match opt.subcommand() { ("cancel-wipe", Some(opt)) => { @@ -2735,6 +4038,64 @@ where } } }, + ("inbound-saml-sso-profiles", Some(opt)) => { + match opt.subcommand() { + ("create", Some(opt)) => { + call_result = self._inbound_saml_sso_profiles_create(opt, dry_run, &mut err).await; + }, + ("delete", Some(opt)) => { + call_result = self._inbound_saml_sso_profiles_delete(opt, dry_run, &mut err).await; + }, + ("get", Some(opt)) => { + call_result = self._inbound_saml_sso_profiles_get(opt, dry_run, &mut err).await; + }, + ("idp-credentials-add", Some(opt)) => { + call_result = self._inbound_saml_sso_profiles_idp_credentials_add(opt, dry_run, &mut err).await; + }, + ("idp-credentials-delete", Some(opt)) => { + call_result = self._inbound_saml_sso_profiles_idp_credentials_delete(opt, dry_run, &mut err).await; + }, + ("idp-credentials-get", Some(opt)) => { + call_result = 
self._inbound_saml_sso_profiles_idp_credentials_get(opt, dry_run, &mut err).await; + }, + ("idp-credentials-list", Some(opt)) => { + call_result = self._inbound_saml_sso_profiles_idp_credentials_list(opt, dry_run, &mut err).await; + }, + ("list", Some(opt)) => { + call_result = self._inbound_saml_sso_profiles_list(opt, dry_run, &mut err).await; + }, + ("patch", Some(opt)) => { + call_result = self._inbound_saml_sso_profiles_patch(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("inbound-saml-sso-profiles".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, + ("inbound-sso-assignments", Some(opt)) => { + match opt.subcommand() { + ("create", Some(opt)) => { + call_result = self._inbound_sso_assignments_create(opt, dry_run, &mut err).await; + }, + ("delete", Some(opt)) => { + call_result = self._inbound_sso_assignments_delete(opt, dry_run, &mut err).await; + }, + ("get", Some(opt)) => { + call_result = self._inbound_sso_assignments_get(opt, dry_run, &mut err).await; + }, + ("list", Some(opt)) => { + call_result = self._inbound_sso_assignments_list(opt, dry_run, &mut err).await; + }, + ("patch", Some(opt)) => { + call_result = self._inbound_sso_assignments_patch(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("inbound-sso-assignments".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, _ => { err.issues.push(CLIError::MissingCommandError); writeln!(io::stderr(), "{}\n", self.opt.usage()).ok(); @@ -2808,6 +4169,131 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ + ("customers", "methods: 'userinvitations-cancel', 'userinvitations-get', 'userinvitations-is-invitable-user', 'userinvitations-list' and 'userinvitations-send'", vec![ + ("userinvitations-cancel", + Some(r##"Cancels a UserInvitation that was already sent."##), + "Details at 
http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/customers_userinvitations-cancel", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. `UserInvitation` name in the format `customers/{customer}/userinvitations/{user_email_address}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("userinvitations-get", + Some(r##"Retrieves a UserInvitation resource. **Note:** New consumer accounts with the customer's verified domain created within the previous 48 hours will not appear in the result. This delay also applies to newly-verified domains."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/customers_userinvitations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. `UserInvitation` name in the format `customers/{customer}/userinvitations/{user_email_address}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("userinvitations-is-invitable-user", + Some(r##"Verifies whether a user account is eligible to receive a UserInvitation (is an unmanaged account). 
Eligibility is based on the following criteria: * the email address is a consumer account and it's the primary email address of the account, and * the domain of the email address matches an existing verified Google Workspace or Cloud Identity domain If both conditions are met, the user is eligible. **Note:** This method is not supported for Workspace Essentials customers."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/customers_userinvitations-is-invitable-user", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. `UserInvitation` name in the format `customers/{customer}/userinvitations/{user_email_address}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("userinvitations-list", + Some(r##"Retrieves a list of UserInvitation resources. **Note:** New consumer accounts with the customer's verified domain created within the previous 48 hours will not appear in the result. This delay also applies to newly-verified domains."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/customers_userinvitations-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The customer ID of the Google Workspace or Cloud Identity account the UserInvitation resources are associated with."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("userinvitations-send", + Some(r##"Sends a UserInvitation to email. 
If the `UserInvitation` does not exist for this request and it is a valid request, the request creates a `UserInvitation`. **Note:** The `get` and `list` methods have a 48-hour delay where newly-created consumer accounts will not appear in the results. You can still send a `UserInvitation` to those accounts if you know the unmanaged email address and IsInvitableUser==True."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/customers_userinvitations-send", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. `UserInvitation` name in the format `customers/{customer}/userinvitations/{user_email_address}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("devices", "methods: 'cancel-wipe', 'create', 'delete', 'device-users-approve', 'device-users-block', 'device-users-cancel-wipe', 'device-users-client-states-get', 'device-users-client-states-list', 'device-users-client-states-patch', 'device-users-delete', 'device-users-get', 'device-users-list', 'device-users-lookup', 'device-users-wipe', 'get', 'list' and 'wipe'", vec![ ("cancel-wipe", Some(r##"Cancels an unfinished device wipe. This operation can be used to cancel device wipe in the gap between the wipe operation returning success and the device being wiped. This operation is possible when the device is in a "pending wipe" state. The device enters the "pending wipe" state when a wipe device command is issued, but has not yet been sent to the device. 
The cancel wipe will fail if the wipe command has already been issued to the device."##), @@ -3648,11 +5134,331 @@ async fn main() { ]), ]), + ("inbound-saml-sso-profiles", "methods: 'create', 'delete', 'get', 'idp-credentials-add', 'idp-credentials-delete', 'idp-credentials-get', 'idp-credentials-list', 'list' and 'patch'", vec![ + ("create", + Some(r##"Creates an InboundSamlSsoProfile for a customer."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_create", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("delete", + Some(r##"Deletes an InboundSamlSsoProfile."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The [resource name](https://cloud.google.com/apis/design/resource_names) of the InboundSamlSsoProfile to delete. Format: `inboundSamlSsoProfiles/{sso_profile_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"Gets an InboundSamlSsoProfile."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The [resource name](https://cloud.google.com/apis/design/resource_names) of the InboundSamlSsoProfile to get. Format: `inboundSamlSsoProfiles/{sso_profile_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("idp-credentials-add", + Some(r##"Adds an IdpCredential. Up to 2 credentials are allowed."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_idp-credentials-add", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The InboundSamlSsoProfile that owns the IdpCredential. Format: `inboundSamlSsoProfiles/{sso_profile_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("idp-credentials-delete", + Some(r##"Deletes an IdpCredential."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_idp-credentials-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The [resource name](https://cloud.google.com/apis/design/resource_names) of the IdpCredential to delete. 
Format: `inboundSamlSsoProfiles/{sso_profile_id}/idpCredentials/{idp_credential_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("idp-credentials-get", + Some(r##"Gets an IdpCredential."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_idp-credentials-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The [resource name](https://cloud.google.com/apis/design/resource_names) of the IdpCredential to retrieve. Format: `inboundSamlSsoProfiles/{sso_profile_id}/idpCredentials/{idp_credential_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("idp-credentials-list", + Some(r##"Returns a list of IdpCredentials in an InboundSamlSsoProfile."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_idp-credentials-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent, which owns this collection of `IdpCredential`s. 
Format: `inboundSamlSsoProfiles/{sso_profile_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list", + Some(r##"Lists InboundSamlSsoProfiles for a customer."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_list", + vec![ + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("patch", + Some(r##"Updates an InboundSamlSsoProfile."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-saml-sso-profiles_patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. 
[Resource name](https://cloud.google.com/apis/design/resource_names) of the SAML SSO profile."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + + ("inbound-sso-assignments", "methods: 'create', 'delete', 'get', 'list' and 'patch'", vec![ + ("create", + Some(r##"Creates an InboundSsoAssignment for users and devices in a `Customer` under a given `Group` or `OrgUnit`."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-sso-assignments_create", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("delete", + Some(r##"Deletes an InboundSsoAssignment. To disable SSO, Create (or Update) an assignment that has `sso_mode` == `SSO_OFF`."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-sso-assignments_delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The [resource name](https://cloud.google.com/apis/design/resource_names) of the InboundSsoAssignment to delete. 
Format: `inboundSsoAssignments/{assignment}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"Gets an InboundSsoAssignment."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-sso-assignments_get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The [resource name](https://cloud.google.com/apis/design/resource_names) of the InboundSsoAssignment to fetch. Format: `inboundSsoAssignments/{assignment}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list", + Some(r##"Lists the InboundSsoAssignments for a `Customer`."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-sso-assignments_list", + vec![ + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("patch", + Some(r##"Updates an InboundSsoAssignment. The body of this request is the `inbound_sso_assignment` field and the `update_mask` is relative to that. 
For example: a PATCH to `/v1/inboundSsoAssignments/0abcdefg1234567&update_mask=rank` with a body of `{ "rank": 1 }` moves that (presumably group-targeted) SSO assignment to the highest priority and shifts any other group-targeted assignments down in priority."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli/inbound-sso-assignments_patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. [Resource name](https://cloud.google.com/apis/design/resource_names) of the Inbound SSO Assignment."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ]; let mut app = App::new("cloudidentity1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230117") .about("API for provisioning and managing identity resources.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudidentity1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudidentity1/Cargo.toml b/gen/cloudidentity1/Cargo.toml index a3b45d1140..a3162ad0df 100644 --- a/gen/cloudidentity1/Cargo.toml +++ b/gen/cloudidentity1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudidentity1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Identity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudidentity1" homepage = "https://cloud.google.com/identity/" -documentation = 
"https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-cloudidentity1/5.0.2+20230117" license = "MIT" keywords = ["cloudidentity", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudidentity1/README.md b/gen/cloudidentity1/README.md index 75ea2cd25a..d4620a3304 100644 --- a/gen/cloudidentity1/README.md +++ b/gen/cloudidentity1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-cloudidentity1` library allows access to all features of the *Google Cloud Identity* service. -This documentation was generated from *Cloud Identity* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *cloudidentity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Identity* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *cloudidentity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Identity* *v1* API can be found at the [official documentation site](https://cloud.google.com/identity/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/CloudIdentity) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/CloudIdentity) ... 
* customers - * [*userinvitations cancel*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::CustomerUserinvitationCancelCall), [*userinvitations get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::CustomerUserinvitationGetCall), [*userinvitations is invitable user*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::CustomerUserinvitationIsInvitableUserCall), [*userinvitations list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::CustomerUserinvitationListCall) and [*userinvitations send*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::CustomerUserinvitationSendCall) + * [*userinvitations cancel*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::CustomerUserinvitationCancelCall), [*userinvitations get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::CustomerUserinvitationGetCall), [*userinvitations is invitable user*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::CustomerUserinvitationIsInvitableUserCall), [*userinvitations list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::CustomerUserinvitationListCall) and [*userinvitations send*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::CustomerUserinvitationSendCall) * devices - * [*cancel wipe*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceCancelWipeCall), [*create*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceCreateCall), [*delete*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeleteCall), [*device users 
approve*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserApproveCall), [*device users block*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserBlockCall), [*device users cancel wipe*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserCancelWipeCall), [*device users client states get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserClientStateGetCall), [*device users client states list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserClientStateListCall), [*device users client states patch*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserClientStatePatchCall), [*device users delete*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserDeleteCall), [*device users get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserGetCall), [*device users list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserListCall), [*device users lookup*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserLookupCall), [*device users wipe*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceDeviceUserWipeCall), [*get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceGetCall), [*list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceListCall) and [*wipe*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::DeviceWipeCall) -* 
[groups](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::Group) - * [*create*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupCreateCall), [*delete*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupDeleteCall), [*get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupGetCall), [*get security settings*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupGetSecuritySettingCall), [*list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupListCall), [*lookup*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupLookupCall), [*memberships check transitive membership*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipCheckTransitiveMembershipCall), [*memberships create*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipCreateCall), [*memberships delete*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipDeleteCall), [*memberships get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipGetCall), [*memberships get membership graph*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipGetMembershipGraphCall), [*memberships list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipListCall), [*memberships lookup*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipLookupCall), [*memberships modify membership 
roles*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipModifyMembershipRoleCall), [*memberships search transitive groups*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipSearchTransitiveGroupCall), [*memberships search transitive memberships*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupMembershipSearchTransitiveMembershipCall), [*patch*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupPatchCall), [*search*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupSearchCall) and [*update security settings*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::GroupUpdateSecuritySettingCall) -* [inbound saml sso profiles](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfile) - * [*create*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfileCreateCall), [*delete*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfileDeleteCall), [*get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfileGetCall), [*idp credentials add*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfileIdpCredentialAddCall), [*idp credentials delete*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfileIdpCredentialDeleteCall), [*idp credentials get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfileIdpCredentialGetCall), [*idp credentials 
list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfileIdpCredentialListCall), [*list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfileListCall) and [*patch*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSamlSsoProfilePatchCall) -* [inbound sso assignments](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSsoAssignment) - * [*create*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSsoAssignmentCreateCall), [*delete*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSsoAssignmentDeleteCall), [*get*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSsoAssignmentGetCall), [*list*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSsoAssignmentListCall) and [*patch*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/api::InboundSsoAssignmentPatchCall) + * [*cancel wipe*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceCancelWipeCall), [*create*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceCreateCall), [*delete*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeleteCall), [*device users approve*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserApproveCall), [*device users block*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserBlockCall), [*device users cancel wipe*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserCancelWipeCall), [*device users client states 
get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserClientStateGetCall), [*device users client states list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserClientStateListCall), [*device users client states patch*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserClientStatePatchCall), [*device users delete*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserDeleteCall), [*device users get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserGetCall), [*device users list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserListCall), [*device users lookup*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserLookupCall), [*device users wipe*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceDeviceUserWipeCall), [*get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceGetCall), [*list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceListCall) and [*wipe*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::DeviceWipeCall) +* [groups](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::Group) + * [*create*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupCreateCall), [*delete*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupDeleteCall), [*get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupGetCall), [*get security settings*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupGetSecuritySettingCall), 
[*list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupListCall), [*lookup*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupLookupCall), [*memberships check transitive membership*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipCheckTransitiveMembershipCall), [*memberships create*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipCreateCall), [*memberships delete*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipDeleteCall), [*memberships get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipGetCall), [*memberships get membership graph*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipGetMembershipGraphCall), [*memberships list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipListCall), [*memberships lookup*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipLookupCall), [*memberships modify membership roles*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipModifyMembershipRoleCall), [*memberships search transitive groups*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipSearchTransitiveGroupCall), [*memberships search transitive memberships*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupMembershipSearchTransitiveMembershipCall), [*patch*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupPatchCall), [*search*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupSearchCall) and [*update security 
settings*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::GroupUpdateSecuritySettingCall) +* [inbound saml sso profiles](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfile) + * [*create*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfileCreateCall), [*delete*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfileDeleteCall), [*get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfileGetCall), [*idp credentials add*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfileIdpCredentialAddCall), [*idp credentials delete*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfileIdpCredentialDeleteCall), [*idp credentials get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfileIdpCredentialGetCall), [*idp credentials list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfileIdpCredentialListCall), [*list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfileListCall) and [*patch*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSamlSsoProfilePatchCall) +* [inbound sso assignments](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSsoAssignment) + * [*create*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSsoAssignmentCreateCall), [*delete*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSsoAssignmentDeleteCall), [*get*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSsoAssignmentGetCall), 
[*list*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSsoAssignmentListCall) and [*patch*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/api::InboundSsoAssignmentPatchCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/CloudIdentity)** +* **[Hub](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/CloudIdentity)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::Part)** + * **[Parts](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -160,17 +160,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -180,29 +180,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudidentity1/5.0.2-beta-1+20230117/google_cloudidentity1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudidentity1/5.0.2+20230117/google_cloudidentity1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudidentity1/src/api.rs b/gen/cloudidentity1/src/api.rs index ce747320ed..c0e6769a97 100644 --- a/gen/cloudidentity1/src/api.rs +++ b/gen/cloudidentity1/src/api.rs @@ -147,7 +147,7 @@ impl<'a, S> CloudIdentity { CloudIdentity { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudidentity.googleapis.com/".to_string(), _root_url: "https://cloudidentity.googleapis.com/".to_string(), } @@ -170,7 +170,7 @@ impl<'a, S> CloudIdentity { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudidentity1/src/client.rs b/gen/cloudidentity1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudidentity1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudidentity1/src/lib.rs b/gen/cloudidentity1/src/lib.rs index b96a3924b3..99039b998c 100644 --- a/gen/cloudidentity1/src/lib.rs +++ b/gen/cloudidentity1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Identity* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *cloudidentity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Identity* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *cloudidentity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Identity* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/identity/). diff --git a/gen/cloudiot1-cli/Cargo.toml b/gen/cloudiot1-cli/Cargo.toml index dcd049f289..f24be06f06 100644 --- a/gen/cloudiot1-cli/Cargo.toml +++ b/gen/cloudiot1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudiot1-cli" -version = "4.0.1+20220131" +version = "5.0.2+20230102" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Iot (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudiot1-cli" @@ -20,13 +20,13 @@ name = "cloudiot1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudiot1] path = "../cloudiot1" -version = "4.0.1+20220131" +version = "5.0.2+20230102" + diff --git a/gen/cloudiot1-cli/README.md b/gen/cloudiot1-cli/README.md index bf48a39f0b..9c224f698a 100644 --- a/gen/cloudiot1-cli/README.md +++ b/gen/cloudiot1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Cloud Iot* API at revision *20220131*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Iot* API at revision *20230102*. The CLI is at version *5.0.2*. ```bash cloudiot1 [options] diff --git a/gen/cloudiot1-cli/mkdocs.yml b/gen/cloudiot1-cli/mkdocs.yml index 0e41a54e38..4eaac0a535 100644 --- a/gen/cloudiot1-cli/mkdocs.yml +++ b/gen/cloudiot1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Iot v4.0.1+20220131 +site_name: Cloud Iot v5.0.2+20230102 site_url: http://byron.github.io/google-apis-rs/google-cloudiot1-cli site_description: A complete library to interact with Cloud Iot (protocol v1) @@ -7,31 +7,32 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudiot1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-registries-bind-device-to-gateway.md', 'Projects', 'Locations Registries Bind Device To Gateway'] -- ['projects_locations-registries-create.md', 'Projects', 'Locations Registries Create'] -- ['projects_locations-registries-delete.md', 'Projects', 'Locations Registries Delete'] -- ['projects_locations-registries-devices-config-versions-list.md', 'Projects', 'Locations Registries Devices Config Versions List'] -- ['projects_locations-registries-devices-create.md', 'Projects', 'Locations Registries Devices Create'] -- ['projects_locations-registries-devices-delete.md', 'Projects', 'Locations Registries Devices Delete'] -- ['projects_locations-registries-devices-get.md', 'Projects', 'Locations Registries Devices Get'] -- ['projects_locations-registries-devices-list.md', 'Projects', 'Locations Registries Devices List'] -- ['projects_locations-registries-devices-modify-cloud-to-device-config.md', 'Projects', 'Locations Registries Devices Modify Cloud To Device Config'] -- ['projects_locations-registries-devices-patch.md', 'Projects', 'Locations Registries Devices Patch'] -- 
['projects_locations-registries-devices-send-command-to-device.md', 'Projects', 'Locations Registries Devices Send Command To Device'] -- ['projects_locations-registries-devices-states-list.md', 'Projects', 'Locations Registries Devices States List'] -- ['projects_locations-registries-get.md', 'Projects', 'Locations Registries Get'] -- ['projects_locations-registries-get-iam-policy.md', 'Projects', 'Locations Registries Get Iam Policy'] -- ['projects_locations-registries-groups-devices-list.md', 'Projects', 'Locations Registries Groups Devices List'] -- ['projects_locations-registries-groups-get-iam-policy.md', 'Projects', 'Locations Registries Groups Get Iam Policy'] -- ['projects_locations-registries-groups-set-iam-policy.md', 'Projects', 'Locations Registries Groups Set Iam Policy'] -- ['projects_locations-registries-groups-test-iam-permissions.md', 'Projects', 'Locations Registries Groups Test Iam Permissions'] -- ['projects_locations-registries-list.md', 'Projects', 'Locations Registries List'] -- ['projects_locations-registries-patch.md', 'Projects', 'Locations Registries Patch'] -- ['projects_locations-registries-set-iam-policy.md', 'Projects', 'Locations Registries Set Iam Policy'] -- ['projects_locations-registries-test-iam-permissions.md', 'Projects', 'Locations Registries Test Iam Permissions'] -- ['projects_locations-registries-unbind-device-from-gateway.md', 'Projects', 'Locations Registries Unbind Device From Gateway'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Registries Bind Device To Gateway': 'projects_locations-registries-bind-device-to-gateway.md' + - 'Locations Registries Create': 'projects_locations-registries-create.md' + - 'Locations Registries Delete': 'projects_locations-registries-delete.md' + - 'Locations Registries Devices Config Versions List': 'projects_locations-registries-devices-config-versions-list.md' + - 'Locations Registries Devices Create': 'projects_locations-registries-devices-create.md' + - 'Locations 
Registries Devices Delete': 'projects_locations-registries-devices-delete.md' + - 'Locations Registries Devices Get': 'projects_locations-registries-devices-get.md' + - 'Locations Registries Devices List': 'projects_locations-registries-devices-list.md' + - 'Locations Registries Devices Modify Cloud To Device Config': 'projects_locations-registries-devices-modify-cloud-to-device-config.md' + - 'Locations Registries Devices Patch': 'projects_locations-registries-devices-patch.md' + - 'Locations Registries Devices Send Command To Device': 'projects_locations-registries-devices-send-command-to-device.md' + - 'Locations Registries Devices States List': 'projects_locations-registries-devices-states-list.md' + - 'Locations Registries Get': 'projects_locations-registries-get.md' + - 'Locations Registries Get Iam Policy': 'projects_locations-registries-get-iam-policy.md' + - 'Locations Registries Groups Devices List': 'projects_locations-registries-groups-devices-list.md' + - 'Locations Registries Groups Get Iam Policy': 'projects_locations-registries-groups-get-iam-policy.md' + - 'Locations Registries Groups Set Iam Policy': 'projects_locations-registries-groups-set-iam-policy.md' + - 'Locations Registries Groups Test Iam Permissions': 'projects_locations-registries-groups-test-iam-permissions.md' + - 'Locations Registries List': 'projects_locations-registries-list.md' + - 'Locations Registries Patch': 'projects_locations-registries-patch.md' + - 'Locations Registries Set Iam Policy': 'projects_locations-registries-set-iam-policy.md' + - 'Locations Registries Test Iam Permissions': 'projects_locations-registries-test-iam-permissions.md' + - 'Locations Registries Unbind Device From Gateway': 'projects_locations-registries-unbind-device-from-gateway.md' theme: readthedocs diff --git a/gen/cloudiot1-cli/src/client.rs b/gen/cloudiot1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudiot1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ 
-// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - 
Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if 
cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudiot1-cli/src/main.rs b/gen/cloudiot1-cli/src/main.rs index 834256ef95..f1a04ac21f 100644 --- a/gen/cloudiot1-cli/src/main.rs +++ b/gen/cloudiot1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudiot1::{api, Error, oauth2}; +use google_cloudiot1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -286,7 +285,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "num-versions" => { - call = call.num_versions(arg_from_str(value.unwrap_or("-0"), err, "num-versions", "integer")); + call = call.num_versions( value.map(|v| arg_from_str(v, err, "num-versions", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -502,7 +501,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -561,7 +560,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "gateway-list-options-gateway-type" => { call = call.gateway_list_options_gateway_type(value.unwrap_or("")); @@ -573,10 +572,10 @@ where call = 
call.gateway_list_options_associations_device_id(value.unwrap_or("")); }, "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "device-num-ids" => { - call = call.add_device_num_ids(value.unwrap_or("")); + call = call.add_device_num_ids( value.map(|v| arg_from_str(v, err, "device-num-ids", "uint64")).unwrap_or(0)); }, "device-ids" => { call = call.add_device_ids(value.unwrap_or("")); @@ -777,7 +776,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -919,7 +918,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "num-states" => { - call = call.num_states(arg_from_str(value.unwrap_or("-0"), err, "num-states", "integer")); + call = call.num_states( value.map(|v| arg_from_str(v, err, "num-states", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1115,7 +1114,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "gateway-list-options-gateway-type" => { call = call.gateway_list_options_gateway_type(value.unwrap_or("")); @@ -1127,10 +1126,10 @@ where call = call.gateway_list_options_associations_device_id(value.unwrap_or("")); }, "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "device-num-ids" => { - call = call.add_device_num_ids(value.unwrap_or("")); + call = 
call.add_device_num_ids( value.map(|v| arg_from_str(v, err, "device-num-ids", "uint64")).unwrap_or(0)); }, "device-ids" => { call = call.add_device_ids(value.unwrap_or("")); @@ -1448,7 +1447,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1542,7 +1541,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2332,7 +2331,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2382,7 +2381,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2410,7 +2409,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2438,7 +2437,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2516,7 +2515,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2544,7 +2543,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2600,7 +2599,7 @@ async fn main() { let mut app = App::new("cloudiot1") .author("Sebastian Thiel ") - .version("4.0.1+20220131") + .version("5.0.2+20230102") .about("Registers and manages IoT (Internet of Things) devices that connect to the Google Cloud Platform. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudiot1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudiot1/Cargo.toml b/gen/cloudiot1/Cargo.toml index 3aecd2f7e2..1596c0f100 100644 --- a/gen/cloudiot1/Cargo.toml +++ b/gen/cloudiot1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudiot1" -version = "5.0.2-beta-1+20230102" +version = "5.0.2+20230102" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Iot (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudiot1" homepage = "https://cloud.google.com/iot" -documentation = "https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102" +documentation = "https://docs.rs/google-cloudiot1/5.0.2+20230102" license = "MIT" keywords = ["cloudiot", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudiot1/README.md b/gen/cloudiot1/README.md index 9283e84fc1..32bffdda70 100644 --- a/gen/cloudiot1/README.md +++ b/gen/cloudiot1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudiot1` library allows access to all features of the *Google Cloud Iot* service. -This documentation was generated from *Cloud Iot* crate version *5.0.2-beta-1+20230102*, where *20230102* is the exact revision of the *cloudiot:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Iot* crate version *5.0.2+20230102*, where *20230102* is the exact revision of the *cloudiot:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Iot* *v1* API can be found at the [official documentation site](https://cloud.google.com/iot). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/CloudIot) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/CloudIot) ... * projects - * [*locations registries bind device to gateway*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryBindDeviceToGatewayCall), [*locations registries create*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryCreateCall), [*locations registries delete*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeleteCall), [*locations registries devices config versions list*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceConfigVersionListCall), [*locations registries devices create*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceCreateCall), [*locations registries devices delete*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceDeleteCall), [*locations registries devices get*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceGetCall), [*locations registries devices list*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceListCall), [*locations registries devices modify cloud to device config*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceModifyCloudToDeviceConfigCall), [*locations registries devices patch*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDevicePatchCall), [*locations registries devices send command to device*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceSendCommandToDeviceCall), [*locations 
registries devices states list*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceStateListCall), [*locations registries get*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryGetCall), [*locations registries get iam policy*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryGetIamPolicyCall), [*locations registries groups devices list*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryGroupDeviceListCall), [*locations registries groups get iam policy*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryGroupGetIamPolicyCall), [*locations registries groups set iam policy*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryGroupSetIamPolicyCall), [*locations registries groups test iam permissions*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryGroupTestIamPermissionCall), [*locations registries list*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryListCall), [*locations registries patch*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryPatchCall), [*locations registries set iam policy*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistrySetIamPolicyCall), [*locations registries test iam permissions*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryTestIamPermissionCall) and [*locations registries unbind device from gateway*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/api::ProjectLocationRegistryUnbindDeviceFromGatewayCall) + * [*locations registries bind device to 
gateway*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryBindDeviceToGatewayCall), [*locations registries create*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryCreateCall), [*locations registries delete*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeleteCall), [*locations registries devices config versions list*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceConfigVersionListCall), [*locations registries devices create*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceCreateCall), [*locations registries devices delete*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceDeleteCall), [*locations registries devices get*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceGetCall), [*locations registries devices list*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceListCall), [*locations registries devices modify cloud to device config*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceModifyCloudToDeviceConfigCall), [*locations registries devices patch*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDevicePatchCall), [*locations registries devices send command to device*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceSendCommandToDeviceCall), [*locations registries devices states list*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryDeviceStateListCall), [*locations registries get*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryGetCall), 
[*locations registries get iam policy*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryGetIamPolicyCall), [*locations registries groups devices list*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryGroupDeviceListCall), [*locations registries groups get iam policy*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryGroupGetIamPolicyCall), [*locations registries groups set iam policy*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryGroupSetIamPolicyCall), [*locations registries groups test iam permissions*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryGroupTestIamPermissionCall), [*locations registries list*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryListCall), [*locations registries patch*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryPatchCall), [*locations registries set iam policy*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistrySetIamPolicyCall), [*locations registries test iam permissions*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryTestIamPermissionCall) and [*locations registries unbind device from gateway*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/api::ProjectLocationRegistryUnbindDeviceFromGatewayCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/CloudIot)** +* **[Hub](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/CloudIot)** * a central object to maintain state and allow accessing all *Activities* - * 
creates [*Method Builders*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::Part)** + * **[Parts](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudiot1/5.0.2-beta-1+20230102/google_cloudiot1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudiot1/5.0.2+20230102/google_cloudiot1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudiot1/src/api.rs b/gen/cloudiot1/src/api.rs index 6cab70bd76..af13caeee6 100644 --- a/gen/cloudiot1/src/api.rs +++ b/gen/cloudiot1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> CloudIot { CloudIot { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudiot.googleapis.com/".to_string(), _root_url: "https://cloudiot.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudIot { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudiot1/src/client.rs b/gen/cloudiot1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudiot1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudiot1/src/lib.rs b/gen/cloudiot1/src/lib.rs index c3ca34c7ba..931498f92a 100644 --- a/gen/cloudiot1/src/lib.rs +++ b/gen/cloudiot1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Iot* crate version *5.0.2-beta-1+20230102*, where *20230102* is the exact revision of the *cloudiot:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Iot* crate version *5.0.2+20230102*, where *20230102* is the exact revision of the *cloudiot:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Iot* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/iot). diff --git a/gen/cloudkms1-cli/Cargo.toml b/gen/cloudkms1-cli/Cargo.toml index 2fbc0d9878..8bd833d829 100644 --- a/gen/cloudkms1-cli/Cargo.toml +++ b/gen/cloudkms1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudkms1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud KMS (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudkms1-cli" @@ -20,13 +20,13 @@ name = "cloudkms1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudkms1] path = "../cloudkms1" -version = "4.0.1+20220225" +version = "5.0.2+20230106" + diff --git a/gen/cloudkms1-cli/README.md b/gen/cloudkms1-cli/README.md index 4e0cd99a79..847aa7ef48 100644 --- a/gen/cloudkms1-cli/README.md +++ b/gen/cloudkms1-cli/README.md @@ -25,11 +25,14 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated 
from the *Cloud KMS* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud KMS* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash cloudkms1 [options] projects + locations-ekm-config-get-iam-policy [-p ]... [-o ] + locations-ekm-config-set-iam-policy (-r )... [-p ]... [-o ] + locations-ekm-config-test-iam-permissions (-r )... [-p ]... [-o ] locations-ekm-connections-create (-r )... [-p ]... [-o ] locations-ekm-connections-get [-p ]... [-o ] locations-ekm-connections-get-iam-policy [-p ]... [-o ] diff --git a/gen/cloudkms1-cli/mkdocs.yml b/gen/cloudkms1-cli/mkdocs.yml index 363d3e7938..9e923430ae 100644 --- a/gen/cloudkms1-cli/mkdocs.yml +++ b/gen/cloudkms1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud KMS v4.0.1+20220225 +site_name: Cloud KMS v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-cloudkms1-cli site_description: A complete library to interact with Cloud KMS (protocol v1) @@ -7,52 +7,56 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudkms1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-ekm-connections-create.md', 'Projects', 'Locations Ekm Connections Create'] -- ['projects_locations-ekm-connections-get.md', 'Projects', 'Locations Ekm Connections Get'] -- ['projects_locations-ekm-connections-get-iam-policy.md', 'Projects', 'Locations Ekm Connections Get Iam Policy'] -- ['projects_locations-ekm-connections-list.md', 'Projects', 'Locations Ekm Connections List'] -- ['projects_locations-ekm-connections-patch.md', 'Projects', 'Locations Ekm Connections Patch'] -- ['projects_locations-ekm-connections-set-iam-policy.md', 'Projects', 'Locations Ekm Connections Set Iam Policy'] -- ['projects_locations-ekm-connections-test-iam-permissions.md', 'Projects', 'Locations Ekm Connections Test Iam Permissions'] -- ['projects_locations-generate-random-bytes.md', 'Projects', 'Locations Generate Random Bytes'] 
-- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-key-rings-create.md', 'Projects', 'Locations Key Rings Create'] -- ['projects_locations-key-rings-crypto-keys-create.md', 'Projects', 'Locations Key Rings Crypto Keys Create'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-asymmetric-decrypt.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Asymmetric Decrypt'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-asymmetric-sign.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Asymmetric Sign'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-create.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Create'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-destroy.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Destroy'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-get.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Get'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-get-public-key.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Get Public Key'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-import.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Import'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-list.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions List'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-mac-sign.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Mac Sign'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-mac-verify.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Mac Verify'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-patch.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Patch'] -- 
['projects_locations-key-rings-crypto-keys-crypto-key-versions-restore.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Restore'] -- ['projects_locations-key-rings-crypto-keys-decrypt.md', 'Projects', 'Locations Key Rings Crypto Keys Decrypt'] -- ['projects_locations-key-rings-crypto-keys-encrypt.md', 'Projects', 'Locations Key Rings Crypto Keys Encrypt'] -- ['projects_locations-key-rings-crypto-keys-get.md', 'Projects', 'Locations Key Rings Crypto Keys Get'] -- ['projects_locations-key-rings-crypto-keys-get-iam-policy.md', 'Projects', 'Locations Key Rings Crypto Keys Get Iam Policy'] -- ['projects_locations-key-rings-crypto-keys-list.md', 'Projects', 'Locations Key Rings Crypto Keys List'] -- ['projects_locations-key-rings-crypto-keys-patch.md', 'Projects', 'Locations Key Rings Crypto Keys Patch'] -- ['projects_locations-key-rings-crypto-keys-set-iam-policy.md', 'Projects', 'Locations Key Rings Crypto Keys Set Iam Policy'] -- ['projects_locations-key-rings-crypto-keys-test-iam-permissions.md', 'Projects', 'Locations Key Rings Crypto Keys Test Iam Permissions'] -- ['projects_locations-key-rings-crypto-keys-update-primary-version.md', 'Projects', 'Locations Key Rings Crypto Keys Update Primary Version'] -- ['projects_locations-key-rings-get.md', 'Projects', 'Locations Key Rings Get'] -- ['projects_locations-key-rings-get-iam-policy.md', 'Projects', 'Locations Key Rings Get Iam Policy'] -- ['projects_locations-key-rings-import-jobs-create.md', 'Projects', 'Locations Key Rings Import Jobs Create'] -- ['projects_locations-key-rings-import-jobs-get.md', 'Projects', 'Locations Key Rings Import Jobs Get'] -- ['projects_locations-key-rings-import-jobs-get-iam-policy.md', 'Projects', 'Locations Key Rings Import Jobs Get Iam Policy'] -- ['projects_locations-key-rings-import-jobs-list.md', 'Projects', 'Locations Key Rings Import Jobs List'] -- ['projects_locations-key-rings-import-jobs-set-iam-policy.md', 'Projects', 'Locations Key Rings Import Jobs Set 
Iam Policy'] -- ['projects_locations-key-rings-import-jobs-test-iam-permissions.md', 'Projects', 'Locations Key Rings Import Jobs Test Iam Permissions'] -- ['projects_locations-key-rings-list.md', 'Projects', 'Locations Key Rings List'] -- ['projects_locations-key-rings-set-iam-policy.md', 'Projects', 'Locations Key Rings Set Iam Policy'] -- ['projects_locations-key-rings-test-iam-permissions.md', 'Projects', 'Locations Key Rings Test Iam Permissions'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Ekm Config Get Iam Policy': 'projects_locations-ekm-config-get-iam-policy.md' + - 'Locations Ekm Config Set Iam Policy': 'projects_locations-ekm-config-set-iam-policy.md' + - 'Locations Ekm Config Test Iam Permissions': 'projects_locations-ekm-config-test-iam-permissions.md' + - 'Locations Ekm Connections Create': 'projects_locations-ekm-connections-create.md' + - 'Locations Ekm Connections Get': 'projects_locations-ekm-connections-get.md' + - 'Locations Ekm Connections Get Iam Policy': 'projects_locations-ekm-connections-get-iam-policy.md' + - 'Locations Ekm Connections List': 'projects_locations-ekm-connections-list.md' + - 'Locations Ekm Connections Patch': 'projects_locations-ekm-connections-patch.md' + - 'Locations Ekm Connections Set Iam Policy': 'projects_locations-ekm-connections-set-iam-policy.md' + - 'Locations Ekm Connections Test Iam Permissions': 'projects_locations-ekm-connections-test-iam-permissions.md' + - 'Locations Generate Random Bytes': 'projects_locations-generate-random-bytes.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Key Rings Create': 'projects_locations-key-rings-create.md' + - 'Locations Key Rings Crypto Keys Create': 'projects_locations-key-rings-crypto-keys-create.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Asymmetric Decrypt': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-asymmetric-decrypt.md' + - 'Locations Key 
Rings Crypto Keys Crypto Key Versions Asymmetric Sign': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-asymmetric-sign.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Create': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-create.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Destroy': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-destroy.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Get': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-get.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Get Public Key': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-get-public-key.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Import': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-import.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions List': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-list.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Mac Sign': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-mac-sign.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Mac Verify': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-mac-verify.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Patch': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-patch.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Restore': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-restore.md' + - 'Locations Key Rings Crypto Keys Decrypt': 'projects_locations-key-rings-crypto-keys-decrypt.md' + - 'Locations Key Rings Crypto Keys Encrypt': 'projects_locations-key-rings-crypto-keys-encrypt.md' + - 'Locations Key Rings Crypto Keys Get': 'projects_locations-key-rings-crypto-keys-get.md' + - 'Locations Key Rings Crypto Keys Get Iam Policy': 'projects_locations-key-rings-crypto-keys-get-iam-policy.md' + - 'Locations Key Rings Crypto Keys 
List': 'projects_locations-key-rings-crypto-keys-list.md' + - 'Locations Key Rings Crypto Keys Patch': 'projects_locations-key-rings-crypto-keys-patch.md' + - 'Locations Key Rings Crypto Keys Set Iam Policy': 'projects_locations-key-rings-crypto-keys-set-iam-policy.md' + - 'Locations Key Rings Crypto Keys Test Iam Permissions': 'projects_locations-key-rings-crypto-keys-test-iam-permissions.md' + - 'Locations Key Rings Crypto Keys Update Primary Version': 'projects_locations-key-rings-crypto-keys-update-primary-version.md' + - 'Locations Key Rings Get': 'projects_locations-key-rings-get.md' + - 'Locations Key Rings Get Iam Policy': 'projects_locations-key-rings-get-iam-policy.md' + - 'Locations Key Rings Import Jobs Create': 'projects_locations-key-rings-import-jobs-create.md' + - 'Locations Key Rings Import Jobs Get': 'projects_locations-key-rings-import-jobs-get.md' + - 'Locations Key Rings Import Jobs Get Iam Policy': 'projects_locations-key-rings-import-jobs-get-iam-policy.md' + - 'Locations Key Rings Import Jobs List': 'projects_locations-key-rings-import-jobs-list.md' + - 'Locations Key Rings Import Jobs Set Iam Policy': 'projects_locations-key-rings-import-jobs-set-iam-policy.md' + - 'Locations Key Rings Import Jobs Test Iam Permissions': 'projects_locations-key-rings-import-jobs-test-iam-permissions.md' + - 'Locations Key Rings List': 'projects_locations-key-rings-list.md' + - 'Locations Key Rings Set Iam Policy': 'projects_locations-key-rings-set-iam-policy.md' + - 'Locations Key Rings Test Iam Permissions': 'projects_locations-key-rings-test-iam-permissions.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/cloudkms1-cli/src/client.rs b/gen/cloudkms1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudkms1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use 
crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudkms1-cli/src/main.rs b/gen/cloudkms1-cli/src/main.rs index e3bd061345..c69ed36dca 100644 --- a/gen/cloudkms1-cli/src/main.rs +++ b/gen/cloudkms1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudkms1::{api, Error, oauth2}; +use google_cloudkms1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,234 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_locations_ekm_config_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_ekm_config_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + 
v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_ekm_config_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_ekm_config_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_ekm_config_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_ekm_config_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_ekm_connections_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -201,7 +428,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -260,7 +487,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -357,7 +584,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -875,7 +1102,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "skip-initial-version-creation" => { - call = call.skip_initial_version_creation(arg_from_str(value.unwrap_or("false"), err, "skip-initial-version-creation", "boolean")); + call = call.skip_initial_version_creation( value.map(|v| arg_from_str(v, err, "skip-initial-version-creation", "boolean")).unwrap_or(false)); }, "crypto-key-id" => { call = call.crypto_key_id(value.unwrap_or("")); @@ -1421,8 +1648,9 @@ where "crypto-key-version" => Some(("cryptoKeyVersion", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "import-job" => Some(("importJob", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rsa-aes-wrapped-key" => Some(("rsaAesWrappedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "wrapped-key" => Some(("wrappedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["algorithm", "crypto-key-version", "import-job", "rsa-aes-wrapped-key"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["algorithm", "crypto-key-version", "import-job", "rsa-aes-wrapped-key", "wrapped-key"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1495,7 +1723,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1782,7 +2010,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2150,7 +2378,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2212,7 +2440,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => 
{ - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2336,7 +2564,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2701,7 +2929,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2911,7 +3139,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2970,7 +3198,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3207,7 +3435,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", 
"integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3444,7 +3672,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3503,6 +3731,15 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { + ("locations-ekm-config-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_ekm_config_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-ekm-config-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_ekm_config_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-ekm-config-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_ekm_config_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-ekm-connections-create", Some(opt)) => { call_result = self._projects_locations_ekm_connections_create(opt, dry_run, &mut err).await; }, @@ -3714,7 +3951,85 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-ekm-connections-create', 'locations-ekm-connections-get', 'locations-ekm-connections-get-iam-policy', 'locations-ekm-connections-list', 'locations-ekm-connections-patch', 'locations-ekm-connections-set-iam-policy', 'locations-ekm-connections-test-iam-permissions', 'locations-generate-random-bytes', 'locations-get', 'locations-key-rings-create', 'locations-key-rings-crypto-keys-create', 'locations-key-rings-crypto-keys-crypto-key-versions-asymmetric-decrypt', 'locations-key-rings-crypto-keys-crypto-key-versions-asymmetric-sign', 'locations-key-rings-crypto-keys-crypto-key-versions-create', 
'locations-key-rings-crypto-keys-crypto-key-versions-destroy', 'locations-key-rings-crypto-keys-crypto-key-versions-get', 'locations-key-rings-crypto-keys-crypto-key-versions-get-public-key', 'locations-key-rings-crypto-keys-crypto-key-versions-import', 'locations-key-rings-crypto-keys-crypto-key-versions-list', 'locations-key-rings-crypto-keys-crypto-key-versions-mac-sign', 'locations-key-rings-crypto-keys-crypto-key-versions-mac-verify', 'locations-key-rings-crypto-keys-crypto-key-versions-patch', 'locations-key-rings-crypto-keys-crypto-key-versions-restore', 'locations-key-rings-crypto-keys-decrypt', 'locations-key-rings-crypto-keys-encrypt', 'locations-key-rings-crypto-keys-get', 'locations-key-rings-crypto-keys-get-iam-policy', 'locations-key-rings-crypto-keys-list', 'locations-key-rings-crypto-keys-patch', 'locations-key-rings-crypto-keys-set-iam-policy', 'locations-key-rings-crypto-keys-test-iam-permissions', 'locations-key-rings-crypto-keys-update-primary-version', 'locations-key-rings-get', 'locations-key-rings-get-iam-policy', 'locations-key-rings-import-jobs-create', 'locations-key-rings-import-jobs-get', 'locations-key-rings-import-jobs-get-iam-policy', 'locations-key-rings-import-jobs-list', 'locations-key-rings-import-jobs-set-iam-policy', 'locations-key-rings-import-jobs-test-iam-permissions', 'locations-key-rings-list', 'locations-key-rings-set-iam-policy', 'locations-key-rings-test-iam-permissions' and 'locations-list'", vec![ + ("projects", "methods: 'locations-ekm-config-get-iam-policy', 'locations-ekm-config-set-iam-policy', 'locations-ekm-config-test-iam-permissions', 'locations-ekm-connections-create', 'locations-ekm-connections-get', 'locations-ekm-connections-get-iam-policy', 'locations-ekm-connections-list', 'locations-ekm-connections-patch', 'locations-ekm-connections-set-iam-policy', 'locations-ekm-connections-test-iam-permissions', 'locations-generate-random-bytes', 'locations-get', 'locations-key-rings-create', 
'locations-key-rings-crypto-keys-create', 'locations-key-rings-crypto-keys-crypto-key-versions-asymmetric-decrypt', 'locations-key-rings-crypto-keys-crypto-key-versions-asymmetric-sign', 'locations-key-rings-crypto-keys-crypto-key-versions-create', 'locations-key-rings-crypto-keys-crypto-key-versions-destroy', 'locations-key-rings-crypto-keys-crypto-key-versions-get', 'locations-key-rings-crypto-keys-crypto-key-versions-get-public-key', 'locations-key-rings-crypto-keys-crypto-key-versions-import', 'locations-key-rings-crypto-keys-crypto-key-versions-list', 'locations-key-rings-crypto-keys-crypto-key-versions-mac-sign', 'locations-key-rings-crypto-keys-crypto-key-versions-mac-verify', 'locations-key-rings-crypto-keys-crypto-key-versions-patch', 'locations-key-rings-crypto-keys-crypto-key-versions-restore', 'locations-key-rings-crypto-keys-decrypt', 'locations-key-rings-crypto-keys-encrypt', 'locations-key-rings-crypto-keys-get', 'locations-key-rings-crypto-keys-get-iam-policy', 'locations-key-rings-crypto-keys-list', 'locations-key-rings-crypto-keys-patch', 'locations-key-rings-crypto-keys-set-iam-policy', 'locations-key-rings-crypto-keys-test-iam-permissions', 'locations-key-rings-crypto-keys-update-primary-version', 'locations-key-rings-get', 'locations-key-rings-get-iam-policy', 'locations-key-rings-import-jobs-create', 'locations-key-rings-import-jobs-get', 'locations-key-rings-import-jobs-get-iam-policy', 'locations-key-rings-import-jobs-list', 'locations-key-rings-import-jobs-set-iam-policy', 'locations-key-rings-import-jobs-test-iam-permissions', 'locations-key-rings-list', 'locations-key-rings-set-iam-policy', 'locations-key-rings-test-iam-permissions' and 'locations-list'", vec![ + ("locations-ekm-config-get-iam-policy", + Some(r##"Gets the access control policy for a resource. 
Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudkms1_cli/projects_locations-ekm-config-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-ekm-config-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudkms1_cli/projects_locations-ekm-config-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-ekm-config-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudkms1_cli/projects_locations-ekm-config-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-ekm-connections-create", Some(r##"Creates a new EkmConnection in a given Project and Location."##), "Details at http://byron.github.io/google-apis-rs/google_cloudkms1_cli/projects_locations-ekm-connections-create", @@ -3771,7 +4086,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3843,7 +4158,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3871,7 +4186,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4401,7 +4716,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4473,7 +4788,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4501,7 +4816,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4579,7 +4894,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4651,7 +4966,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4695,7 +5010,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4723,7 +5038,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4773,7 +5088,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4801,7 +5116,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4851,7 +5166,7 @@ async fn main() { let mut app = App::new("cloudkms1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230106") .about("Manages keys and performs cryptographic operations in a central cloud service, for direct use by other cloud resources and applications. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudkms1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudkms1/Cargo.toml b/gen/cloudkms1/Cargo.toml index b49b870b57..a34d2f3bd5 100644 --- a/gen/cloudkms1/Cargo.toml +++ b/gen/cloudkms1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudkms1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud KMS (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudkms1" homepage = "https://cloud.google.com/kms/" -documentation = "https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-cloudkms1/5.0.2+20230106" license = "MIT" keywords = ["cloudkms", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudkms1/README.md b/gen/cloudkms1/README.md index 5d0fc65f25..6b0b51af84 100644 --- a/gen/cloudkms1/README.md +++ b/gen/cloudkms1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudkms1` library allows access to all features of the *Google Cloud KMS* service. -This documentation was generated from *Cloud KMS* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *cloudkms:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud KMS* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *cloudkms:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud KMS* *v1* API can be found at the [official documentation site](https://cloud.google.com/kms/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/CloudKMS) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/CloudKMS) ... * projects - * [*locations ekm config get iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConfigGetIamPolicyCall), [*locations ekm config set iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConfigSetIamPolicyCall), [*locations ekm config test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConfigTestIamPermissionCall), [*locations ekm connections create*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionCreateCall), [*locations ekm connections get*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionGetCall), [*locations ekm connections get iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionGetIamPolicyCall), [*locations ekm connections list*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionListCall), [*locations ekm connections patch*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionPatchCall), [*locations ekm connections set iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionSetIamPolicyCall), [*locations ekm connections test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionTestIamPermissionCall), [*locations generate random bytes*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationGenerateRandomByteCall), [*locations 
get*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationGetCall), [*locations key rings create*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCreateCall), [*locations key rings crypto keys create*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCreateCall), [*locations key rings crypto keys crypto key versions asymmetric decrypt*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionAsymmetricDecryptCall), [*locations key rings crypto keys crypto key versions asymmetric sign*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionAsymmetricSignCall), [*locations key rings crypto keys crypto key versions create*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionCreateCall), [*locations key rings crypto keys crypto key versions destroy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionDestroyCall), [*locations key rings crypto keys crypto key versions get*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionGetCall), [*locations key rings crypto keys crypto key versions get public key*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionGetPublicKeyCall), [*locations key rings crypto keys crypto key versions import*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionImportCall), [*locations key rings crypto keys crypto key versions 
list*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionListCall), [*locations key rings crypto keys crypto key versions mac sign*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionMacSignCall), [*locations key rings crypto keys crypto key versions mac verify*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionMacVerifyCall), [*locations key rings crypto keys crypto key versions patch*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionPatchCall), [*locations key rings crypto keys crypto key versions restore*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionRestoreCall), [*locations key rings crypto keys decrypt*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyDecryptCall), [*locations key rings crypto keys encrypt*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyEncryptCall), [*locations key rings crypto keys get*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyGetCall), [*locations key rings crypto keys get iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyGetIamPolicyCall), [*locations key rings crypto keys list*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyListCall), [*locations key rings crypto keys patch*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyPatchCall), [*locations key rings crypto keys set iam 
policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeySetIamPolicyCall), [*locations key rings crypto keys test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyTestIamPermissionCall), [*locations key rings crypto keys update primary version*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyUpdatePrimaryVersionCall), [*locations key rings get*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingGetCall), [*locations key rings get iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingGetIamPolicyCall), [*locations key rings import jobs create*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobCreateCall), [*locations key rings import jobs get*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobGetCall), [*locations key rings import jobs get iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobGetIamPolicyCall), [*locations key rings import jobs list*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobListCall), [*locations key rings import jobs set iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobSetIamPolicyCall), [*locations key rings import jobs test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobTestIamPermissionCall), [*locations key rings list*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingListCall), [*locations key 
rings set iam policy*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingSetIamPolicyCall), [*locations key rings test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationKeyRingTestIamPermissionCall) and [*locations list*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/api::ProjectLocationListCall) + * [*locations ekm config get iam policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConfigGetIamPolicyCall), [*locations ekm config set iam policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConfigSetIamPolicyCall), [*locations ekm config test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConfigTestIamPermissionCall), [*locations ekm connections create*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionCreateCall), [*locations ekm connections get*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionGetCall), [*locations ekm connections get iam policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionGetIamPolicyCall), [*locations ekm connections list*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionListCall), [*locations ekm connections patch*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionPatchCall), [*locations ekm connections set iam policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionSetIamPolicyCall), [*locations ekm connections test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationEkmConnectionTestIamPermissionCall), [*locations 
generate random bytes*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationGenerateRandomByteCall), [*locations get*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationGetCall), [*locations key rings create*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCreateCall), [*locations key rings crypto keys create*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCreateCall), [*locations key rings crypto keys crypto key versions asymmetric decrypt*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionAsymmetricDecryptCall), [*locations key rings crypto keys crypto key versions asymmetric sign*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionAsymmetricSignCall), [*locations key rings crypto keys crypto key versions create*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionCreateCall), [*locations key rings crypto keys crypto key versions destroy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionDestroyCall), [*locations key rings crypto keys crypto key versions get*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionGetCall), [*locations key rings crypto keys crypto key versions get public key*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionGetPublicKeyCall), [*locations key rings crypto keys crypto key versions import*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionImportCall), [*locations key rings crypto keys crypto key versions 
list*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionListCall), [*locations key rings crypto keys crypto key versions mac sign*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionMacSignCall), [*locations key rings crypto keys crypto key versions mac verify*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionMacVerifyCall), [*locations key rings crypto keys crypto key versions patch*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionPatchCall), [*locations key rings crypto keys crypto key versions restore*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionRestoreCall), [*locations key rings crypto keys decrypt*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyDecryptCall), [*locations key rings crypto keys encrypt*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyEncryptCall), [*locations key rings crypto keys get*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyGetCall), [*locations key rings crypto keys get iam policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyGetIamPolicyCall), [*locations key rings crypto keys list*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyListCall), [*locations key rings crypto keys patch*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyPatchCall), [*locations key rings crypto keys set iam 
policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeySetIamPolicyCall), [*locations key rings crypto keys test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyTestIamPermissionCall), [*locations key rings crypto keys update primary version*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingCryptoKeyUpdatePrimaryVersionCall), [*locations key rings get*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingGetCall), [*locations key rings get iam policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingGetIamPolicyCall), [*locations key rings import jobs create*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobCreateCall), [*locations key rings import jobs get*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobGetCall), [*locations key rings import jobs get iam policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobGetIamPolicyCall), [*locations key rings import jobs list*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobListCall), [*locations key rings import jobs set iam policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobSetIamPolicyCall), [*locations key rings import jobs test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingImportJobTestIamPermissionCall), [*locations key rings list*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingListCall), [*locations key rings set iam 
policy*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingSetIamPolicyCall), [*locations key rings test iam permissions*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationKeyRingTestIamPermissionCall) and [*locations list*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/api::ProjectLocationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/CloudKMS)** +* **[Hub](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/CloudKMS)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::Part)** + * **[Parts](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort.
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true.
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudkms1/5.0.2-beta-1+20230106/google_cloudkms1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudkms1/5.0.2+20230106/google_cloudkms1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudkms1/src/api.rs b/gen/cloudkms1/src/api.rs index fb298f3415..daee7dd83c 100644 --- a/gen/cloudkms1/src/api.rs +++ b/gen/cloudkms1/src/api.rs @@ -124,7 +124,7 @@ impl<'a, S> CloudKMS { CloudKMS { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudkms.googleapis.com/".to_string(), _root_url: "https://cloudkms.googleapis.com/".to_string(), } @@ -135,7 +135,7 @@ impl<'a, S> CloudKMS { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudkms1/src/client.rs b/gen/cloudkms1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudkms1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudkms1/src/lib.rs b/gen/cloudkms1/src/lib.rs index eed2b2f1aa..9ef9904517 100644 --- a/gen/cloudkms1/src/lib.rs +++ b/gen/cloudkms1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud KMS* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *cloudkms:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud KMS* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *cloudkms:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud KMS* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/kms/). diff --git a/gen/cloudkms1_beta1-cli/Cargo.toml b/gen/cloudkms1_beta1-cli/Cargo.toml index 7a58d17366..a1bbcf735d 100644 --- a/gen/cloudkms1_beta1-cli/Cargo.toml +++ b/gen/cloudkms1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudkms1_beta1-cli" -version = "4.0.1+20170515" +version = "5.0.2+20170515" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud KMS (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudkms1_beta1-cli" @@ -20,13 +20,13 @@ name = "cloudkms1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudkms1_beta1] path = "../cloudkms1_beta1" -version = "4.0.1+20170515" +version = "5.0.2+20170515" + diff --git a/gen/cloudkms1_beta1-cli/README.md b/gen/cloudkms1_beta1-cli/README.md index 4250735411..8a38f0d6bb 100644 --- a/gen/cloudkms1_beta1-cli/README.md +++ b/gen/cloudkms1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud KMS* API at revision *20170515*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud KMS* API at revision *20170515*. The CLI is at version *5.0.2*. ```bash cloudkms1-beta1 [options] diff --git a/gen/cloudkms1_beta1-cli/mkdocs.yml b/gen/cloudkms1_beta1-cli/mkdocs.yml index c08587aa77..5315d41b87 100644 --- a/gen/cloudkms1_beta1-cli/mkdocs.yml +++ b/gen/cloudkms1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud KMS v4.0.1+20170515 +site_name: Cloud KMS v5.0.2+20170515 site_url: http://byron.github.io/google-apis-rs/google-cloudkms1_beta1-cli site_description: A complete library to interact with Cloud KMS (protocol v1beta1) @@ -7,32 +7,33 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudkms1_beta1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-key-rings-create.md', 'Projects', 'Locations Key Rings Create'] -- ['projects_locations-key-rings-crypto-keys-create.md', 'Projects', 'Locations Key Rings Crypto Keys Create'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-create.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Create'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-destroy.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Destroy'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-get.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Get'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-list.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions List'] -- ['projects_locations-key-rings-crypto-keys-crypto-key-versions-patch.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Patch'] -- 
['projects_locations-key-rings-crypto-keys-crypto-key-versions-restore.md', 'Projects', 'Locations Key Rings Crypto Keys Crypto Key Versions Restore'] -- ['projects_locations-key-rings-crypto-keys-decrypt.md', 'Projects', 'Locations Key Rings Crypto Keys Decrypt'] -- ['projects_locations-key-rings-crypto-keys-encrypt.md', 'Projects', 'Locations Key Rings Crypto Keys Encrypt'] -- ['projects_locations-key-rings-crypto-keys-get.md', 'Projects', 'Locations Key Rings Crypto Keys Get'] -- ['projects_locations-key-rings-crypto-keys-get-iam-policy.md', 'Projects', 'Locations Key Rings Crypto Keys Get Iam Policy'] -- ['projects_locations-key-rings-crypto-keys-list.md', 'Projects', 'Locations Key Rings Crypto Keys List'] -- ['projects_locations-key-rings-crypto-keys-patch.md', 'Projects', 'Locations Key Rings Crypto Keys Patch'] -- ['projects_locations-key-rings-crypto-keys-set-iam-policy.md', 'Projects', 'Locations Key Rings Crypto Keys Set Iam Policy'] -- ['projects_locations-key-rings-crypto-keys-test-iam-permissions.md', 'Projects', 'Locations Key Rings Crypto Keys Test Iam Permissions'] -- ['projects_locations-key-rings-crypto-keys-update-primary-version.md', 'Projects', 'Locations Key Rings Crypto Keys Update Primary Version'] -- ['projects_locations-key-rings-get.md', 'Projects', 'Locations Key Rings Get'] -- ['projects_locations-key-rings-get-iam-policy.md', 'Projects', 'Locations Key Rings Get Iam Policy'] -- ['projects_locations-key-rings-list.md', 'Projects', 'Locations Key Rings List'] -- ['projects_locations-key-rings-set-iam-policy.md', 'Projects', 'Locations Key Rings Set Iam Policy'] -- ['projects_locations-key-rings-test-iam-permissions.md', 'Projects', 'Locations Key Rings Test Iam Permissions'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Key Rings Create': 'projects_locations-key-rings-create.md' + - 'Locations Key Rings Crypto Keys 
Create': 'projects_locations-key-rings-crypto-keys-create.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Create': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-create.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Destroy': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-destroy.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Get': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-get.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions List': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-list.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Patch': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-patch.md' + - 'Locations Key Rings Crypto Keys Crypto Key Versions Restore': 'projects_locations-key-rings-crypto-keys-crypto-key-versions-restore.md' + - 'Locations Key Rings Crypto Keys Decrypt': 'projects_locations-key-rings-crypto-keys-decrypt.md' + - 'Locations Key Rings Crypto Keys Encrypt': 'projects_locations-key-rings-crypto-keys-encrypt.md' + - 'Locations Key Rings Crypto Keys Get': 'projects_locations-key-rings-crypto-keys-get.md' + - 'Locations Key Rings Crypto Keys Get Iam Policy': 'projects_locations-key-rings-crypto-keys-get-iam-policy.md' + - 'Locations Key Rings Crypto Keys List': 'projects_locations-key-rings-crypto-keys-list.md' + - 'Locations Key Rings Crypto Keys Patch': 'projects_locations-key-rings-crypto-keys-patch.md' + - 'Locations Key Rings Crypto Keys Set Iam Policy': 'projects_locations-key-rings-crypto-keys-set-iam-policy.md' + - 'Locations Key Rings Crypto Keys Test Iam Permissions': 'projects_locations-key-rings-crypto-keys-test-iam-permissions.md' + - 'Locations Key Rings Crypto Keys Update Primary Version': 'projects_locations-key-rings-crypto-keys-update-primary-version.md' + - 'Locations Key Rings Get': 'projects_locations-key-rings-get.md' + - 'Locations Key Rings Get Iam Policy': 
'projects_locations-key-rings-get-iam-policy.md' + - 'Locations Key Rings List': 'projects_locations-key-rings-list.md' + - 'Locations Key Rings Set Iam Policy': 'projects_locations-key-rings-set-iam-policy.md' + - 'Locations Key Rings Test Iam Permissions': 'projects_locations-key-rings-test-iam-permissions.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/cloudkms1_beta1-cli/src/client.rs b/gen/cloudkms1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudkms1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudkms1_beta1-cli/src/main.rs b/gen/cloudkms1_beta1-cli/src/main.rs index 4f8eac8232..acf5f14c9d 100644 --- a/gen/cloudkms1_beta1-cli/src/main.rs +++ b/gen/cloudkms1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudkms1_beta1::{api, Error, oauth2}; +use google_cloudkms1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -526,7 +525,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -619,7 +618,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1038,7 +1037,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1136,7 +1135,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1557,7 +1556,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1789,7 +1788,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2686,7 +2685,7 @@ async fn main() { let mut app = App::new("cloudkms1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20170515") + .version("5.0.2+20170515") .about("Manages encryption for your cloud services the same way you do on-premise. 
You can generate, use, rotate, and destroy AES256 encryption keys.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudkms1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudkms1_beta1/Cargo.toml b/gen/cloudkms1_beta1/Cargo.toml index b4e3c2306b..8a20ebadc5 100644 --- a/gen/cloudkms1_beta1/Cargo.toml +++ b/gen/cloudkms1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudkms1_beta1" -version = "5.0.2-beta-1+20170515" +version = "5.0.2+20170515" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud KMS (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudkms1_beta1" homepage = "https://cloud.google.com/kms/" -documentation = "https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515" +documentation = "https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515" license = "MIT" keywords = ["cloudkms", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudkms1_beta1/README.md b/gen/cloudkms1_beta1/README.md index 4476eb0cf7..914ba8b8cc 100644 --- a/gen/cloudkms1_beta1/README.md +++ b/gen/cloudkms1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudkms1_beta1` library allows access to all features of the *Google Cloud KMS* service. -This documentation was generated from *Cloud KMS* crate version *5.0.2-beta-1+20170515*, where *20170515* is the exact revision of the *cloudkms:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud KMS* crate version *5.0.2+20170515*, where *20170515* is the exact revision of the *cloudkms:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud KMS* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/kms/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/CloudKMS) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/CloudKMS) ... * projects - * [*locations get*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationGetCall), [*locations key rings create*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCreateCall), [*locations key rings crypto keys create*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCreateCall), [*locations key rings crypto keys crypto key versions create*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionCreateCall), [*locations key rings crypto keys crypto key versions destroy*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionDestroyCall), [*locations key rings crypto keys crypto key versions get*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionGetCall), [*locations key rings crypto keys crypto key versions list*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionListCall), [*locations key rings crypto keys crypto key versions patch*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionPatchCall), [*locations key rings crypto keys crypto key versions 
restore*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionRestoreCall), [*locations key rings crypto keys decrypt*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyDecryptCall), [*locations key rings crypto keys encrypt*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyEncryptCall), [*locations key rings crypto keys get*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyGetCall), [*locations key rings crypto keys get iam policy*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyGetIamPolicyCall), [*locations key rings crypto keys list*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyListCall), [*locations key rings crypto keys patch*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyPatchCall), [*locations key rings crypto keys set iam policy*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeySetIamPolicyCall), [*locations key rings crypto keys test iam permissions*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyTestIamPermissionCall), [*locations key rings crypto keys update primary version*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyUpdatePrimaryVersionCall), [*locations key rings get*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingGetCall), [*locations key rings get iam 
policy*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingGetIamPolicyCall), [*locations key rings list*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingListCall), [*locations key rings set iam policy*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingSetIamPolicyCall), [*locations key rings test iam permissions*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingTestIamPermissionCall) and [*locations list*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/api::ProjectLocationListCall) + * [*locations get*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationGetCall), [*locations key rings create*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCreateCall), [*locations key rings crypto keys create*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCreateCall), [*locations key rings crypto keys crypto key versions create*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionCreateCall), [*locations key rings crypto keys crypto key versions destroy*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionDestroyCall), [*locations key rings crypto keys crypto key versions get*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionGetCall), [*locations key rings crypto keys crypto key versions 
list*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionListCall), [*locations key rings crypto keys crypto key versions patch*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionPatchCall), [*locations key rings crypto keys crypto key versions restore*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyCryptoKeyVersionRestoreCall), [*locations key rings crypto keys decrypt*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyDecryptCall), [*locations key rings crypto keys encrypt*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyEncryptCall), [*locations key rings crypto keys get*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyGetCall), [*locations key rings crypto keys get iam policy*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyGetIamPolicyCall), [*locations key rings crypto keys list*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyListCall), [*locations key rings crypto keys patch*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyPatchCall), [*locations key rings crypto keys set iam policy*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeySetIamPolicyCall), [*locations key rings crypto keys test iam permissions*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyTestIamPermissionCall), [*locations key rings crypto keys update 
primary version*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingCryptoKeyUpdatePrimaryVersionCall), [*locations key rings get*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingGetCall), [*locations key rings get iam policy*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingGetIamPolicyCall), [*locations key rings list*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingListCall), [*locations key rings set iam policy*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingSetIamPolicyCall), [*locations key rings test iam permissions*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationKeyRingTestIamPermissionCall) and [*locations list*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/api::ProjectLocationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/CloudKMS)** +* **[Hub](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/CloudKMS)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudkms1_beta1/5.0.2-beta-1+20170515/google_cloudkms1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudkms1_beta1/5.0.2+20170515/google_cloudkms1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudkms1_beta1/src/api.rs b/gen/cloudkms1_beta1/src/api.rs index 596b7bce6c..a582197a95 100644 --- a/gen/cloudkms1_beta1/src/api.rs +++ b/gen/cloudkms1_beta1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudKMS { CloudKMS { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudkms.googleapis.com/".to_string(), _root_url: "https://cloudkms.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudKMS { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudkms1_beta1/src/client.rs b/gen/cloudkms1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudkms1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudkms1_beta1/src/lib.rs b/gen/cloudkms1_beta1/src/lib.rs index 6f62324354..90bea1469c 100644 --- a/gen/cloudkms1_beta1/src/lib.rs +++ b/gen/cloudkms1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud KMS* crate version *5.0.2-beta-1+20170515*, where *20170515* is the exact revision of the *cloudkms:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud KMS* crate version *5.0.2+20170515*, where *20170515* is the exact revision of the *cloudkms:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud KMS* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/kms/). diff --git a/gen/cloudlatencytest2-cli/Cargo.toml b/gen/cloudlatencytest2-cli/Cargo.toml index 6412276b60..7f7f7b9af1 100644 --- a/gen/cloudlatencytest2-cli/Cargo.toml +++ b/gen/cloudlatencytest2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudlatencytest2-cli" -version = "4.0.1+20160309" +version = "5.0.2+20160309" authors = ["Sebastian Thiel "] description = "A complete library to interact with cloudlatencytest (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudlatencytest2-cli" @@ -19,13 +19,13 @@ name = "cloudlatencytest2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -35,7 +35,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudlatencytest2] path = "../cloudlatencytest2" -version = "4.0.1+20160309" +version = "5.0.2+20160309" + diff --git a/gen/cloudlatencytest2-cli/README.md b/gen/cloudlatencytest2-cli/README.md index 8ffafdea86..34d27fc6b2 100644 --- a/gen/cloudlatencytest2-cli/README.md +++ b/gen/cloudlatencytest2-cli/README.md @@ -22,7 +22,7 
@@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *cloudlatencytest* API at revision *20160309*. The CLI is at version *4.0.1*. +This documentation was generated from the *cloudlatencytest* API at revision *20160309*. The CLI is at version *5.0.2*. ```bash cloudlatencytest2 [options] diff --git a/gen/cloudlatencytest2-cli/mkdocs.yml b/gen/cloudlatencytest2-cli/mkdocs.yml index 716b33f29d..3a4e840da3 100644 --- a/gen/cloudlatencytest2-cli/mkdocs.yml +++ b/gen/cloudlatencytest2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: cloudlatencytest v4.0.1+20160309 +site_name: cloudlatencytest v5.0.2+20160309 site_url: http://byron.github.io/google-apis-rs/google-cloudlatencytest2-cli site_description: A complete library to interact with cloudlatencytest (protocol v2) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudlatencytest docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['statscollection_updateaggregatedstats.md', 'Statscollection', 'Updateaggregatedstats'] -- ['statscollection_updatestats.md', 'Statscollection', 'Updatestats'] +nav: +- Home: 'index.md' +- 'Statscollection': + - 'Updateaggregatedstats': 'statscollection_updateaggregatedstats.md' + - 'Updatestats': 'statscollection_updatestats.md' theme: readthedocs diff --git a/gen/cloudlatencytest2-cli/src/client.rs b/gen/cloudlatencytest2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudlatencytest2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use 
std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudlatencytest2-cli/src/main.rs b/gen/cloudlatencytest2-cli/src/main.rs index d467d03109..922e76dd2c 100644 --- a/gen/cloudlatencytest2-cli/src/main.rs +++ b/gen/cloudlatencytest2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudlatencytest2::{api, Error, oauth2}; +use google_cloudlatencytest2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -360,7 +359,7 @@ async fn main() { let mut app = App::new("cloudlatencytest2") .author("Sebastian Thiel ") - .version("4.0.1+20160309") + .version("5.0.2+20160309") .about("Reports latency data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudlatencytest2_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudlatencytest2/Cargo.toml b/gen/cloudlatencytest2/Cargo.toml index 6903b509d0..7bc534bfa3 100644 --- a/gen/cloudlatencytest2/Cargo.toml +++ b/gen/cloudlatencytest2/Cargo.toml @@ -4,11 +4,11 @@ [package] name = "google-cloudlatencytest2" -version = "5.0.2-beta-1+20160309" +version = "5.0.2+20160309" authors = ["Sebastian Thiel "] description = "A complete library to interact with cloudlatencytest (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudlatencytest2" -documentation = "https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309" +documentation = "https://docs.rs/google-cloudlatencytest2/5.0.2+20160309" license = "MIT" keywords = 
["cloudlatencytest", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudlatencytest2/README.md b/gen/cloudlatencytest2/README.md index 21c2ec1db7..d16eb29708 100644 --- a/gen/cloudlatencytest2/README.md +++ b/gen/cloudlatencytest2/README.md @@ -5,13 +5,13 @@ DO NOT EDIT ! --> The `google-cloudlatencytest2` library allows access to all features of the *Google cloudlatencytest* service. -This documentation was generated from *cloudlatencytest* crate version *5.0.2-beta-1+20160309*, where *20160309* is the exact revision of the *cloudlatencytest:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *cloudlatencytest* crate version *5.0.2+20160309*, where *20160309* is the exact revision of the *cloudlatencytest:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/Cloudlatencytest) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/Cloudlatencytest) ... 
* statscollection - * [*updateaggregatedstats*](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/api::StatscollectionUpdateaggregatedstatCall) and [*updatestats*](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/api::StatscollectionUpdatestatCall) + * [*updateaggregatedstats*](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/api::StatscollectionUpdateaggregatedstatCall) and [*updatestats*](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/api::StatscollectionUpdatestatCall) @@ -20,17 +20,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/Cloudlatencytest)** +* **[Hub](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/Cloudlatencytest)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::Resource)** * primary types that you can apply *Activities* to * a 
collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::Part)** + * **[Parts](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -121,17 +121,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -141,29 +141,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::RequestValue) and -[decodable](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::RequestValue) and +[decodable](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudlatencytest2/5.0.2-beta-1+20160309/google_cloudlatencytest2/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudlatencytest2/5.0.2+20160309/google_cloudlatencytest2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudlatencytest2/src/api.rs b/gen/cloudlatencytest2/src/api.rs index 9fb251e600..975bff3ce2 100644 --- a/gen/cloudlatencytest2/src/api.rs +++ b/gen/cloudlatencytest2/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Cloudlatencytest { Cloudlatencytest { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudlatencytest-pa.googleapis.com/v2/statscollection/".to_string(), _root_url: "https://cloudlatencytest-pa.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Cloudlatencytest { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudlatencytest2/src/client.rs b/gen/cloudlatencytest2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudlatencytest2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudlatencytest2/src/lib.rs b/gen/cloudlatencytest2/src/lib.rs index 1b8f85e05d..9ccaee22e9 100644 --- a/gen/cloudlatencytest2/src/lib.rs +++ b/gen/cloudlatencytest2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *cloudlatencytest* crate version *5.0.2-beta-1+20160309*, where *20160309* is the exact revision of the *cloudlatencytest:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *cloudlatencytest* crate version *5.0.2+20160309*, where *20160309* is the exact revision of the *cloudlatencytest:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/main/gen/cloudlatencytest2). //! # Features //! diff --git a/gen/cloudmonitoring2_beta2-cli/Cargo.toml b/gen/cloudmonitoring2_beta2-cli/Cargo.toml index 632d5d02f4..43b32f179e 100644 --- a/gen/cloudmonitoring2_beta2-cli/Cargo.toml +++ b/gen/cloudmonitoring2_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudmonitoring2_beta2-cli" -version = "4.0.1+20170501" +version = "5.0.2+20170501" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Monitoring (protocol v2beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudmonitoring2_beta2-cli" @@ -20,13 +20,13 @@ name = "cloudmonitoring2-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudmonitoring2_beta2] path = "../cloudmonitoring2_beta2" -version = "4.0.1+20170501" +version = "5.0.2+20170501" + diff --git a/gen/cloudmonitoring2_beta2-cli/README.md b/gen/cloudmonitoring2_beta2-cli/README.md index fadc50772d..b4986d1b10 100644 --- 
a/gen/cloudmonitoring2_beta2-cli/README.md +++ b/gen/cloudmonitoring2_beta2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Monitoring* API at revision *20170501*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Monitoring* API at revision *20170501*. The CLI is at version *5.0.2*. ```bash cloudmonitoring2-beta2 [options] diff --git a/gen/cloudmonitoring2_beta2-cli/mkdocs.yml b/gen/cloudmonitoring2_beta2-cli/mkdocs.yml index 8eba2e4849..86174bff83 100644 --- a/gen/cloudmonitoring2_beta2-cli/mkdocs.yml +++ b/gen/cloudmonitoring2_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Monitoring v4.0.1+20170501 +site_name: Cloud Monitoring v5.0.2+20170501 site_url: http://byron.github.io/google-apis-rs/google-cloudmonitoring2_beta2-cli site_description: A complete library to interact with Cloud Monitoring (protocol v2beta2) @@ -7,14 +7,17 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudmonitoring2 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['metric-descriptors_create.md', 'Metric Descriptors', 'Create'] -- ['metric-descriptors_delete.md', 'Metric Descriptors', 'Delete'] -- ['metric-descriptors_list.md', 'Metric Descriptors', 'List'] -- ['timeseries_list.md', 'Timeseries', 'List'] -- ['timeseries_write.md', 'Timeseries', 'Write'] -- ['timeseries-descriptors_list.md', 'Timeseries Descriptors', 'List'] +nav: +- Home: 'index.md' +- 'Metric Descriptors': + - 'Create': 'metric-descriptors_create.md' + - 'Delete': 'metric-descriptors_delete.md' + - 'List': 'metric-descriptors_list.md' +- 'Timeseries': + - 'List': 'timeseries_list.md' + - 'Write': 'timeseries_write.md' +- 'Timeseries Descriptors': + - 'List': 'timeseries-descriptors_list.md' theme: readthedocs diff --git a/gen/cloudmonitoring2_beta2-cli/src/client.rs b/gen/cloudmonitoring2_beta2-cli/src/client.rs deleted 
file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudmonitoring2_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match 
candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return 
Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - 
Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut 
InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset 
in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - 
FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudmonitoring2_beta2-cli/src/main.rs b/gen/cloudmonitoring2_beta2-cli/src/main.rs index 2af3f806a4..3c1783ea7b 100644 --- a/gen/cloudmonitoring2_beta2-cli/src/main.rs +++ b/gen/cloudmonitoring2_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudmonitoring2_beta2::{api, Error, oauth2}; +use google_cloudmonitoring2_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -238,7 +237,7 @@ where call = call.page_token(value.unwrap_or("")); }, "count" => { - call = call.count(arg_from_str(value.unwrap_or("-0"), err, "count", "integer")); + call = call.count( value.map(|v| arg_from_str(v, err, "count", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -342,7 +341,7 @@ where call = call.add_labels(value.unwrap_or("")); }, "count" => { - call = call.count(arg_from_str(value.unwrap_or("-0"), err, "count", "integer")); + call = call.count( value.map(|v| arg_from_str(v, err, "count", "int32")).unwrap_or(-0)); }, "aggregator" => { call = call.aggregator(value.unwrap_or("")); @@ -534,7 +533,7 @@ where call = call.add_labels(value.unwrap_or("")); }, "count" => { - call = call.count(arg_from_str(value.unwrap_or("-0"), err, "count", "integer")); + call = call.count( value.map(|v| arg_from_str(v, err, "count", "int32")).unwrap_or(-0)); }, "aggregator" => { call = call.aggregator(value.unwrap_or("")); @@ -908,7 +907,7 @@ async fn main() { let mut app = App::new("cloudmonitoring2-beta2") 
.author("Sebastian Thiel ") - .version("4.0.1+20170501") + .version("5.0.2+20170501") .about("Accesses Google Cloud Monitoring data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudmonitoring2_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudmonitoring2_beta2/Cargo.toml b/gen/cloudmonitoring2_beta2/Cargo.toml index c2d625655c..42b6b3a388 100644 --- a/gen/cloudmonitoring2_beta2/Cargo.toml +++ b/gen/cloudmonitoring2_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudmonitoring2_beta2" -version = "5.0.2-beta-1+20170501" +version = "5.0.2+20170501" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Monitoring (protocol v2beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudmonitoring2_beta2" homepage = "https://cloud.google.com/monitoring/v2beta2/" -documentation = "https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501" +documentation = "https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501" license = "MIT" keywords = ["cloudmonitoring", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudmonitoring2_beta2/README.md b/gen/cloudmonitoring2_beta2/README.md index 8fbc72ee02..9df74756ae 100644 --- a/gen/cloudmonitoring2_beta2/README.md +++ b/gen/cloudmonitoring2_beta2/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-cloudmonitoring2_beta2` library allows access to all features of the *Google Cloud Monitoring* service. -This documentation was generated from *Cloud Monitoring* crate version *5.0.2-beta-1+20170501*, where *20170501* is the exact revision of the *cloudmonitoring:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Monitoring* crate version *5.0.2+20170501*, where *20170501* is the exact revision of the *cloudmonitoring:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Monitoring* *v2_beta2* API can be found at the [official documentation site](https://cloud.google.com/monitoring/v2beta2/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/CloudMonitoring) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/CloudMonitoring) ... -* [metric descriptors](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/api::MetricDescriptor) - * [*create*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/api::MetricDescriptorCreateCall), [*delete*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/api::MetricDescriptorDeleteCall) and [*list*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/api::MetricDescriptorListCall) +* [metric descriptors](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/api::MetricDescriptor) + * [*create*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/api::MetricDescriptorCreateCall), [*delete*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/api::MetricDescriptorDeleteCall) and [*list*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/api::MetricDescriptorListCall) * timeseries - * 
[*list*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/api::TimeseryListCall) and [*write*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/api::TimeseryWriteCall) -* [timeseries descriptors](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/api::TimeseriesDescriptor) - * [*list*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/api::TimeseriesDescriptorListCall) + * [*list*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/api::TimeseryListCall) and [*write*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/api::TimeseryWriteCall) +* [timeseries descriptors](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/api::TimeseriesDescriptor) + * [*list*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/api::TimeseriesDescriptorListCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/CloudMonitoring)** +* **[Hub](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/CloudMonitoring)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::Part)** + * **[Parts](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2-beta-1+20170501/google_cloudmonitoring2_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudmonitoring2_beta2/5.0.2+20170501/google_cloudmonitoring2_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudmonitoring2_beta2/src/api.rs b/gen/cloudmonitoring2_beta2/src/api.rs index 88d530193e..11f79fc2fa 100644 --- a/gen/cloudmonitoring2_beta2/src/api.rs +++ b/gen/cloudmonitoring2_beta2/src/api.rs @@ -132,7 +132,7 @@ impl<'a, S> CloudMonitoring { CloudMonitoring { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/cloudmonitoring/v2beta2/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> CloudMonitoring { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudmonitoring2_beta2/src/client.rs b/gen/cloudmonitoring2_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudmonitoring2_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudmonitoring2_beta2/src/lib.rs b/gen/cloudmonitoring2_beta2/src/lib.rs index 83bb21a4b7..b94856481f 100644 --- a/gen/cloudmonitoring2_beta2/src/lib.rs +++ b/gen/cloudmonitoring2_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Monitoring* crate version *5.0.2-beta-1+20170501*, where *20170501* is the exact revision of the *cloudmonitoring:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Monitoring* crate version *5.0.2+20170501*, where *20170501* is the exact revision of the *cloudmonitoring:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Monitoring* *v2_beta2* API can be found at the //! [official documentation site](https://cloud.google.com/monitoring/v2beta2/). diff --git a/gen/cloudprivatecatalog1_beta1-cli/Cargo.toml b/gen/cloudprivatecatalog1_beta1-cli/Cargo.toml index f442d3e783..c7e811369d 100644 --- a/gen/cloudprivatecatalog1_beta1-cli/Cargo.toml +++ b/gen/cloudprivatecatalog1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudprivatecatalog1_beta1-cli" -version = "4.0.1+20200405" +version = "5.0.2+20200405" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Private Catalog (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprivatecatalog1_beta1-cli" @@ -20,13 +20,13 @@ name = "cloudprivatecatalog1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudprivatecatalog1_beta1] path = "../cloudprivatecatalog1_beta1" -version = "4.0.1+20200405" +version = "5.0.2+20200405" + diff --git a/gen/cloudprivatecatalog1_beta1-cli/README.md 
b/gen/cloudprivatecatalog1_beta1-cli/README.md index aab59f4a16..81c7f8d950 100644 --- a/gen/cloudprivatecatalog1_beta1-cli/README.md +++ b/gen/cloudprivatecatalog1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Private Catalog* API at revision *20200405*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Private Catalog* API at revision *20200405*. The CLI is at version *5.0.2*. ```bash cloudprivatecatalog1-beta1 [options] diff --git a/gen/cloudprivatecatalog1_beta1-cli/mkdocs.yml b/gen/cloudprivatecatalog1_beta1-cli/mkdocs.yml index 255e6dc442..06d59bc187 100644 --- a/gen/cloudprivatecatalog1_beta1-cli/mkdocs.yml +++ b/gen/cloudprivatecatalog1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Private Catalog v4.0.1+20200405 +site_name: Cloud Private Catalog v5.0.2+20200405 site_url: http://byron.github.io/google-apis-rs/google-cloudprivatecatalog1_beta1-cli site_description: A complete library to interact with Cloud Private Catalog (protocol v1beta1) @@ -7,17 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprivatecata docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_catalogs-search.md', 'Folders', 'Catalogs Search'] -- ['folders_products-search.md', 'Folders', 'Products Search'] -- ['folders_versions-search.md', 'Folders', 'Versions Search'] -- ['organizations_catalogs-search.md', 'Organizations', 'Catalogs Search'] -- ['organizations_products-search.md', 'Organizations', 'Products Search'] -- ['organizations_versions-search.md', 'Organizations', 'Versions Search'] -- ['projects_catalogs-search.md', 'Projects', 'Catalogs Search'] -- ['projects_products-search.md', 'Projects', 'Products Search'] -- ['projects_versions-search.md', 'Projects', 'Versions Search'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Catalogs Search': 
'folders_catalogs-search.md' + - 'Products Search': 'folders_products-search.md' + - 'Versions Search': 'folders_versions-search.md' +- 'Organizations': + - 'Catalogs Search': 'organizations_catalogs-search.md' + - 'Products Search': 'organizations_products-search.md' + - 'Versions Search': 'organizations_versions-search.md' +- 'Projects': + - 'Catalogs Search': 'projects_catalogs-search.md' + - 'Products Search': 'projects_products-search.md' + - 'Versions Search': 'projects_versions-search.md' theme: readthedocs diff --git a/gen/cloudprivatecatalog1_beta1-cli/src/client.rs b/gen/cloudprivatecatalog1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudprivatecatalog1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudprivatecatalog1_beta1-cli/src/main.rs b/gen/cloudprivatecatalog1_beta1-cli/src/main.rs index 483ae3f343..4e37505e40 100644 --- a/gen/cloudprivatecatalog1_beta1-cli/src/main.rs +++ b/gen/cloudprivatecatalog1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudprivatecatalog1_beta1::{api, Error, oauth2}; +use google_cloudprivatecatalog1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -64,7 +63,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -126,7 +125,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -188,7 +187,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -250,7 +249,7 @@ where call = call.page_token(value.unwrap_or("")); 
}, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -312,7 +311,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -374,7 +373,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -436,7 +435,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -498,7 +497,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -560,7 +559,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -976,7 +975,7 @@ async fn main() { let mut app = App::new("cloudprivatecatalog1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20200405") + .version("5.0.2+20200405") 
.about("Enable cloud users to discover enterprise catalogs and products in their organizations.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudprivatecatalog1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudprivatecatalog1_beta1/Cargo.toml b/gen/cloudprivatecatalog1_beta1/Cargo.toml index 75ab3e83f5..9bd7fc619f 100644 --- a/gen/cloudprivatecatalog1_beta1/Cargo.toml +++ b/gen/cloudprivatecatalog1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudprivatecatalog1_beta1" -version = "5.0.2-beta-1+20200405" +version = "5.0.2+20200405" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Private Catalog (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprivatecatalog1_beta1" homepage = "https://cloud.google.com/private-catalog/" -documentation = "https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405" +documentation = "https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405" license = "MIT" keywords = ["cloudprivatecatalog", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudprivatecatalog1_beta1/README.md b/gen/cloudprivatecatalog1_beta1/README.md index 2dafd55df4..b0c05aba17 100644 --- a/gen/cloudprivatecatalog1_beta1/README.md +++ b/gen/cloudprivatecatalog1_beta1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-cloudprivatecatalog1_beta1` library allows access to all features of the *Google Cloud Private Catalog* service. -This documentation was generated from *Cloud Private Catalog* crate version *5.0.2-beta-1+20200405*, where *20200405* is the exact revision of the *cloudprivatecatalog:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Private Catalog* crate version *5.0.2+20200405*, where *20200405* is the exact revision of the *cloudprivatecatalog:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Private Catalog* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/private-catalog/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/CloudPrivateCatalog) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/CloudPrivateCatalog) ... * folders - * [*catalogs search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::FolderCatalogSearchCall), [*products search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::FolderProductSearchCall) and [*versions search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::FolderVersionSearchCall) + * [*catalogs search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::FolderCatalogSearchCall), [*products search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::FolderProductSearchCall) and [*versions search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::FolderVersionSearchCall) * organizations - * [*catalogs search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::OrganizationCatalogSearchCall), [*products 
search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::OrganizationProductSearchCall) and [*versions search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::OrganizationVersionSearchCall) + * [*catalogs search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::OrganizationCatalogSearchCall), [*products search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::OrganizationProductSearchCall) and [*versions search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::OrganizationVersionSearchCall) * projects - * [*catalogs search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::ProjectCatalogSearchCall), [*products search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::ProjectProductSearchCall) and [*versions search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/api::ProjectVersionSearchCall) + * [*catalogs search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::ProjectCatalogSearchCall), [*products search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::ProjectProductSearchCall) and [*versions search*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/api::ProjectVersionSearchCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/CloudPrivateCatalog)** +* **[Hub](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/CloudPrivateCatalog)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalog1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudprivatecatalog1_beta1/5.0.2+20200405/google_cloudprivatecatalog1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudprivatecatalog1_beta1/src/api.rs b/gen/cloudprivatecatalog1_beta1/src/api.rs index 2e947277e0..86f6c46652 100644 --- a/gen/cloudprivatecatalog1_beta1/src/api.rs +++ b/gen/cloudprivatecatalog1_beta1/src/api.rs @@ -122,7 +122,7 @@ impl<'a, S> CloudPrivateCatalog { CloudPrivateCatalog { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudprivatecatalog.googleapis.com/".to_string(), _root_url: "https://cloudprivatecatalog.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> CloudPrivateCatalog { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudprivatecatalog1_beta1/src/client.rs b/gen/cloudprivatecatalog1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudprivatecatalog1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudprivatecatalog1_beta1/src/lib.rs b/gen/cloudprivatecatalog1_beta1/src/lib.rs index 16ebd7ff15..667c00c667 100644 --- a/gen/cloudprivatecatalog1_beta1/src/lib.rs +++ b/gen/cloudprivatecatalog1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Private Catalog* crate version *5.0.2-beta-1+20200405*, where *20200405* is the exact revision of the *cloudprivatecatalog:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Private Catalog* crate version *5.0.2+20200405*, where *20200405* is the exact revision of the *cloudprivatecatalog:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Private Catalog* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/private-catalog/). diff --git a/gen/cloudprivatecatalogproducer1_beta1-cli/Cargo.toml b/gen/cloudprivatecatalogproducer1_beta1-cli/Cargo.toml index ee37979b38..cdb381a7fb 100644 --- a/gen/cloudprivatecatalogproducer1_beta1-cli/Cargo.toml +++ b/gen/cloudprivatecatalogproducer1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudprivatecatalogproducer1_beta1-cli" -version = "4.0.1+20200405" +version = "5.0.2+20200405" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud PrivateCatalog Producer (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprivatecatalogproducer1_beta1-cli" @@ -20,13 +20,13 @@ name = "cloudprivatecatalogproducer1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudprivatecatalogproducer1_beta1] path = "../cloudprivatecatalogproducer1_beta1" -version = "4.0.1+20200405" +version = 
"5.0.2+20200405" + diff --git a/gen/cloudprivatecatalogproducer1_beta1-cli/README.md b/gen/cloudprivatecatalogproducer1_beta1-cli/README.md index 70cd927ecd..9e38012716 100644 --- a/gen/cloudprivatecatalogproducer1_beta1-cli/README.md +++ b/gen/cloudprivatecatalogproducer1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud PrivateCatalog Producer* API at revision *20200405*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud PrivateCatalog Producer* API at revision *20200405*. The CLI is at version *5.0.2*. ```bash cloudprivatecatalogproducer1-beta1 [options] diff --git a/gen/cloudprivatecatalogproducer1_beta1-cli/mkdocs.yml b/gen/cloudprivatecatalogproducer1_beta1-cli/mkdocs.yml index 457681fe18..ba13122156 100644 --- a/gen/cloudprivatecatalogproducer1_beta1-cli/mkdocs.yml +++ b/gen/cloudprivatecatalogproducer1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud PrivateCatalog Producer v4.0.1+20200405 +site_name: Cloud PrivateCatalog Producer v5.0.2+20200405 site_url: http://byron.github.io/google-apis-rs/google-cloudprivatecatalogproducer1_beta1-cli site_description: A complete library to interact with Cloud PrivateCatalog Producer (protocol v1beta1) @@ -7,37 +7,39 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprivatecata docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['catalogs_associations-create.md', 'Catalogs', 'Associations Create'] -- ['catalogs_associations-delete.md', 'Catalogs', 'Associations Delete'] -- ['catalogs_associations-get.md', 'Catalogs', 'Associations Get'] -- ['catalogs_associations-list.md', 'Catalogs', 'Associations List'] -- ['catalogs_create.md', 'Catalogs', 'Create'] -- ['catalogs_delete.md', 'Catalogs', 'Delete'] -- ['catalogs_get.md', 'Catalogs', 'Get'] -- ['catalogs_get-iam-policy.md', 'Catalogs', 'Get Iam Policy'] -- 
['catalogs_list.md', 'Catalogs', 'List'] -- ['catalogs_patch.md', 'Catalogs', 'Patch'] -- ['catalogs_products-copy.md', 'Catalogs', 'Products Copy'] -- ['catalogs_products-create.md', 'Catalogs', 'Products Create'] -- ['catalogs_products-delete.md', 'Catalogs', 'Products Delete'] -- ['catalogs_products-get.md', 'Catalogs', 'Products Get'] -- ['catalogs_products-icons-upload.md', 'Catalogs', 'Products Icons Upload'] -- ['catalogs_products-list.md', 'Catalogs', 'Products List'] -- ['catalogs_products-patch.md', 'Catalogs', 'Products Patch'] -- ['catalogs_products-versions-create.md', 'Catalogs', 'Products Versions Create'] -- ['catalogs_products-versions-delete.md', 'Catalogs', 'Products Versions Delete'] -- ['catalogs_products-versions-get.md', 'Catalogs', 'Products Versions Get'] -- ['catalogs_products-versions-list.md', 'Catalogs', 'Products Versions List'] -- ['catalogs_products-versions-patch.md', 'Catalogs', 'Products Versions Patch'] -- ['catalogs_set-iam-policy.md', 'Catalogs', 'Set Iam Policy'] -- ['catalogs_test-iam-permissions.md', 'Catalogs', 'Test Iam Permissions'] -- ['catalogs_undelete.md', 'Catalogs', 'Undelete'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] +nav: +- Home: 'index.md' +- 'Catalogs': + - 'Associations Create': 'catalogs_associations-create.md' + - 'Associations Delete': 'catalogs_associations-delete.md' + - 'Associations Get': 'catalogs_associations-get.md' + - 'Associations List': 'catalogs_associations-list.md' + - 'Create': 'catalogs_create.md' + - 'Delete': 'catalogs_delete.md' + - 'Get': 'catalogs_get.md' + - 'Get Iam Policy': 'catalogs_get-iam-policy.md' + - 'List': 'catalogs_list.md' + - 'Patch': 'catalogs_patch.md' + - 'Products Copy': 'catalogs_products-copy.md' + - 'Products Create': 'catalogs_products-create.md' + - 'Products Delete': 'catalogs_products-delete.md' + - 
'Products Get': 'catalogs_products-get.md' + - 'Products Icons Upload': 'catalogs_products-icons-upload.md' + - 'Products List': 'catalogs_products-list.md' + - 'Products Patch': 'catalogs_products-patch.md' + - 'Products Versions Create': 'catalogs_products-versions-create.md' + - 'Products Versions Delete': 'catalogs_products-versions-delete.md' + - 'Products Versions Get': 'catalogs_products-versions-get.md' + - 'Products Versions List': 'catalogs_products-versions-list.md' + - 'Products Versions Patch': 'catalogs_products-versions-patch.md' + - 'Set Iam Policy': 'catalogs_set-iam-policy.md' + - 'Test Iam Permissions': 'catalogs_test-iam-permissions.md' + - 'Undelete': 'catalogs_undelete.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' theme: readthedocs diff --git a/gen/cloudprivatecatalogproducer1_beta1-cli/src/client.rs b/gen/cloudprivatecatalogproducer1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudprivatecatalogproducer1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. 
Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudprivatecatalogproducer1_beta1-cli/src/main.rs b/gen/cloudprivatecatalogproducer1_beta1-cli/src/main.rs index 52203fc089..d0a4bf5c54 100644 --- a/gen/cloudprivatecatalogproducer1_beta1-cli/src/main.rs +++ b/gen/cloudprivatecatalogproducer1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudprivatecatalogproducer1_beta1::{api, Error, oauth2}; +use google_cloudprivatecatalogproducer1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -252,7 +251,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -398,7 +397,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -506,7 +505,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| 
arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -568,7 +567,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -662,7 +661,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1084,7 +1083,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1180,7 +1179,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1431,7 +1430,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1523,7 +1522,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2026,7 +2025,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -3023,7 +3022,7 @@ async fn main() { let mut app = App::new("cloudprivatecatalogproducer1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20200405") + .version("5.0.2+20200405") .about("Enables cloud users to manage and share enterprise catalogs intheir organizations.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudprivatecatalogproducer1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudprivatecatalogproducer1_beta1/Cargo.toml b/gen/cloudprivatecatalogproducer1_beta1/Cargo.toml index 397ddfb20c..d353cc865b 100644 --- a/gen/cloudprivatecatalogproducer1_beta1/Cargo.toml +++ b/gen/cloudprivatecatalogproducer1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudprivatecatalogproducer1_beta1" -version = "5.0.2-beta-1+20200405" +version = "5.0.2+20200405" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud PrivateCatalog Producer (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprivatecatalogproducer1_beta1" homepage = "https://cloud.google.com/private-catalog/" -documentation = "https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405" +documentation = "https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405" license = "MIT" keywords = ["cloudprivatecatalogp", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudprivatecatalogproducer1_beta1/README.md b/gen/cloudprivatecatalogproducer1_beta1/README.md index 
d3b5667013..afc54bd9c0 100644 --- a/gen/cloudprivatecatalogproducer1_beta1/README.md +++ b/gen/cloudprivatecatalogproducer1_beta1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-cloudprivatecatalogproducer1_beta1` library allows access to all features of the *Google Cloud PrivateCatalog Producer* service. -This documentation was generated from *Cloud PrivateCatalog Producer* crate version *5.0.2-beta-1+20200405*, where *20200405* is the exact revision of the *cloudprivatecatalogproducer:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud PrivateCatalog Producer* crate version *5.0.2+20200405*, where *20200405* is the exact revision of the *cloudprivatecatalogproducer:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud PrivateCatalog Producer* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/private-catalog/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/CloudPrivateCatalogProducer) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/CloudPrivateCatalogProducer) ... 
* catalogs - * [*associations create*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogAssociationCreateCall), [*associations delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogAssociationDeleteCall), [*associations get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogAssociationGetCall), [*associations list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogAssociationListCall), [*create*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogCreateCall), [*delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogDeleteCall), [*get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogGetCall), [*get iam policy*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogGetIamPolicyCall), [*list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogListCall), [*patch*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogPatchCall), [*products copy*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductCopyCall), [*products 
create*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductCreateCall), [*products delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductDeleteCall), [*products get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductGetCall), [*products icons upload*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductIconUploadCall), [*products list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductListCall), [*products patch*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductPatchCall), [*products versions create*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionCreateCall), [*products versions delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionDeleteCall), [*products versions get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionGetCall), [*products versions list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionListCall), [*products versions patch*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionPatchCall), [*set iam 
policy*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogTestIamPermissionCall) and [*undelete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogUndeleteCall) + * [*associations create*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogAssociationCreateCall), [*associations delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogAssociationDeleteCall), [*associations get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogAssociationGetCall), [*associations list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogAssociationListCall), [*create*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogCreateCall), [*delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogDeleteCall), [*get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogGetCall), [*get iam policy*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogGetIamPolicyCall), [*list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogListCall), 
[*patch*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogPatchCall), [*products copy*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductCopyCall), [*products create*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductCreateCall), [*products delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductDeleteCall), [*products get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductGetCall), [*products icons upload*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductIconUploadCall), [*products list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductListCall), [*products patch*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductPatchCall), [*products versions create*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionCreateCall), [*products versions delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionDeleteCall), [*products versions get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionGetCall), [*products versions 
list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionListCall), [*products versions patch*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogProductVersionPatchCall), [*set iam policy*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogTestIamPermissionCall) and [*undelete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::CatalogUndeleteCall) * operations - * [*cancel*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::OperationCancelCall), [*delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::OperationDeleteCall), [*get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::OperationGetCall) and [*list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/api::OperationListCall) + * [*cancel*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::OperationCancelCall), [*delete*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::OperationDeleteCall), [*get*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::OperationGetCall) and 
[*list*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/api::OperationListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/CloudPrivateCatalogProducer)** +* **[Hub](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/CloudPrivateCatalogProducer)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2-beta-1+20200405/google_cloudprivatecatalogproducer1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudprivatecatalogproducer1_beta1/5.0.2+20200405/google_cloudprivatecatalogproducer1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudprivatecatalogproducer1_beta1/src/api.rs b/gen/cloudprivatecatalogproducer1_beta1/src/api.rs index 066d8e462c..3d671ebfd6 100644 --- a/gen/cloudprivatecatalogproducer1_beta1/src/api.rs +++ b/gen/cloudprivatecatalogproducer1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudPrivateCatalogProducer { CloudPrivateCatalogProducer { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudprivatecatalogproducer.googleapis.com/".to_string(), _root_url: "https://cloudprivatecatalogproducer.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> CloudPrivateCatalogProducer { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudprivatecatalogproducer1_beta1/src/client.rs b/gen/cloudprivatecatalogproducer1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudprivatecatalogproducer1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum 
Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. 
The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. 
- fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. 
- /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. 
- MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." 
- ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. -pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. 
-#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudprivatecatalogproducer1_beta1/src/lib.rs b/gen/cloudprivatecatalogproducer1_beta1/src/lib.rs index 4e49fa57d8..505547df56 100644 --- a/gen/cloudprivatecatalogproducer1_beta1/src/lib.rs +++ b/gen/cloudprivatecatalogproducer1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud PrivateCatalog Producer* crate version *5.0.2-beta-1+20200405*, where *20200405* is the exact revision of the *cloudprivatecatalogproducer:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud PrivateCatalog Producer* crate version *5.0.2+20200405*, where *20200405* is the exact revision of the *cloudprivatecatalogproducer:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud PrivateCatalog Producer* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/private-catalog/). diff --git a/gen/cloudprofiler2-cli/Cargo.toml b/gen/cloudprofiler2-cli/Cargo.toml index 4060df6e95..55b1a7e710 100644 --- a/gen/cloudprofiler2-cli/Cargo.toml +++ b/gen/cloudprofiler2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudprofiler2-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Profiler (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprofiler2-cli" @@ -20,13 +20,13 @@ name = "cloudprofiler2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudprofiler2] path = "../cloudprofiler2" -version = "4.0.1+20220228" +version = "5.0.2+20230123" + diff --git a/gen/cloudprofiler2-cli/README.md b/gen/cloudprofiler2-cli/README.md index 82ef88304d..ca61be6ce5 100644 --- 
a/gen/cloudprofiler2-cli/README.md +++ b/gen/cloudprofiler2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Profiler* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Profiler* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash cloudprofiler2 [options] diff --git a/gen/cloudprofiler2-cli/mkdocs.yml b/gen/cloudprofiler2-cli/mkdocs.yml index bea8bd11f2..35a3874901 100644 --- a/gen/cloudprofiler2-cli/mkdocs.yml +++ b/gen/cloudprofiler2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Profiler v4.0.1+20220228 +site_name: Cloud Profiler v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-cloudprofiler2-cli site_description: A complete library to interact with Cloud Profiler (protocol v2) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprofiler2-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_profiles-create.md', 'Projects', 'Profiles Create'] -- ['projects_profiles-create-offline.md', 'Projects', 'Profiles Create Offline'] -- ['projects_profiles-patch.md', 'Projects', 'Profiles Patch'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Profiles Create': 'projects_profiles-create.md' + - 'Profiles Create Offline': 'projects_profiles-create-offline.md' + - 'Profiles Patch': 'projects_profiles-patch.md' theme: readthedocs diff --git a/gen/cloudprofiler2-cli/src/client.rs b/gen/cloudprofiler2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudprofiler2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use 
std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudprofiler2-cli/src/main.rs b/gen/cloudprofiler2-cli/src/main.rs index e8fbd16e23..3875231ad7 100644 --- a/gen/cloudprofiler2-cli/src/main.rs +++ b/gen/cloudprofiler2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudprofiler2::{api, Error, oauth2}; +use google_cloudprofiler2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -278,7 +277,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -513,7 +512,7 @@ async fn main() { let mut app = App::new("cloudprofiler2") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230123") .about("Manages continuous profiling information.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudprofiler2_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudprofiler2/Cargo.toml b/gen/cloudprofiler2/Cargo.toml index a6f918666a..46df4b0a19 100644 --- a/gen/cloudprofiler2/Cargo.toml +++ b/gen/cloudprofiler2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudprofiler2" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud 
Profiler (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudprofiler2" homepage = "https://cloud.google.com/profiler/" -documentation = "https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-cloudprofiler2/5.0.2+20230123" license = "MIT" keywords = ["cloudprofiler", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudprofiler2/README.md b/gen/cloudprofiler2/README.md index 240229388a..9b77b7d54f 100644 --- a/gen/cloudprofiler2/README.md +++ b/gen/cloudprofiler2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudprofiler2` library allows access to all features of the *Google Cloud Profiler* service. -This documentation was generated from *Cloud Profiler* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *cloudprofiler:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Profiler* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *cloudprofiler:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Profiler* *v2* API can be found at the [official documentation site](https://cloud.google.com/profiler/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/CloudProfiler) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/CloudProfiler) ... 
* projects - * [*profiles create*](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/api::ProjectProfileCreateCall), [*profiles create offline*](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/api::ProjectProfileCreateOfflineCall) and [*profiles patch*](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/api::ProjectProfilePatchCall) + * [*profiles create*](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/api::ProjectProfileCreateCall), [*profiles create offline*](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/api::ProjectProfileCreateOfflineCall) and [*profiles patch*](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/api::ProjectProfilePatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/CloudProfiler)** +* **[Hub](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/CloudProfiler)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::Part)** + * **[Parts](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::RequestValue) and -[decodable](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::RequestValue) and +[decodable](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudprofiler2/5.0.2-beta-1+20230123/google_cloudprofiler2/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudprofiler2/5.0.2+20230123/google_cloudprofiler2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudprofiler2/src/api.rs b/gen/cloudprofiler2/src/api.rs index 91b4399a27..ad99d9b3c9 100644 --- a/gen/cloudprofiler2/src/api.rs +++ b/gen/cloudprofiler2/src/api.rs @@ -134,7 +134,7 @@ impl<'a, S> CloudProfiler { CloudProfiler { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudprofiler.googleapis.com/".to_string(), _root_url: "https://cloudprofiler.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> CloudProfiler { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudprofiler2/src/client.rs b/gen/cloudprofiler2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudprofiler2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudprofiler2/src/lib.rs b/gen/cloudprofiler2/src/lib.rs index 377fe281b8..945297da14 100644 --- a/gen/cloudprofiler2/src/lib.rs +++ b/gen/cloudprofiler2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Profiler* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *cloudprofiler:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Profiler* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *cloudprofiler:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Profiler* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/profiler/). diff --git a/gen/cloudresourcemanager1-cli/Cargo.toml b/gen/cloudresourcemanager1-cli/Cargo.toml index 21b9171227..4d69c899d0 100644 --- a/gen/cloudresourcemanager1-cli/Cargo.toml +++ b/gen/cloudresourcemanager1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudresourcemanager1-cli" -version = "4.0.1+20220306" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Resource Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourcemanager1-cli" @@ -20,13 +20,13 @@ name = "cloudresourcemanager1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudresourcemanager1] path = "../cloudresourcemanager1" -version = "4.0.1+20220306" +version = "5.0.2+20230115" + diff --git a/gen/cloudresourcemanager1-cli/README.md b/gen/cloudresourcemanager1-cli/README.md index 79da8628e0..d8d8f6d347 100644 --- 
a/gen/cloudresourcemanager1-cli/README.md +++ b/gen/cloudresourcemanager1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Resource Manager* API at revision *20220306*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Resource Manager* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash cloudresourcemanager1 [options] diff --git a/gen/cloudresourcemanager1-cli/mkdocs.yml b/gen/cloudresourcemanager1-cli/mkdocs.yml index 8d30fb6006..b1916e308a 100644 --- a/gen/cloudresourcemanager1-cli/mkdocs.yml +++ b/gen/cloudresourcemanager1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Resource Manager v4.0.1+20220306 +site_name: Cloud Resource Manager v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-cloudresourcemanager1-cli site_description: A complete library to interact with Cloud Resource Manager (protocol v1) @@ -7,46 +7,51 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourceman docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_clear-org-policy.md', 'Folders', 'Clear Org Policy'] -- ['folders_get-effective-org-policy.md', 'Folders', 'Get Effective Org Policy'] -- ['folders_get-org-policy.md', 'Folders', 'Get Org Policy'] -- ['folders_list-available-org-policy-constraints.md', 'Folders', 'List Available Org Policy Constraints'] -- ['folders_list-org-policies.md', 'Folders', 'List Org Policies'] -- ['folders_set-org-policy.md', 'Folders', 'Set Org Policy'] -- ['liens_create.md', 'Liens', 'Create'] -- ['liens_delete.md', 'Liens', 'Delete'] -- ['liens_get.md', 'Liens', 'Get'] -- ['liens_list.md', 'Liens', 'List'] -- ['operations_get.md', 'Operations', 'Get'] -- ['organizations_clear-org-policy.md', 'Organizations', 'Clear Org Policy'] -- ['organizations_get.md', 'Organizations', 'Get'] -- 
['organizations_get-effective-org-policy.md', 'Organizations', 'Get Effective Org Policy'] -- ['organizations_get-iam-policy.md', 'Organizations', 'Get Iam Policy'] -- ['organizations_get-org-policy.md', 'Organizations', 'Get Org Policy'] -- ['organizations_list-available-org-policy-constraints.md', 'Organizations', 'List Available Org Policy Constraints'] -- ['organizations_list-org-policies.md', 'Organizations', 'List Org Policies'] -- ['organizations_search.md', 'Organizations', 'Search'] -- ['organizations_set-iam-policy.md', 'Organizations', 'Set Iam Policy'] -- ['organizations_set-org-policy.md', 'Organizations', 'Set Org Policy'] -- ['organizations_test-iam-permissions.md', 'Organizations', 'Test Iam Permissions'] -- ['projects_clear-org-policy.md', 'Projects', 'Clear Org Policy'] -- ['projects_create.md', 'Projects', 'Create'] -- ['projects_delete.md', 'Projects', 'Delete'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_get-ancestry.md', 'Projects', 'Get Ancestry'] -- ['projects_get-effective-org-policy.md', 'Projects', 'Get Effective Org Policy'] -- ['projects_get-iam-policy.md', 'Projects', 'Get Iam Policy'] -- ['projects_get-org-policy.md', 'Projects', 'Get Org Policy'] -- ['projects_list.md', 'Projects', 'List'] -- ['projects_list-available-org-policy-constraints.md', 'Projects', 'List Available Org Policy Constraints'] -- ['projects_list-org-policies.md', 'Projects', 'List Org Policies'] -- ['projects_set-iam-policy.md', 'Projects', 'Set Iam Policy'] -- ['projects_set-org-policy.md', 'Projects', 'Set Org Policy'] -- ['projects_test-iam-permissions.md', 'Projects', 'Test Iam Permissions'] -- ['projects_undelete.md', 'Projects', 'Undelete'] -- ['projects_update.md', 'Projects', 'Update'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Clear Org Policy': 'folders_clear-org-policy.md' + - 'Get Effective Org Policy': 'folders_get-effective-org-policy.md' + - 'Get Org Policy': 'folders_get-org-policy.md' + - 'List Available Org Policy Constraints': 
'folders_list-available-org-policy-constraints.md' + - 'List Org Policies': 'folders_list-org-policies.md' + - 'Set Org Policy': 'folders_set-org-policy.md' +- 'Liens': + - 'Create': 'liens_create.md' + - 'Delete': 'liens_delete.md' + - 'Get': 'liens_get.md' + - 'List': 'liens_list.md' +- 'Operations': + - 'Get': 'operations_get.md' +- 'Organizations': + - 'Clear Org Policy': 'organizations_clear-org-policy.md' + - 'Get': 'organizations_get.md' + - 'Get Effective Org Policy': 'organizations_get-effective-org-policy.md' + - 'Get Iam Policy': 'organizations_get-iam-policy.md' + - 'Get Org Policy': 'organizations_get-org-policy.md' + - 'List Available Org Policy Constraints': 'organizations_list-available-org-policy-constraints.md' + - 'List Org Policies': 'organizations_list-org-policies.md' + - 'Search': 'organizations_search.md' + - 'Set Iam Policy': 'organizations_set-iam-policy.md' + - 'Set Org Policy': 'organizations_set-org-policy.md' + - 'Test Iam Permissions': 'organizations_test-iam-permissions.md' +- 'Projects': + - 'Clear Org Policy': 'projects_clear-org-policy.md' + - 'Create': 'projects_create.md' + - 'Delete': 'projects_delete.md' + - 'Get': 'projects_get.md' + - 'Get Ancestry': 'projects_get-ancestry.md' + - 'Get Effective Org Policy': 'projects_get-effective-org-policy.md' + - 'Get Iam Policy': 'projects_get-iam-policy.md' + - 'Get Org Policy': 'projects_get-org-policy.md' + - 'List': 'projects_list.md' + - 'List Available Org Policy Constraints': 'projects_list-available-org-policy-constraints.md' + - 'List Org Policies': 'projects_list-org-policies.md' + - 'Set Iam Policy': 'projects_set-iam-policy.md' + - 'Set Org Policy': 'projects_set-org-policy.md' + - 'Test Iam Permissions': 'projects_test-iam-permissions.md' + - 'Undelete': 'projects_undelete.md' + - 'Update': 'projects_update.md' theme: readthedocs diff --git a/gen/cloudresourcemanager1-cli/src/client.rs b/gen/cloudresourcemanager1-cli/src/client.rs deleted file mode 100644 index 
0ece418e7d..0000000000 --- a/gen/cloudresourcemanager1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => 
None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); 
- } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => 
Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match 
UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - 
-#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) 
=> { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudresourcemanager1-cli/src/main.rs b/gen/cloudresourcemanager1-cli/src/main.rs index 38dcaa3c3d..f3f1506a9d 100644 --- a/gen/cloudresourcemanager1-cli/src/main.rs +++ b/gen/cloudresourcemanager1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudresourcemanager1::{api, Error, oauth2}; +use google_cloudresourcemanager1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -780,7 +779,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2430,7 +2429,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3694,7 +3693,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3828,7 +3827,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3884,7 +3883,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4065,7 +4064,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4193,7 +4192,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4249,7 +4248,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4333,7 +4332,7 @@ async fn main() { let mut app = App::new("cloudresourcemanager1") .author("Sebastian Thiel ") - .version("4.0.1+20220306") + .version("5.0.2+20230115") .about("Creates, reads, and updates metadata for Google Cloud Platform resource containers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudresourcemanager1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudresourcemanager1/Cargo.toml b/gen/cloudresourcemanager1/Cargo.toml index 60ae509333..8ff9e17956 100644 --- a/gen/cloudresourcemanager1/Cargo.toml +++ b/gen/cloudresourcemanager1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudresourcemanager1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Resource Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourcemanager1" homepage = "https://cloud.google.com/resource-manager" -documentation = "https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115" license = "MIT" keywords = ["cloudresourcemanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudresourcemanager1/README.md b/gen/cloudresourcemanager1/README.md 
index 187117fe10..26ae003b53 100644 --- a/gen/cloudresourcemanager1/README.md +++ b/gen/cloudresourcemanager1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-cloudresourcemanager1` library allows access to all features of the *Google Cloud Resource Manager* service. -This documentation was generated from *Cloud Resource Manager* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Resource Manager* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Resource Manager* *v1* API can be found at the [official documentation site](https://cloud.google.com/resource-manager). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/CloudResourceManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/CloudResourceManager) ... 
* folders - * [*clear org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::FolderClearOrgPolicyCall), [*get effective org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::FolderGetEffectiveOrgPolicyCall), [*get org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::FolderGetOrgPolicyCall), [*list available org policy constraints*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::FolderListAvailableOrgPolicyConstraintCall), [*list org policies*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::FolderListOrgPolicyCall) and [*set org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::FolderSetOrgPolicyCall) -* [liens](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::Lien) - * [*create*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::LienCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::LienDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::LienGetCall) and [*list*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::LienListCall) -* [operations](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::Operation) - * [*get*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OperationGetCall) -* [organizations](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::Organization) - * [*clear org 
policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationClearOrgPolicyCall), [*get*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationGetCall), [*get effective org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationGetEffectiveOrgPolicyCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationGetIamPolicyCall), [*get org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationGetOrgPolicyCall), [*list available org policy constraints*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationListAvailableOrgPolicyConstraintCall), [*list org policies*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationListOrgPolicyCall), [*search*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationSetIamPolicyCall), [*set org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationSetOrgPolicyCall) and [*test iam permissions*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::OrganizationTestIamPermissionCall) -* [projects](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::Project) - * [*clear org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectClearOrgPolicyCall), 
[*create*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectGetCall), [*get ancestry*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectGetAncestryCall), [*get effective org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectGetEffectiveOrgPolicyCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectGetIamPolicyCall), [*get org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectGetOrgPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectListCall), [*list available org policy constraints*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectListAvailableOrgPolicyConstraintCall), [*list org policies*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectListOrgPolicyCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectSetIamPolicyCall), [*set org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectSetOrgPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectTestIamPermissionCall), 
[*undelete*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectUndeleteCall) and [*update*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/api::ProjectUpdateCall) + * [*clear org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::FolderClearOrgPolicyCall), [*get effective org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::FolderGetEffectiveOrgPolicyCall), [*get org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::FolderGetOrgPolicyCall), [*list available org policy constraints*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::FolderListAvailableOrgPolicyConstraintCall), [*list org policies*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::FolderListOrgPolicyCall) and [*set org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::FolderSetOrgPolicyCall) +* [liens](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::Lien) + * [*create*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::LienCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::LienDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::LienGetCall) and [*list*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::LienListCall) +* [operations](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::Operation) + * 
[*get*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OperationGetCall) +* [organizations](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::Organization) + * [*clear org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationClearOrgPolicyCall), [*get*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationGetCall), [*get effective org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationGetEffectiveOrgPolicyCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationGetIamPolicyCall), [*get org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationGetOrgPolicyCall), [*list available org policy constraints*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationListAvailableOrgPolicyConstraintCall), [*list org policies*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationListOrgPolicyCall), [*search*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationSetIamPolicyCall), [*set org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationSetOrgPolicyCall) and [*test iam permissions*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::OrganizationTestIamPermissionCall) +* 
[projects](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::Project) + * [*clear org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectClearOrgPolicyCall), [*create*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectGetCall), [*get ancestry*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectGetAncestryCall), [*get effective org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectGetEffectiveOrgPolicyCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectGetIamPolicyCall), [*get org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectGetOrgPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectListCall), [*list available org policy constraints*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectListAvailableOrgPolicyConstraintCall), [*list org policies*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectListOrgPolicyCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectSetIamPolicyCall), [*set org policy*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectSetOrgPolicyCall), [*test iam 
permissions*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectTestIamPermissionCall), [*undelete*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectUndeleteCall) and [*update*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/api::ProjectUpdateCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/CloudResourceManager)** +* **[Hub](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/CloudResourceManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::Part)** + * **[Parts](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -144,17 +144,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -164,29 +164,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudresourcemanager1/5.0.2-beta-1+20230115/google_cloudresourcemanager1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudresourcemanager1/5.0.2+20230115/google_cloudresourcemanager1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudresourcemanager1/src/api.rs b/gen/cloudresourcemanager1/src/api.rs index b572e6fa82..76f497fb2d 100644 --- a/gen/cloudresourcemanager1/src/api.rs +++ b/gen/cloudresourcemanager1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudResourceManager { CloudResourceManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudresourcemanager.googleapis.com/".to_string(), _root_url: "https://cloudresourcemanager.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> CloudResourceManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudresourcemanager1/src/client.rs b/gen/cloudresourcemanager1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudresourcemanager1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudresourcemanager1/src/lib.rs b/gen/cloudresourcemanager1/src/lib.rs index f1ab42e864..0f894f1947 100644 --- a/gen/cloudresourcemanager1/src/lib.rs +++ b/gen/cloudresourcemanager1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Resource Manager* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Resource Manager* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Resource Manager* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/resource-manager). diff --git a/gen/cloudresourcemanager1_beta1-cli/Cargo.toml b/gen/cloudresourcemanager1_beta1-cli/Cargo.toml index 9ef4a2950e..7c391c7736 100644 --- a/gen/cloudresourcemanager1_beta1-cli/Cargo.toml +++ b/gen/cloudresourcemanager1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudresourcemanager1_beta1-cli" -version = "4.0.1+20220306" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Resource Manager (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourcemanager1_beta1-cli" @@ -20,13 +20,13 @@ name = "cloudresourcemanager1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudresourcemanager1_beta1] path = "../cloudresourcemanager1_beta1" -version = "4.0.1+20220306" +version = "5.0.2+20230115" + diff --git a/gen/cloudresourcemanager1_beta1-cli/README.md 
b/gen/cloudresourcemanager1_beta1-cli/README.md index e87905bda8..e8b03b4783 100644 --- a/gen/cloudresourcemanager1_beta1-cli/README.md +++ b/gen/cloudresourcemanager1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Resource Manager* API at revision *20220306*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Resource Manager* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash cloudresourcemanager1-beta1 [options] diff --git a/gen/cloudresourcemanager1_beta1-cli/mkdocs.yml b/gen/cloudresourcemanager1_beta1-cli/mkdocs.yml index a2cf0dd036..909918efe0 100644 --- a/gen/cloudresourcemanager1_beta1-cli/mkdocs.yml +++ b/gen/cloudresourcemanager1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Resource Manager v4.0.1+20220306 +site_name: Cloud Resource Manager v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-cloudresourcemanager1_beta1-cli site_description: A complete library to interact with Cloud Resource Manager (protocol v1beta1) @@ -7,24 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourceman docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['organizations_get.md', 'Organizations', 'Get'] -- ['organizations_get-iam-policy.md', 'Organizations', 'Get Iam Policy'] -- ['organizations_list.md', 'Organizations', 'List'] -- ['organizations_set-iam-policy.md', 'Organizations', 'Set Iam Policy'] -- ['organizations_test-iam-permissions.md', 'Organizations', 'Test Iam Permissions'] -- ['organizations_update.md', 'Organizations', 'Update'] -- ['projects_create.md', 'Projects', 'Create'] -- ['projects_delete.md', 'Projects', 'Delete'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_get-ancestry.md', 'Projects', 'Get Ancestry'] -- ['projects_get-iam-policy.md', 'Projects', 'Get Iam Policy'] -- ['projects_list.md', 
'Projects', 'List'] -- ['projects_set-iam-policy.md', 'Projects', 'Set Iam Policy'] -- ['projects_test-iam-permissions.md', 'Projects', 'Test Iam Permissions'] -- ['projects_undelete.md', 'Projects', 'Undelete'] -- ['projects_update.md', 'Projects', 'Update'] +nav: +- Home: 'index.md' +- 'Organizations': + - 'Get': 'organizations_get.md' + - 'Get Iam Policy': 'organizations_get-iam-policy.md' + - 'List': 'organizations_list.md' + - 'Set Iam Policy': 'organizations_set-iam-policy.md' + - 'Test Iam Permissions': 'organizations_test-iam-permissions.md' + - 'Update': 'organizations_update.md' +- 'Projects': + - 'Create': 'projects_create.md' + - 'Delete': 'projects_delete.md' + - 'Get': 'projects_get.md' + - 'Get Ancestry': 'projects_get-ancestry.md' + - 'Get Iam Policy': 'projects_get-iam-policy.md' + - 'List': 'projects_list.md' + - 'Set Iam Policy': 'projects_set-iam-policy.md' + - 'Test Iam Permissions': 'projects_test-iam-permissions.md' + - 'Undelete': 'projects_undelete.md' + - 'Update': 'projects_update.md' theme: readthedocs diff --git a/gen/cloudresourcemanager1_beta1-cli/src/client.rs b/gen/cloudresourcemanager1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudresourcemanager1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - 
Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudresourcemanager1_beta1-cli/src/main.rs b/gen/cloudresourcemanager1_beta1-cli/src/main.rs index 29cf36f1a9..a34f4c4314 100644 --- a/gen/cloudresourcemanager1_beta1-cli/src/main.rs +++ b/gen/cloudresourcemanager1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudresourcemanager1_beta1::{api, Error, oauth2}; +use google_cloudresourcemanager1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -202,7 +201,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -563,7 +562,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-legacy-stack" => { - call = call.use_legacy_stack(arg_from_str(value.unwrap_or("false"), err, "use-legacy-stack", "boolean")); + call = call.use_legacy_stack( value.map(|v| arg_from_str(v, err, "use-legacy-stack", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -895,7 +894,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" 
=> { call = call.filter(value.unwrap_or("")); @@ -1466,7 +1465,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1510,7 +1509,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1538,7 +1537,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1691,7 +1690,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1735,7 +1734,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1763,7 +1762,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1847,7 +1846,7 @@ async fn main() { let mut app = App::new("cloudresourcemanager1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220306") + .version("5.0.2+20230115") .about("Creates, reads, and updates metadata for Google Cloud Platform resource containers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudresourcemanager1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudresourcemanager1_beta1/Cargo.toml b/gen/cloudresourcemanager1_beta1/Cargo.toml index 31ac73d613..a2b32d2fa3 100644 --- a/gen/cloudresourcemanager1_beta1/Cargo.toml +++ b/gen/cloudresourcemanager1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudresourcemanager1_beta1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Resource Manager (protocol v1beta1)" 
repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourcemanager1_beta1" homepage = "https://cloud.google.com/resource-manager" -documentation = "https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115" license = "MIT" keywords = ["cloudresourcemanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudresourcemanager1_beta1/README.md b/gen/cloudresourcemanager1_beta1/README.md index 02553d7a75..094a2a1947 100644 --- a/gen/cloudresourcemanager1_beta1/README.md +++ b/gen/cloudresourcemanager1_beta1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-cloudresourcemanager1_beta1` library allows access to all features of the *Google Cloud Resource Manager* service. -This documentation was generated from *Cloud Resource Manager* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Resource Manager* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Resource Manager* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/resource-manager). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/CloudResourceManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/CloudResourceManager) ... 
-* [organizations](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::Organization) - * [*get*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::OrganizationGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::OrganizationGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::OrganizationListCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::OrganizationSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::OrganizationTestIamPermissionCall) and [*update*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::OrganizationUpdateCall) -* [projects](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::Project) - * [*create*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectGetCall), [*get ancestry*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectGetAncestryCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectGetIamPolicyCall), 
[*list*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectListCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectTestIamPermissionCall), [*undelete*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectUndeleteCall) and [*update*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/api::ProjectUpdateCall) +* [organizations](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::Organization) + * [*get*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::OrganizationGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::OrganizationGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::OrganizationListCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::OrganizationSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::OrganizationTestIamPermissionCall) and [*update*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::OrganizationUpdateCall) +* [projects](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::Project) + * 
[*create*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectGetCall), [*get ancestry*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectGetAncestryCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectListCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectTestIamPermissionCall), [*undelete*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectUndeleteCall) and [*update*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/api::ProjectUpdateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/CloudResourceManager)** +* **[Hub](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/CloudResourceManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2-beta-1+20230115/google_cloudresourcemanager1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudresourcemanager1_beta1/5.0.2+20230115/google_cloudresourcemanager1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudresourcemanager1_beta1/src/api.rs b/gen/cloudresourcemanager1_beta1/src/api.rs index 57d2177a62..a6a00dd258 100644 --- a/gen/cloudresourcemanager1_beta1/src/api.rs +++ b/gen/cloudresourcemanager1_beta1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudResourceManager { CloudResourceManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudresourcemanager.googleapis.com/".to_string(), _root_url: "https://cloudresourcemanager.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudResourceManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudresourcemanager1_beta1/src/client.rs b/gen/cloudresourcemanager1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudresourcemanager1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudresourcemanager1_beta1/src/lib.rs b/gen/cloudresourcemanager1_beta1/src/lib.rs index 4c6fc72e0d..d71a15a4d7 100644 --- a/gen/cloudresourcemanager1_beta1/src/lib.rs +++ b/gen/cloudresourcemanager1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Resource Manager* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Resource Manager* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Resource Manager* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/resource-manager). diff --git a/gen/cloudresourcemanager2-cli/Cargo.toml b/gen/cloudresourcemanager2-cli/Cargo.toml index b50877605e..8728d0ff3b 100644 --- a/gen/cloudresourcemanager2-cli/Cargo.toml +++ b/gen/cloudresourcemanager2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudresourcemanager2-cli" -version = "4.0.1+20220306" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Resource Manager (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourcemanager2-cli" @@ -20,13 +20,13 @@ name = "cloudresourcemanager2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudresourcemanager2] path = "../cloudresourcemanager2" -version = "4.0.1+20220306" +version = "5.0.2+20230115" + diff --git a/gen/cloudresourcemanager2-cli/README.md b/gen/cloudresourcemanager2-cli/README.md index 
d5c6fbc612..3c0bc9b56c 100644 --- a/gen/cloudresourcemanager2-cli/README.md +++ b/gen/cloudresourcemanager2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Resource Manager* API at revision *20220306*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Resource Manager* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash cloudresourcemanager2 [options] diff --git a/gen/cloudresourcemanager2-cli/mkdocs.yml b/gen/cloudresourcemanager2-cli/mkdocs.yml index 3884008985..6910ce9b17 100644 --- a/gen/cloudresourcemanager2-cli/mkdocs.yml +++ b/gen/cloudresourcemanager2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Resource Manager v4.0.1+20220306 +site_name: Cloud Resource Manager v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-cloudresourcemanager2-cli site_description: A complete library to interact with Cloud Resource Manager (protocol v2) @@ -7,20 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourceman docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_create.md', 'Folders', 'Create'] -- ['folders_delete.md', 'Folders', 'Delete'] -- ['folders_get.md', 'Folders', 'Get'] -- ['folders_get-iam-policy.md', 'Folders', 'Get Iam Policy'] -- ['folders_list.md', 'Folders', 'List'] -- ['folders_move.md', 'Folders', 'Move'] -- ['folders_patch.md', 'Folders', 'Patch'] -- ['folders_search.md', 'Folders', 'Search'] -- ['folders_set-iam-policy.md', 'Folders', 'Set Iam Policy'] -- ['folders_test-iam-permissions.md', 'Folders', 'Test Iam Permissions'] -- ['folders_undelete.md', 'Folders', 'Undelete'] -- ['operations_get.md', 'Operations', 'Get'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Create': 'folders_create.md' + - 'Delete': 'folders_delete.md' + - 'Get': 'folders_get.md' + - 'Get Iam Policy': 'folders_get-iam-policy.md' + - 
'List': 'folders_list.md' + - 'Move': 'folders_move.md' + - 'Patch': 'folders_patch.md' + - 'Search': 'folders_search.md' + - 'Set Iam Policy': 'folders_set-iam-policy.md' + - 'Test Iam Permissions': 'folders_test-iam-permissions.md' + - 'Undelete': 'folders_undelete.md' +- 'Operations': + - 'Get': 'operations_get.md' theme: readthedocs diff --git a/gen/cloudresourcemanager2-cli/src/client.rs b/gen/cloudresourcemanager2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudresourcemanager2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudresourcemanager2-cli/src/main.rs b/gen/cloudresourcemanager2-cli/src/main.rs index 63e3fc459f..3f10758183 100644 --- a/gen/cloudresourcemanager2-cli/src/main.rs +++ b/gen/cloudresourcemanager2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudresourcemanager2::{api, Error, oauth2}; +use google_cloudresourcemanager2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -340,7 +339,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "parent" => { call = call.parent(value.unwrap_or("")); @@ -349,7 +348,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -527,7 +526,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1174,7 +1173,7 @@ 
async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1296,7 +1295,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1324,7 +1323,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1405,7 +1404,7 @@ async fn main() { let mut app = App::new("cloudresourcemanager2") .author("Sebastian Thiel ") - .version("4.0.1+20220306") + .version("5.0.2+20230115") .about("Creates, reads, and updates metadata for Google Cloud Platform resource containers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudresourcemanager2/Cargo.toml b/gen/cloudresourcemanager2/Cargo.toml index e9ad427e00..72304cacf0 100644 --- a/gen/cloudresourcemanager2/Cargo.toml +++ b/gen/cloudresourcemanager2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudresourcemanager2" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Resource Manager (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourcemanager2" homepage = "https://cloud.google.com/resource-manager" -documentation = "https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115" license = "MIT" keywords = ["cloudresourcemanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudresourcemanager2/README.md b/gen/cloudresourcemanager2/README.md index 2370c7b8b4..cbd7fb0fb4 100644 --- a/gen/cloudresourcemanager2/README.md +++ b/gen/cloudresourcemanager2/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-cloudresourcemanager2` library allows access to all features of the *Google Cloud Resource Manager* service. 
-This documentation was generated from *Cloud Resource Manager* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Resource Manager* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Resource Manager* *v2* API can be found at the [official documentation site](https://cloud.google.com/resource-manager). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/CloudResourceManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/CloudResourceManager) ... 
-* [folders](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::Folder) - * [*create*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderListCall), [*move*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderMoveCall), [*patch*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderPatchCall), [*search*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderTestIamPermissionCall) and [*undelete*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::FolderUndeleteCall) -* [operations](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::Operation) - * [*get*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/api::OperationGetCall) +* [folders](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::Folder) + * 
[*create*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderListCall), [*move*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderMoveCall), [*patch*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderPatchCall), [*search*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderTestIamPermissionCall) and [*undelete*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::FolderUndeleteCall) +* [operations](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::Operation) + * [*get*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/api::OperationGetCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/CloudResourceManager)** +* 
**[Hub](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/CloudResourceManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::Part)** + * **[Parts](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::RequestValue) and -[decodable](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::RequestValue) and +[decodable](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudresourcemanager2/5.0.2-beta-1+20230115/google_cloudresourcemanager2/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudresourcemanager2/5.0.2+20230115/google_cloudresourcemanager2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudresourcemanager2/src/api.rs b/gen/cloudresourcemanager2/src/api.rs index 11866a4949..b9aaa3855b 100644 --- a/gen/cloudresourcemanager2/src/api.rs +++ b/gen/cloudresourcemanager2/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> CloudResourceManager { CloudResourceManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudresourcemanager.googleapis.com/".to_string(), _root_url: "https://cloudresourcemanager.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> CloudResourceManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudresourcemanager2/src/client.rs b/gen/cloudresourcemanager2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudresourcemanager2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudresourcemanager2/src/lib.rs b/gen/cloudresourcemanager2/src/lib.rs index 50b620cedb..0314034f64 100644 --- a/gen/cloudresourcemanager2/src/lib.rs +++ b/gen/cloudresourcemanager2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Resource Manager* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Resource Manager* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Resource Manager* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/resource-manager). diff --git a/gen/cloudresourcemanager3-cli/Cargo.toml b/gen/cloudresourcemanager3-cli/Cargo.toml index 1930451109..9fc7e9dfd7 100644 --- a/gen/cloudresourcemanager3-cli/Cargo.toml +++ b/gen/cloudresourcemanager3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudresourcemanager3-cli" -version = "4.0.1+20220306" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Resource Manager (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourcemanager3-cli" @@ -20,13 +20,13 @@ name = "cloudresourcemanager3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudresourcemanager3] path = "../cloudresourcemanager3" -version = "4.0.1+20220306" +version = "5.0.2+20230115" + diff --git a/gen/cloudresourcemanager3-cli/README.md b/gen/cloudresourcemanager3-cli/README.md index 
6191123aed..997db52cfd 100644 --- a/gen/cloudresourcemanager3-cli/README.md +++ b/gen/cloudresourcemanager3-cli/README.md @@ -25,10 +25,12 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Resource Manager* API at revision *20220306*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Resource Manager* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash cloudresourcemanager3 [options] + effective-tags + list [-p ]... [-o ] folders create (-r )... [-p ]... [-o ] delete [-p ]... [-o ] @@ -87,6 +89,9 @@ cloudresourcemanager3 [options] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] set-iam-policy (-r )... [-p ]... [-o ] + tag-holds-create (-r )... [-p ]... [-o ] + tag-holds-delete [-p ]... [-o ] + tag-holds-list [-p ]... [-o ] test-iam-permissions (-r )... [-p ]... [-o ] cloudresourcemanager3 --help diff --git a/gen/cloudresourcemanager3-cli/mkdocs.yml b/gen/cloudresourcemanager3-cli/mkdocs.yml index b0f06a8122..aa06104fa6 100644 --- a/gen/cloudresourcemanager3-cli/mkdocs.yml +++ b/gen/cloudresourcemanager3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Resource Manager v4.0.1+20220306 +site_name: Cloud Resource Manager v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-cloudresourcemanager3-cli site_description: A complete library to interact with Cloud Resource Manager (protocol v3) @@ -7,59 +7,72 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourceman docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_create.md', 'Folders', 'Create'] -- ['folders_delete.md', 'Folders', 'Delete'] -- ['folders_get.md', 'Folders', 'Get'] -- ['folders_get-iam-policy.md', 'Folders', 'Get Iam Policy'] -- ['folders_list.md', 'Folders', 'List'] -- ['folders_move.md', 'Folders', 'Move'] -- ['folders_patch.md', 'Folders', 'Patch'] -- ['folders_search.md', 'Folders', 'Search'] 
-- ['folders_set-iam-policy.md', 'Folders', 'Set Iam Policy'] -- ['folders_test-iam-permissions.md', 'Folders', 'Test Iam Permissions'] -- ['folders_undelete.md', 'Folders', 'Undelete'] -- ['liens_create.md', 'Liens', 'Create'] -- ['liens_delete.md', 'Liens', 'Delete'] -- ['liens_get.md', 'Liens', 'Get'] -- ['liens_list.md', 'Liens', 'List'] -- ['operations_get.md', 'Operations', 'Get'] -- ['organizations_get.md', 'Organizations', 'Get'] -- ['organizations_get-iam-policy.md', 'Organizations', 'Get Iam Policy'] -- ['organizations_search.md', 'Organizations', 'Search'] -- ['organizations_set-iam-policy.md', 'Organizations', 'Set Iam Policy'] -- ['organizations_test-iam-permissions.md', 'Organizations', 'Test Iam Permissions'] -- ['projects_create.md', 'Projects', 'Create'] -- ['projects_delete.md', 'Projects', 'Delete'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_get-iam-policy.md', 'Projects', 'Get Iam Policy'] -- ['projects_list.md', 'Projects', 'List'] -- ['projects_move.md', 'Projects', 'Move'] -- ['projects_patch.md', 'Projects', 'Patch'] -- ['projects_search.md', 'Projects', 'Search'] -- ['projects_set-iam-policy.md', 'Projects', 'Set Iam Policy'] -- ['projects_test-iam-permissions.md', 'Projects', 'Test Iam Permissions'] -- ['projects_undelete.md', 'Projects', 'Undelete'] -- ['tag-bindings_create.md', 'Tag Bindings', 'Create'] -- ['tag-bindings_delete.md', 'Tag Bindings', 'Delete'] -- ['tag-bindings_list.md', 'Tag Bindings', 'List'] -- ['tag-keys_create.md', 'Tag Keys', 'Create'] -- ['tag-keys_delete.md', 'Tag Keys', 'Delete'] -- ['tag-keys_get.md', 'Tag Keys', 'Get'] -- ['tag-keys_get-iam-policy.md', 'Tag Keys', 'Get Iam Policy'] -- ['tag-keys_list.md', 'Tag Keys', 'List'] -- ['tag-keys_patch.md', 'Tag Keys', 'Patch'] -- ['tag-keys_set-iam-policy.md', 'Tag Keys', 'Set Iam Policy'] -- ['tag-keys_test-iam-permissions.md', 'Tag Keys', 'Test Iam Permissions'] -- ['tag-values_create.md', 'Tag Values', 'Create'] -- ['tag-values_delete.md', 'Tag Values', 
'Delete'] -- ['tag-values_get.md', 'Tag Values', 'Get'] -- ['tag-values_get-iam-policy.md', 'Tag Values', 'Get Iam Policy'] -- ['tag-values_list.md', 'Tag Values', 'List'] -- ['tag-values_patch.md', 'Tag Values', 'Patch'] -- ['tag-values_set-iam-policy.md', 'Tag Values', 'Set Iam Policy'] -- ['tag-values_test-iam-permissions.md', 'Tag Values', 'Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Effective Tags': + - 'List': 'effective-tags_list.md' +- 'Folders': + - 'Create': 'folders_create.md' + - 'Delete': 'folders_delete.md' + - 'Get': 'folders_get.md' + - 'Get Iam Policy': 'folders_get-iam-policy.md' + - 'List': 'folders_list.md' + - 'Move': 'folders_move.md' + - 'Patch': 'folders_patch.md' + - 'Search': 'folders_search.md' + - 'Set Iam Policy': 'folders_set-iam-policy.md' + - 'Test Iam Permissions': 'folders_test-iam-permissions.md' + - 'Undelete': 'folders_undelete.md' +- 'Liens': + - 'Create': 'liens_create.md' + - 'Delete': 'liens_delete.md' + - 'Get': 'liens_get.md' + - 'List': 'liens_list.md' +- 'Operations': + - 'Get': 'operations_get.md' +- 'Organizations': + - 'Get': 'organizations_get.md' + - 'Get Iam Policy': 'organizations_get-iam-policy.md' + - 'Search': 'organizations_search.md' + - 'Set Iam Policy': 'organizations_set-iam-policy.md' + - 'Test Iam Permissions': 'organizations_test-iam-permissions.md' +- 'Projects': + - 'Create': 'projects_create.md' + - 'Delete': 'projects_delete.md' + - 'Get': 'projects_get.md' + - 'Get Iam Policy': 'projects_get-iam-policy.md' + - 'List': 'projects_list.md' + - 'Move': 'projects_move.md' + - 'Patch': 'projects_patch.md' + - 'Search': 'projects_search.md' + - 'Set Iam Policy': 'projects_set-iam-policy.md' + - 'Test Iam Permissions': 'projects_test-iam-permissions.md' + - 'Undelete': 'projects_undelete.md' +- 'Tag Bindings': + - 'Create': 'tag-bindings_create.md' + - 'Delete': 'tag-bindings_delete.md' + - 'List': 'tag-bindings_list.md' +- 'Tag Keys': + - 'Create': 'tag-keys_create.md' + - 'Delete': 
'tag-keys_delete.md' + - 'Get': 'tag-keys_get.md' + - 'Get Iam Policy': 'tag-keys_get-iam-policy.md' + - 'List': 'tag-keys_list.md' + - 'Patch': 'tag-keys_patch.md' + - 'Set Iam Policy': 'tag-keys_set-iam-policy.md' + - 'Test Iam Permissions': 'tag-keys_test-iam-permissions.md' +- 'Tag Values': + - 'Create': 'tag-values_create.md' + - 'Delete': 'tag-values_delete.md' + - 'Get': 'tag-values_get.md' + - 'Get Iam Policy': 'tag-values_get-iam-policy.md' + - 'List': 'tag-values_list.md' + - 'Patch': 'tag-values_patch.md' + - 'Set Iam Policy': 'tag-values_set-iam-policy.md' + - 'Tag Holds Create': 'tag-values_tag-holds-create.md' + - 'Tag Holds Delete': 'tag-values_tag-holds-delete.md' + - 'Tag Holds List': 'tag-values_tag-holds-list.md' + - 'Test Iam Permissions': 'tag-values_test-iam-permissions.md' theme: readthedocs diff --git a/gen/cloudresourcemanager3-cli/src/client.rs b/gen/cloudresourcemanager3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudresourcemanager3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudresourcemanager3-cli/src/main.rs b/gen/cloudresourcemanager3-cli/src/main.rs index 4243069857..f2191090e4 100644 --- a/gen/cloudresourcemanager3-cli/src/main.rs +++ b/gen/cloudresourcemanager3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudresourcemanager3::{api, Error, oauth2}; +use google_cloudresourcemanager3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,68 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _effective_tags_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.effective_tags().list(); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "parent" => { + call = call.parent(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); 
+ v.extend(["page-size", "page-token", "parent"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _folders_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -339,7 +400,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "parent" => { call = call.parent(value.unwrap_or("")); @@ -348,7 +409,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -529,7 +590,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -591,7 +652,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1103,7 +1164,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1354,7 +1415,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1865,7 +1926,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "parent" => { call = call.parent(value.unwrap_or("")); @@ -1874,7 +1935,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2057,7 +2118,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2119,7 +2180,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2466,7 +2527,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2580,7 +2641,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2658,10 +2719,12 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "namespaced-name" => Some(("namespacedName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "purpose" => Some(("purpose", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "purpose-data" => Some(("purposeData", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "short-name" => Some(("shortName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "name", "namespaced-name", "parent", "short-name", 
"update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "name", "namespaced-name", "parent", "purpose", "purpose-data", "short-name", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2676,7 +2739,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2732,7 +2795,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "etag" => { call = call.etag(value.unwrap_or("")); @@ -2934,7 +2997,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3012,10 +3075,12 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "namespaced-name" => Some(("namespacedName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "purpose" => Some(("purpose", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "purpose-data" => Some(("purposeData", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "short-name" => Some(("shortName", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "name", "namespaced-name", "parent", "short-name", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "name", "namespaced-name", "parent", "purpose", "purpose-data", "short-name", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3030,10 +3095,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3301,7 +3366,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3357,7 +3422,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "etag" => { call = call.etag(value.unwrap_or("")); @@ -3559,7 +3624,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3655,10 +3720,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3794,6 +3859,217 @@ where } } + async fn _tag_values_tag_holds_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "help-link" => Some(("helpLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "holder" => Some(("holder", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "origin" => Some(("origin", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "help-link", "holder", "name", "origin"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TagHold = json::value::from_value(object).unwrap(); + let mut call = self.hub.tag_values().tag_holds_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _tag_values_tag_holds_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.tag_values().tag_holds_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { 
+ Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _tag_values_tag_holds_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.tag_values().tag_holds_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _tag_values_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3884,6 +4160,17 @@ where let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: Option = None; match self.opt.subcommand() { + ("effective-tags", Some(opt)) => { + match opt.subcommand() { + ("list", Some(opt)) => { + call_result = self._effective_tags_list(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("effective-tags".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("folders", Some(opt)) => { match opt.subcommand() { ("create", Some(opt)) => { @@ -4092,6 +4379,15 @@ where ("set-iam-policy", Some(opt)) => { call_result = self._tag_values_set_iam_policy(opt, dry_run, &mut err).await; }, + ("tag-holds-create", Some(opt)) => { + call_result = self._tag_values_tag_holds_create(opt, dry_run, &mut err).await; + }, + ("tag-holds-delete", Some(opt)) => { + call_result = self._tag_values_tag_holds_delete(opt, dry_run, &mut err).await; + }, + ("tag-holds-list", Some(opt)) => { + call_result = self._tag_values_tag_holds_list(opt, dry_run, &mut err).await; + }, ("test-iam-permissions", Some(opt)) => { call_result = self._tag_values_test_iam_permissions(opt, dry_run, &mut err).await; }, @@ -4174,6 +4470,25 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ + ("effective-tags", "methods: 'list'", vec![ + ("list", + 
Some(r##"Return a list of effective tags for the given Google Cloud resource, as specified in `parent`."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/effective-tags_list", + vec![ + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("folders", "methods: 'create', 'delete', 'get', 'get-iam-policy', 'list', 'move', 'patch', 'search', 'set-iam-policy', 'test-iam-permissions' and 'undelete'", vec![ ("create", Some(r##"Creates a folder in the resource hierarchy. Returns an `Operation` which can be used to track the progress of the folder creation workflow. Upon success, the `Operation.response` field will be populated with the created Folder. In order to succeed, the addition of this new folder must not violate the folder naming, height, or fanout constraints. + The folder's `display_name` must be distinct from all other folders that share its parent. + The addition of the folder must not cause the active folder hierarchy to exceed a height of 10. Note, the full active + deleted folder hierarchy is allowed to reach a height of 20; this provides additional headroom when moving folders that contain deleted folders. + The addition of the folder must not cause the total number of folders under its parent to exceed 300. If the operation fails due to a folder constraint violation, some errors may be returned by the `CreateFolder` request, with status code `FAILED_PRECONDITION` and an error description. Other folder constraint violations will be communicated in the `Operation`, with the specific `PreconditionFailure` returned in the details list in the `Operation.error` field. 
The caller must have `resourcemanager.folders.create` permission on the identified parent."##), @@ -4247,7 +4562,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4363,7 +4678,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4391,7 +4706,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4582,7 +4897,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4626,7 +4941,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4654,7 +4969,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4751,7 +5066,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4862,12 +5177,12 @@ async fn main() { Some(false)), ]), ("set-iam-policy", - Some(r##"Sets the IAM access control policy for the specified project, in the format `projects/{ProjectIdOrNumber}` e.g. projects/123. CAUTION: This method will replace the existing policy, and cannot be used to append additional IAM settings. Note: Removing service accounts from policies or changing their roles can render services completely inoperable. 
It is important to understand how the service account is being used before removing or updating its roles. The following constraints apply when using `setIamPolicy()`: + Project does not support `allUsers` and `allAuthenticatedUsers` as `members` in a `Binding` of a `Policy`. + The owner role can be granted to a `user`, `serviceAccount`, or a group that is part of an organization. For example, group@myownpersonaldomain.com could be added as an owner to a project in the myownpersonaldomain.com organization, but not the examplepetstore.com organization. + Service accounts can be made owners of a project directly without any restrictions. However, to be added as an owner, a user must be invited using the Cloud Platform console and must accept the invitation. + A user cannot be granted the owner role using `setIamPolicy()`. The user must be granted the owner role using the Cloud Platform Console and must explicitly accept the invitation. + Invitations to grant the owner role cannot be sent using `setIamPolicy()`; they must be sent only using the Cloud Platform Console. + If the project is not part of an organization, there must be at least one owner who has accepted the Terms of Service (ToS) agreement in the policy. Calling `setIamPolicy()` to remove the last ToS-accepted owner from the policy will fail. This restriction also applies to legacy projects that no longer have owners who have accepted the ToS. Edits to IAM policies will be rejected until the lack of a ToS-accepting owner is rectified. If the project is part of an organization, you can remove all owners, potentially making the organization inaccessible. + Calling this method requires enabling the App Engine Admin API."##), + Some(r##"Sets the IAM access control policy for the specified project, in the format `projects/{ProjectIdOrNumber}` e.g. projects/123. CAUTION: This method will replace the existing policy, and cannot be used to append additional IAM settings. 
Note: Removing service accounts from policies or changing their roles can render services completely inoperable. It is important to understand how the service account is being used before removing or updating its roles. The following constraints apply when using `setIamPolicy()`: + Project does not support `allUsers` and `allAuthenticatedUsers` as `members` in a `Binding` of a `Policy`. + The owner role can be granted to a `user`, `serviceAccount`, or a group that is part of an organization. For example, group@myownpersonaldomain.com could be added as an owner to a project in the myownpersonaldomain.com organization, but not the examplepetstore.com organization. + Service accounts can be made owners of a project directly without any restrictions. However, to be added as an owner, a user must be invited using the Cloud Platform console and must accept the invitation. + A user cannot be granted the owner role using `setIamPolicy()`. The user must be granted the owner role using the Cloud Platform Console and must explicitly accept the invitation. + Invitations to grant the owner role cannot be sent using `setIamPolicy()`; they must be sent only using the Cloud Platform Console. + If the project is not part of an organization, there must be at least one owner who has accepted the Terms of Service (ToS) agreement in the policy. Calling `setIamPolicy()` to remove the last ToS-accepted owner from the policy will fail. This restriction also applies to legacy projects that no longer have owners who have accepted the ToS. Edits to IAM policies will be rejected until the lack of a ToS-accepting owner is rectified. If the project is part of an organization, you can remove all owners, potentially making the organization inaccessible."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/projects_set-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4895,7 +5210,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4949,7 +5264,7 @@ async fn main() { ("tag-bindings", "methods: 'create', 'delete' and 'list'", vec![ ("create", - Some(r##"Creates a TagBinding between a TagValue and a cloud resource (currently project, folder, or organization)."##), + Some(r##"Creates a TagBinding between a TagValue and a Google Cloud resource."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/tag-bindings_create", vec![ (Some(r##"kv"##), @@ -4993,7 +5308,7 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"Lists the TagBindings for the given cloud resource, as specified with `parent`. NOTE: The `parent` field is expected to be a full resource name: https://cloud.google.com/apis/design/resource_names#full_resource_name"##), + Some(r##"Lists the TagBindings for the given Google Cloud resource, as specified with `parent`. 
NOTE: The `parent` field is expected to be a full resource name: https://cloud.google.com/apis/design/resource_names#full_resource_name"##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/tag-bindings_list", vec![ (Some(r##"v"##), @@ -5012,7 +5327,7 @@ async fn main() { ("tag-keys", "methods: 'create', 'delete', 'get', 'get-iam-policy', 'list', 'patch', 'set-iam-policy' and 'test-iam-permissions'", vec![ ("create", - Some(r##"Creates a new TagKey. If another request with the same parameters is sent while the original request is in process, the second request will receive an error. A maximum of 300 TagKeys can exist under a parent at any given time."##), + Some(r##"Creates a new TagKey. If another request with the same parameters is sent while the original request is in process, the second request will receive an error. A maximum of 1000 TagKeys can exist under a parent at any given time."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/tag-keys_create", vec![ (Some(r##"kv"##), @@ -5083,7 +5398,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5155,7 +5470,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5183,7 +5498,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5207,9 +5522,9 @@ async fn main() { ]), ]), - ("tag-values", "methods: 'create', 'delete', 'get', 'get-iam-policy', 'list', 'patch', 'set-iam-policy' and 'test-iam-permissions'", vec![ + ("tag-values", "methods: 'create', 'delete', 'get', 'get-iam-policy', 'list', 'patch', 'set-iam-policy', 'tag-holds-create', 'tag-holds-delete', 'tag-holds-list' and 'test-iam-permissions'", vec![ ("create", - Some(r##"Creates a TagValue as a child of the specified TagKey. If a another request with the same parameters is sent while the original request is in process the second request will receive an error. A maximum of 300 TagValues can exist under a TagKey at any given time."##), + Some(r##"Creates a TagValue as a child of the specified TagKey. If a another request with the same parameters is sent while the original request is in process the second request will receive an error. A maximum of 1000 TagValues can exist under a TagKey at any given time."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/tag-values_create", vec![ (Some(r##"kv"##), @@ -5253,7 +5568,7 @@ async fn main() { Some(false)), ]), ("get", - Some(r##"Retrieves TagValue. If the TagValue or namespaced name does not exist, or if the user does not have permission to view it, this method will return `PERMISSION_DENIED`."##), + Some(r##"Retrieves a TagValue. 
This method will return `PERMISSION_DENIED` if the value does not exist or the user does not have permission to view it."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/tag-values_get", vec![ (Some(r##"name"##), @@ -5280,7 +5595,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5352,7 +5667,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5368,6 +5683,78 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("tag-holds-create", + Some(r##"Creates a TagHold. Returns ALREADY_EXISTS if a TagHold with the same resource and origin exists under the same TagValue."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/tag-values_tag-holds-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the TagHold's parent TagValue. 
Must be of the form: `tagValues/{tag-value-id}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("tag-holds-delete", + Some(r##"Deletes a TagHold."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/tag-values_tag-holds-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the TagHold to delete. Must be of the form: `tagValues/{tag-value-id}/tagHolds/{tag-hold-id}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("tag-holds-list", + Some(r##"Lists TagHolds under a TagValue."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli/tag-values_tag-holds-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the parent TagValue. 
Must be of the form: `tagValues/{tag-value-id}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5380,7 +5767,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5408,7 +5795,7 @@ async fn main() { let mut app = App::new("cloudresourcemanager3") .author("Sebastian Thiel ") - .version("4.0.1+20220306") + .version("5.0.2+20230115") .about("Creates, reads, and updates metadata for Google Cloud Platform resource containers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudresourcemanager3_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudresourcemanager3/Cargo.toml b/gen/cloudresourcemanager3/Cargo.toml index f6d2903013..5b2e8ecced 100644 --- a/gen/cloudresourcemanager3/Cargo.toml +++ b/gen/cloudresourcemanager3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudresourcemanager3" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Resource Manager (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudresourcemanager3" homepage = "https://cloud.google.com/resource-manager" -documentation = "https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115" 
license = "MIT" keywords = ["cloudresourcemanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudresourcemanager3/README.md b/gen/cloudresourcemanager3/README.md index 87191089dc..336565579f 100644 --- a/gen/cloudresourcemanager3/README.md +++ b/gen/cloudresourcemanager3/README.md @@ -5,32 +5,32 @@ DO NOT EDIT ! --> The `google-cloudresourcemanager3` library allows access to all features of the *Google Cloud Resource Manager* service. -This documentation was generated from *Cloud Resource Manager* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Resource Manager* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Resource Manager* *v3* API can be found at the [official documentation site](https://cloud.google.com/resource-manager). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/CloudResourceManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/CloudResourceManager) ... 
-* [effective tags](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::EffectiveTag) - * [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::EffectiveTagListCall) -* [folders](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::Folder) - * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderListCall), [*move*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderMoveCall), [*patch*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderPatchCall), [*search*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderTestIamPermissionCall) and [*undelete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::FolderUndeleteCall) -* [liens](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::Lien) 
- * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::LienCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::LienDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::LienGetCall) and [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::LienListCall) -* [operations](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::Operation) - * [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::OperationGetCall) -* [organizations](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::Organization) - * [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::OrganizationGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::OrganizationGetIamPolicyCall), [*search*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::OrganizationSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::OrganizationSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::OrganizationTestIamPermissionCall) -* [projects](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::Project) - * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectCreateCall), 
[*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectListCall), [*move*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectMoveCall), [*patch*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectPatchCall), [*search*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectTestIamPermissionCall) and [*undelete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::ProjectUndeleteCall) -* [tag bindings](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagBinding) - * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagBindingCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagBindingDeleteCall) and [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagBindingListCall) -* [tag 
keys](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKey) - * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKeyCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKeyDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKeyGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKeyGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKeyListCall), [*patch*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKeyPatchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKeySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagKeyTestIamPermissionCall) -* [tag values](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValue) - * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueGetIamPolicyCall), 
[*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueListCall), [*patch*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValuePatchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueSetIamPolicyCall), [*tag holds create*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueTagHoldCreateCall), [*tag holds delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueTagHoldDeleteCall), [*tag holds list*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueTagHoldListCall) and [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/api::TagValueTestIamPermissionCall) +* [effective tags](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::EffectiveTag) + * [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::EffectiveTagListCall) +* [folders](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::Folder) + * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderGetIamPolicyCall), 
[*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderListCall), [*move*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderMoveCall), [*patch*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderPatchCall), [*search*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderTestIamPermissionCall) and [*undelete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::FolderUndeleteCall) +* [liens](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::Lien) + * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::LienCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::LienDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::LienGetCall) and [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::LienListCall) +* [operations](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::Operation) + * [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::OperationGetCall) +* [organizations](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::Organization) + * [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::OrganizationGetCall), 
[*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::OrganizationGetIamPolicyCall), [*search*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::OrganizationSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::OrganizationSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::OrganizationTestIamPermissionCall) +* [projects](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::Project) + * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectListCall), [*move*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectMoveCall), [*patch*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectPatchCall), [*search*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectSearchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectTestIamPermissionCall) and 
[*undelete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::ProjectUndeleteCall) +* [tag bindings](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagBinding) + * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagBindingCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagBindingDeleteCall) and [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagBindingListCall) +* [tag keys](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKey) + * [*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKeyCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKeyDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKeyGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKeyGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKeyListCall), [*patch*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKeyPatchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKeySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagKeyTestIamPermissionCall) +* [tag values](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValue) + * 
[*create*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueCreateCall), [*delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueDeleteCall), [*get*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueGetCall), [*get iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueGetIamPolicyCall), [*list*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueListCall), [*patch*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValuePatchCall), [*set iam policy*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueSetIamPolicyCall), [*tag holds create*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueTagHoldCreateCall), [*tag holds delete*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueTagHoldDeleteCall), [*tag holds list*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueTagHoldListCall) and [*test iam permissions*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/api::TagValueTestIamPermissionCall) @@ -39,17 +39,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/CloudResourceManager)** +* **[Hub](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/CloudResourceManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::Part)** + * **[Parts](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -162,17 +162,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -182,29 +182,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::RequestValue) and -[decodable](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::RequestValue) and +[decodable](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudresourcemanager3/5.0.2-beta-1+20230115/google_cloudresourcemanager3/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudresourcemanager3/5.0.2+20230115/google_cloudresourcemanager3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudresourcemanager3/src/api.rs b/gen/cloudresourcemanager3/src/api.rs index e130c9150b..6db6e16bae 100644 --- a/gen/cloudresourcemanager3/src/api.rs +++ b/gen/cloudresourcemanager3/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> CloudResourceManager { CloudResourceManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudresourcemanager.googleapis.com/".to_string(), _root_url: "https://cloudresourcemanager.googleapis.com/".to_string(), } @@ -166,7 +166,7 @@ impl<'a, S> CloudResourceManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudresourcemanager3/src/client.rs b/gen/cloudresourcemanager3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudresourcemanager3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudresourcemanager3/src/lib.rs b/gen/cloudresourcemanager3/src/lib.rs index 1e882154e9..bcd6d6fae9 100644 --- a/gen/cloudresourcemanager3/src/lib.rs +++ b/gen/cloudresourcemanager3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Resource Manager* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Resource Manager* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *cloudresourcemanager:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Resource Manager* *v3* API can be found at the //! [official documentation site](https://cloud.google.com/resource-manager). diff --git a/gen/cloudscheduler1-cli/Cargo.toml b/gen/cloudscheduler1-cli/Cargo.toml index 00f6938181..1feafe36a3 100644 --- a/gen/cloudscheduler1-cli/Cargo.toml +++ b/gen/cloudscheduler1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudscheduler1-cli" -version = "4.0.1+20220212" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Scheduler (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudscheduler1-cli" @@ -20,13 +20,13 @@ name = "cloudscheduler1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudscheduler1] path = "../cloudscheduler1" -version = "4.0.1+20220212" +version = "5.0.2+20230106" + diff --git a/gen/cloudscheduler1-cli/README.md b/gen/cloudscheduler1-cli/README.md index a603a6cb17..4f99102b01 100644 --- a/gen/cloudscheduler1-cli/README.md +++ 
b/gen/cloudscheduler1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Scheduler* API at revision *20220212*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Scheduler* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash cloudscheduler1 [options] diff --git a/gen/cloudscheduler1-cli/mkdocs.yml b/gen/cloudscheduler1-cli/mkdocs.yml index f3cc3fa846..7108fbae55 100644 --- a/gen/cloudscheduler1-cli/mkdocs.yml +++ b/gen/cloudscheduler1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Scheduler v4.0.1+20220212 +site_name: Cloud Scheduler v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-cloudscheduler1-cli site_description: A complete library to interact with Cloud Scheduler (protocol v1) @@ -7,18 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudscheduler1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-jobs-create.md', 'Projects', 'Locations Jobs Create'] -- ['projects_locations-jobs-delete.md', 'Projects', 'Locations Jobs Delete'] -- ['projects_locations-jobs-get.md', 'Projects', 'Locations Jobs Get'] -- ['projects_locations-jobs-list.md', 'Projects', 'Locations Jobs List'] -- ['projects_locations-jobs-patch.md', 'Projects', 'Locations Jobs Patch'] -- ['projects_locations-jobs-pause.md', 'Projects', 'Locations Jobs Pause'] -- ['projects_locations-jobs-resume.md', 'Projects', 'Locations Jobs Resume'] -- ['projects_locations-jobs-run.md', 'Projects', 'Locations Jobs Run'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Jobs Create': 'projects_locations-jobs-create.md' + - 'Locations Jobs Delete': 'projects_locations-jobs-delete.md' 
+ - 'Locations Jobs Get': 'projects_locations-jobs-get.md' + - 'Locations Jobs List': 'projects_locations-jobs-list.md' + - 'Locations Jobs Patch': 'projects_locations-jobs-patch.md' + - 'Locations Jobs Pause': 'projects_locations-jobs-pause.md' + - 'Locations Jobs Resume': 'projects_locations-jobs-resume.md' + - 'Locations Jobs Run': 'projects_locations-jobs-run.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/cloudscheduler1-cli/src/client.rs b/gen/cloudscheduler1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudscheduler1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudscheduler1-cli/src/main.rs b/gen/cloudscheduler1-cli/src/main.rs index 41984e433f..d46b6bff34 100644 --- a/gen/cloudscheduler1-cli/src/main.rs +++ b/gen/cloudscheduler1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudscheduler1::{api, Error, oauth2}; +use google_cloudscheduler1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -336,7 +335,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -459,7 +458,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -770,7 +769,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1195,7 +1194,7 @@ async fn main() { let mut app = App::new("cloudscheduler1") .author("Sebastian 
Thiel ") - .version("4.0.1+20220212") + .version("5.0.2+20230106") .about("Creates and manages jobs run on a regular recurring schedule.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudscheduler1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudscheduler1/Cargo.toml b/gen/cloudscheduler1/Cargo.toml index a34e725b9a..16a5b48cb1 100644 --- a/gen/cloudscheduler1/Cargo.toml +++ b/gen/cloudscheduler1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudscheduler1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Scheduler (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudscheduler1" homepage = "https://cloud.google.com/scheduler/" -documentation = "https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-cloudscheduler1/5.0.2+20230106" license = "MIT" keywords = ["cloudscheduler", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudscheduler1/README.md b/gen/cloudscheduler1/README.md index 94c8ab525c..5e6cae4e02 100644 --- a/gen/cloudscheduler1/README.md +++ b/gen/cloudscheduler1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudscheduler1` library allows access to all features of the *Google Cloud Scheduler* service. -This documentation was generated from *Cloud Scheduler* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *cloudscheduler:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Scheduler* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *cloudscheduler:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Cloud Scheduler* *v1* API can be found at the [official documentation site](https://cloud.google.com/scheduler/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/CloudScheduler) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/CloudScheduler) ... * projects - * [*locations get*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationGetCall), [*locations jobs create*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationJobDeleteCall), [*locations jobs get*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationJobGetCall), [*locations jobs list*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationJobListCall), [*locations jobs patch*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationJobPatchCall), [*locations jobs pause*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationJobPauseCall), [*locations jobs resume*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationJobResumeCall), [*locations jobs run*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationJobRunCall) and [*locations list*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/api::ProjectLocationListCall) + * [*locations 
get*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationGetCall), [*locations jobs create*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationJobDeleteCall), [*locations jobs get*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationJobGetCall), [*locations jobs list*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationJobListCall), [*locations jobs patch*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationJobPatchCall), [*locations jobs pause*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationJobPauseCall), [*locations jobs resume*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationJobResumeCall), [*locations jobs run*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationJobRunCall) and [*locations list*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/api::ProjectLocationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/CloudScheduler)** +* **[Hub](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/CloudScheduler)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::Part)** + * **[Parts](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudscheduler1/5.0.2-beta-1+20230106/google_cloudscheduler1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudscheduler1/5.0.2+20230106/google_cloudscheduler1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudscheduler1/src/api.rs b/gen/cloudscheduler1/src/api.rs index 3dc4c79bfa..39969379b7 100644 --- a/gen/cloudscheduler1/src/api.rs +++ b/gen/cloudscheduler1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudScheduler { CloudScheduler { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudscheduler.googleapis.com/".to_string(), _root_url: "https://cloudscheduler.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudScheduler { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudscheduler1/src/client.rs b/gen/cloudscheduler1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudscheduler1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudscheduler1/src/lib.rs b/gen/cloudscheduler1/src/lib.rs index 96ab852e4a..d6bb362ea2 100644 --- a/gen/cloudscheduler1/src/lib.rs +++ b/gen/cloudscheduler1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Scheduler* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *cloudscheduler:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Scheduler* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *cloudscheduler:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Scheduler* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/scheduler/). diff --git a/gen/cloudscheduler1_beta1-cli/Cargo.toml b/gen/cloudscheduler1_beta1-cli/Cargo.toml index f8f480a34a..f9f2a5bc36 100644 --- a/gen/cloudscheduler1_beta1-cli/Cargo.toml +++ b/gen/cloudscheduler1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudscheduler1_beta1-cli" -version = "4.0.1+20220212" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Scheduler (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudscheduler1_beta1-cli" @@ -20,13 +20,13 @@ name = "cloudscheduler1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudscheduler1_beta1] path = "../cloudscheduler1_beta1" -version = "4.0.1+20220212" +version = "5.0.2+20230106" + diff --git a/gen/cloudscheduler1_beta1-cli/README.md b/gen/cloudscheduler1_beta1-cli/README.md index 7de63f91ed..208a79ebec 100644 --- 
a/gen/cloudscheduler1_beta1-cli/README.md +++ b/gen/cloudscheduler1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Scheduler* API at revision *20220212*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Scheduler* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash cloudscheduler1-beta1 [options] diff --git a/gen/cloudscheduler1_beta1-cli/mkdocs.yml b/gen/cloudscheduler1_beta1-cli/mkdocs.yml index f785748e71..29b599c470 100644 --- a/gen/cloudscheduler1_beta1-cli/mkdocs.yml +++ b/gen/cloudscheduler1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Scheduler v4.0.1+20220212 +site_name: Cloud Scheduler v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-cloudscheduler1_beta1-cli site_description: A complete library to interact with Cloud Scheduler (protocol v1beta1) @@ -7,18 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudscheduler1_ docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-jobs-create.md', 'Projects', 'Locations Jobs Create'] -- ['projects_locations-jobs-delete.md', 'Projects', 'Locations Jobs Delete'] -- ['projects_locations-jobs-get.md', 'Projects', 'Locations Jobs Get'] -- ['projects_locations-jobs-list.md', 'Projects', 'Locations Jobs List'] -- ['projects_locations-jobs-patch.md', 'Projects', 'Locations Jobs Patch'] -- ['projects_locations-jobs-pause.md', 'Projects', 'Locations Jobs Pause'] -- ['projects_locations-jobs-resume.md', 'Projects', 'Locations Jobs Resume'] -- ['projects_locations-jobs-run.md', 'Projects', 'Locations Jobs Run'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Jobs Create': 
'projects_locations-jobs-create.md' + - 'Locations Jobs Delete': 'projects_locations-jobs-delete.md' + - 'Locations Jobs Get': 'projects_locations-jobs-get.md' + - 'Locations Jobs List': 'projects_locations-jobs-list.md' + - 'Locations Jobs Patch': 'projects_locations-jobs-patch.md' + - 'Locations Jobs Pause': 'projects_locations-jobs-pause.md' + - 'Locations Jobs Resume': 'projects_locations-jobs-resume.md' + - 'Locations Jobs Run': 'projects_locations-jobs-run.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/cloudscheduler1_beta1-cli/src/client.rs b/gen/cloudscheduler1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudscheduler1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudscheduler1_beta1-cli/src/main.rs b/gen/cloudscheduler1_beta1-cli/src/main.rs index 68d8928b8c..1730b908f2 100644 --- a/gen/cloudscheduler1_beta1-cli/src/main.rs +++ b/gen/cloudscheduler1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudscheduler1_beta1::{api, Error, oauth2}; +use google_cloudscheduler1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -230,7 +229,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "legacy-app-engine-cron" => { - call = call.legacy_app_engine_cron(arg_from_str(value.unwrap_or("false"), err, "legacy-app-engine-cron", "boolean")); + call = call.legacy_app_engine_cron( value.map(|v| arg_from_str(v, err, "legacy-app-engine-cron", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -341,10 +340,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "legacy-app-engine-cron" => { - call = call.legacy_app_engine_cron(arg_from_str(value.unwrap_or("false"), err, "legacy-app-engine-cron", "boolean")); + call = call.legacy_app_engine_cron( value.map(|v| arg_from_str(v, err, "legacy-app-engine-cron", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -471,7 
+470,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -783,7 +782,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1208,7 +1207,7 @@ async fn main() { let mut app = App::new("cloudscheduler1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220212") + .version("5.0.2+20230106") .about("Creates and manages jobs run on a regular recurring schedule.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudscheduler1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudscheduler1_beta1/Cargo.toml b/gen/cloudscheduler1_beta1/Cargo.toml index 9ea4c4aecf..30cf0bcf63 100644 --- a/gen/cloudscheduler1_beta1/Cargo.toml +++ b/gen/cloudscheduler1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudscheduler1_beta1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Scheduler (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudscheduler1_beta1" homepage = "https://cloud.google.com/scheduler/" -documentation = "https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106" license = "MIT" keywords = ["cloudscheduler", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudscheduler1_beta1/README.md 
b/gen/cloudscheduler1_beta1/README.md index 5564494ee8..5d71484daa 100644 --- a/gen/cloudscheduler1_beta1/README.md +++ b/gen/cloudscheduler1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudscheduler1_beta1` library allows access to all features of the *Google Cloud Scheduler* service. -This documentation was generated from *Cloud Scheduler* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *cloudscheduler:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Scheduler* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *cloudscheduler:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Scheduler* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/scheduler/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/CloudScheduler) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/CloudScheduler) ... 
* projects - * [*locations get*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationGetCall), [*locations jobs create*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobDeleteCall), [*locations jobs get*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobGetCall), [*locations jobs list*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobListCall), [*locations jobs patch*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobPatchCall), [*locations jobs pause*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobPauseCall), [*locations jobs resume*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobResumeCall), [*locations jobs run*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobRunCall) and [*locations list*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/api::ProjectLocationListCall) + * [*locations get*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationGetCall), [*locations jobs create*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobDeleteCall), 
[*locations jobs get*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobGetCall), [*locations jobs list*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobListCall), [*locations jobs patch*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobPatchCall), [*locations jobs pause*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobPauseCall), [*locations jobs resume*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobResumeCall), [*locations jobs run*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationJobRunCall) and [*locations list*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/api::ProjectLocationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/CloudScheduler)** +* **[Hub](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/CloudScheduler)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudscheduler1_beta1/5.0.2-beta-1+20230106/google_cloudscheduler1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudscheduler1_beta1/5.0.2+20230106/google_cloudscheduler1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudscheduler1_beta1/src/api.rs b/gen/cloudscheduler1_beta1/src/api.rs index f4a6205875..ce959c56aa 100644 --- a/gen/cloudscheduler1_beta1/src/api.rs +++ b/gen/cloudscheduler1_beta1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudScheduler { CloudScheduler { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudscheduler.googleapis.com/".to_string(), _root_url: "https://cloudscheduler.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudScheduler { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudscheduler1_beta1/src/client.rs b/gen/cloudscheduler1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudscheduler1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudscheduler1_beta1/src/lib.rs b/gen/cloudscheduler1_beta1/src/lib.rs index ec3d53fd2b..269cab1d97 100644 --- a/gen/cloudscheduler1_beta1/src/lib.rs +++ b/gen/cloudscheduler1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Scheduler* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *cloudscheduler:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Scheduler* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *cloudscheduler:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Scheduler* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/scheduler/). diff --git a/gen/cloudshell1-cli/Cargo.toml b/gen/cloudshell1-cli/Cargo.toml index d66d5b7e1b..f11516ac2b 100644 --- a/gen/cloudshell1-cli/Cargo.toml +++ b/gen/cloudshell1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudshell1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230109" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Shell (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudshell1-cli" @@ -20,13 +20,13 @@ name = "cloudshell1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudshell1] path = "../cloudshell1" -version = "4.0.1+20220301" +version = "5.0.2+20230109" + diff --git a/gen/cloudshell1-cli/README.md b/gen/cloudshell1-cli/README.md index 865cc32fe0..e1a703ca50 100644 --- a/gen/cloudshell1-cli/README.md +++ b/gen/cloudshell1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Shell* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Shell* API at revision *20230109*. The CLI is at version *5.0.2*. ```bash cloudshell1 [options] diff --git a/gen/cloudshell1-cli/mkdocs.yml b/gen/cloudshell1-cli/mkdocs.yml index fb6562410b..27e2c5b91b 100644 --- a/gen/cloudshell1-cli/mkdocs.yml +++ b/gen/cloudshell1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Shell v4.0.1+20220301 +site_name: Cloud Shell v5.0.2+20230109 site_url: http://byron.github.io/google-apis-rs/google-cloudshell1-cli site_description: A complete library to interact with Cloud Shell (protocol v1) @@ -7,17 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudshell1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['users_environments-add-public-key.md', 'Users', 'Environments Add Public Key'] -- ['users_environments-authorize.md', 'Users', 'Environments Authorize'] -- ['users_environments-get.md', 'Users', 'Environments Get'] -- ['users_environments-remove-public-key.md', 'Users', 'Environments Remove Public Key'] -- ['users_environments-start.md', 'Users', 'Environments Start'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Users': + - 'Environments Add Public Key': 'users_environments-add-public-key.md' + - 'Environments Authorize': 'users_environments-authorize.md' + - 'Environments Get': 'users_environments-get.md' + - 'Environments Remove Public Key': 'users_environments-remove-public-key.md' + - 'Environments Start': 
'users_environments-start.md' theme: readthedocs diff --git a/gen/cloudshell1-cli/src/client.rs b/gen/cloudshell1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudshell1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudshell1-cli/src/main.rs b/gen/cloudshell1-cli/src/main.rs index 016c9303ef..c375f581f6 100644 --- a/gen/cloudshell1-cli/src/main.rs +++ b/gen/cloudshell1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudshell1::{api, Error, oauth2}; +use google_cloudshell1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -249,7 +248,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1055,7 +1054,7 @@ async fn main() { let mut app = App::new("cloudshell1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230109") .about("Allows users to start, configure, and connect to interactive shell sessions running in the cloud. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudshell1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudshell1/Cargo.toml b/gen/cloudshell1/Cargo.toml index fcf3f38712..cf0c8e255d 100644 --- a/gen/cloudshell1/Cargo.toml +++ b/gen/cloudshell1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudshell1" -version = "5.0.2-beta-1+20230109" +version = "5.0.2+20230109" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Shell (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudshell1" homepage = "https://cloud.google.com/shell/docs/" -documentation = "https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109" +documentation = "https://docs.rs/google-cloudshell1/5.0.2+20230109" license = "MIT" keywords = ["cloudshell", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudshell1/README.md b/gen/cloudshell1/README.md index 9bf212bc7e..33e865947c 100644 --- a/gen/cloudshell1/README.md +++ b/gen/cloudshell1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-cloudshell1` library allows access to all features of the *Google Cloud Shell* service. -This documentation was generated from *Cloud Shell* crate version *5.0.2-beta-1+20230109*, where *20230109* is the exact revision of the *cloudshell:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Shell* crate version *5.0.2+20230109*, where *20230109* is the exact revision of the *cloudshell:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Shell* *v1* API can be found at the [official documentation site](https://cloud.google.com/shell/docs/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/CloudShell) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/CloudShell) ... -* [operations](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::Operation) - * [*cancel*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::OperationCancelCall), [*delete*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::OperationDeleteCall), [*get*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::OperationGetCall) and [*list*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::OperationListCall) +* [operations](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::Operation) + * [*cancel*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::OperationCancelCall), [*delete*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::OperationDeleteCall), [*get*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::OperationGetCall) and [*list*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::OperationListCall) * users - * [*environments add public key*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::UserEnvironmentAddPublicKeyCall), [*environments authorize*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::UserEnvironmentAuthorizeCall), [*environments get*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::UserEnvironmentGetCall), [*environments remove public key*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::UserEnvironmentRemovePublicKeyCall) and 
[*environments start*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/api::UserEnvironmentStartCall) + * [*environments add public key*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::UserEnvironmentAddPublicKeyCall), [*environments authorize*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::UserEnvironmentAuthorizeCall), [*environments get*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::UserEnvironmentGetCall), [*environments remove public key*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::UserEnvironmentRemovePublicKeyCall) and [*environments start*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/api::UserEnvironmentStartCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/CloudShell)** +* **[Hub](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/CloudShell)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::Part)** + * **[Parts](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudshell1/5.0.2-beta-1+20230109/google_cloudshell1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudshell1/5.0.2+20230109/google_cloudshell1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudshell1/src/api.rs b/gen/cloudshell1/src/api.rs index 083cc67bb2..604276ade8 100644 --- a/gen/cloudshell1/src/api.rs +++ b/gen/cloudshell1/src/api.rs @@ -122,7 +122,7 @@ impl<'a, S> CloudShell { CloudShell { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudshell.googleapis.com/".to_string(), _root_url: "https://cloudshell.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> CloudShell { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudshell1/src/client.rs b/gen/cloudshell1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudshell1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudshell1/src/lib.rs b/gen/cloudshell1/src/lib.rs index 870f3aa23e..0bc61dc85a 100644 --- a/gen/cloudshell1/src/lib.rs +++ b/gen/cloudshell1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Shell* crate version *5.0.2-beta-1+20230109*, where *20230109* is the exact revision of the *cloudshell:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Shell* crate version *5.0.2+20230109*, where *20230109* is the exact revision of the *cloudshell:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Shell* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/shell/docs/). diff --git a/gen/cloudsupport2_beta-cli/Cargo.toml b/gen/cloudsupport2_beta-cli/Cargo.toml index b2dcb718a0..a8a054389d 100644 --- a/gen/cloudsupport2_beta-cli/Cargo.toml +++ b/gen/cloudsupport2_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudsupport2_beta-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Support (protocol v2beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudsupport2_beta-cli" @@ -20,13 +20,13 @@ name = "cloudsupport2-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudsupport2_beta] path = "../cloudsupport2_beta" -version = "4.0.1+20220305" +version = "5.0.2+20230121" + diff --git a/gen/cloudsupport2_beta-cli/README.md b/gen/cloudsupport2_beta-cli/README.md index 8734632bd8..c463576a4f 100644 --- a/gen/cloudsupport2_beta-cli/README.md +++ b/gen/cloudsupport2_beta-cli/README.md 
@@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Support* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Support* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash cloudsupport2-beta [options] diff --git a/gen/cloudsupport2_beta-cli/mkdocs.yml b/gen/cloudsupport2_beta-cli/mkdocs.yml index 949d4364fe..349833522d 100644 --- a/gen/cloudsupport2_beta-cli/mkdocs.yml +++ b/gen/cloudsupport2_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Support v4.0.1+20220305 +site_name: Cloud Support v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-cloudsupport2_beta-cli site_description: A complete library to interact with Cloud Support (protocol v2beta) @@ -7,22 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudsupport2_be docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['attachments_create.md', 'Attachments', 'Create'] -- ['case-classifications_search.md', 'Case Classifications', 'Search'] -- ['cases_attachments-list.md', 'Cases', 'Attachments List'] -- ['cases_close.md', 'Cases', 'Close'] -- ['cases_comments-create.md', 'Cases', 'Comments Create'] -- ['cases_comments-list.md', 'Cases', 'Comments List'] -- ['cases_create.md', 'Cases', 'Create'] -- ['cases_escalate.md', 'Cases', 'Escalate'] -- ['cases_get.md', 'Cases', 'Get'] -- ['cases_list.md', 'Cases', 'List'] -- ['cases_patch.md', 'Cases', 'Patch'] -- ['cases_search.md', 'Cases', 'Search'] -- ['media_download.md', 'Media', 'Download'] -- ['media_upload.md', 'Media', 'Upload'] +nav: +- Home: 'index.md' +- 'Attachments': + - 'Create': 'attachments_create.md' +- 'Case Classifications': + - 'Search': 'case-classifications_search.md' +- 'Cases': + - 'Attachments List': 'cases_attachments-list.md' + - 'Close': 'cases_close.md' + - 'Comments Create': 
'cases_comments-create.md' + - 'Comments List': 'cases_comments-list.md' + - 'Create': 'cases_create.md' + - 'Escalate': 'cases_escalate.md' + - 'Get': 'cases_get.md' + - 'List': 'cases_list.md' + - 'Patch': 'cases_patch.md' + - 'Search': 'cases_search.md' +- 'Media': + - 'Download': 'media_download.md' + - 'Upload': 'media_upload.md' theme: readthedocs diff --git a/gen/cloudsupport2_beta-cli/src/client.rs b/gen/cloudsupport2_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudsupport2_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudsupport2_beta-cli/src/main.rs b/gen/cloudsupport2_beta-cli/src/main.rs index 4752ecb0c6..df40e04902 100644 --- a/gen/cloudsupport2_beta-cli/src/main.rs +++ b/gen/cloudsupport2_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudsupport2_beta::{api, Error, oauth2}; +use google_cloudsupport2_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -156,7 +155,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -215,7 +214,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -449,7 +448,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -530,6 +529,7 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "escalated" => Some(("escalated", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "language-code" => Some(("languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "severity" => Some(("severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -539,7 +539,7 @@ where "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["classification", "create-time", "creator", "description", "display-name", "email", "escalated", "google-support", "id", "name", "priority", "severity", "state", "subscriber-email-addresses", "test-case", "time-zone", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["classification", "create-time", "creator", "description", "display-name", "email", "escalated", "google-support", "id", "language-code", "name", "priority", "severity", "state", "subscriber-email-addresses", "test-case", "time-zone", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -747,7 +747,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -831,6 +831,7 @@ where "description" => 
Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "escalated" => Some(("escalated", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "language-code" => Some(("languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "severity" => Some(("severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -840,7 +841,7 @@ where "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["classification", "create-time", "creator", "description", "display-name", "email", "escalated", "google-support", "id", "name", "priority", "severity", "state", "subscriber-email-addresses", "test-case", "time-zone", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["classification", "create-time", "creator", "description", "display-name", "email", "escalated", "google-support", "id", "language-code", "name", "priority", "severity", "state", "subscriber-email-addresses", "test-case", "time-zone", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -855,7 +856,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = 
false; @@ -917,7 +918,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1278,12 +1279,12 @@ async fn main() { let arg_data = [ ("attachments", "methods: 'create'", vec![ ("create", - Some(r##"Create a file attachment on a case or Cloud resource."##), + Some(r##"Create a file attachment on a case or Cloud resource. The attachment object must have the following fields set: filename."##), "Details at http://byron.github.io/google-apis-rs/google_cloudsupport2_beta_cli/attachments_create", vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the case to which attachment should be attached."##), + Some(r##"Required. The resource name of the case (or case parent) to which the attachment should be attached."##), Some(true), Some(false)), @@ -1378,7 +1379,7 @@ async fn main() { Some(false)), ]), ("comments-create", - Some(r##"Add a new comment to the specified Case."##), + Some(r##"Add a new comment to the specified Case. The comment object must have the following fields set: body."##), "Details at http://byron.github.io/google-apis-rs/google_cloudsupport2_beta_cli/cases_comments-create", vec![ (Some(r##"parent"##), @@ -1428,7 +1429,7 @@ async fn main() { Some(false)), ]), ("create", - Some(r##"Create a new case and associate it with the given Cloud resource."##), + Some(r##"Create a new case and associate it with the given Cloud resource. 
The case object must have the following fields set: display_name, description, classification, and severity."##), "Details at http://byron.github.io/google-apis-rs/google_cloudsupport2_beta_cli/cases_create", vec![ (Some(r##"parent"##), @@ -1506,7 +1507,7 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"Retrieve all cases under the specified parent."##), + Some(r##"Retrieve all cases under the specified parent. Note: Listing cases under an Organization returns only the cases directly parented by that organization. To retrieve all cases under an organization, including cases parented by projects under that organization, use `cases.search`."##), "Details at http://byron.github.io/google-apis-rs/google_cloudsupport2_beta_cli/cases_list", vec![ (Some(r##"parent"##), @@ -1528,7 +1529,7 @@ async fn main() { Some(false)), ]), ("patch", - Some(r##"Update the specified case. Only a subset of fields (display_name, description, time_zone, subscriber_email_addresses, related_resources, severity, priority, primary_contact, and labels) can be updated."##), + Some(r##"Update the specified case. Only a subset of fields can be updated."##), "Details at http://byron.github.io/google-apis-rs/google_cloudsupport2_beta_cli/cases_patch", vec![ (Some(r##"name"##), @@ -1597,12 +1598,12 @@ async fn main() { Some(false)), ]), ("upload", - Some(r##"Create a file attachment on a case or Cloud resource."##), + Some(r##"Create a file attachment on a case or Cloud resource. The attachment object must have the following fields set: filename."##), "Details at http://byron.github.io/google-apis-rs/google_cloudsupport2_beta_cli/media_upload", vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the case to which attachment should be attached."##), + Some(r##"Required. 
The resource name of the case (or case parent) to which the attachment should be attached."##), Some(true), Some(false)), @@ -1636,7 +1637,7 @@ async fn main() { let mut app = App::new("cloudsupport2-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230121") .about("Manages Google Cloud technical support cases for Customer Care support offerings. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudsupport2_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudsupport2_beta/Cargo.toml b/gen/cloudsupport2_beta/Cargo.toml index 04d8cfb3ab..2ef11615de 100644 --- a/gen/cloudsupport2_beta/Cargo.toml +++ b/gen/cloudsupport2_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudsupport2_beta" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Support (protocol v2beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudsupport2_beta" homepage = "https://cloud.google.com/support/docs/apis" -documentation = "https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121" license = "MIT" keywords = ["cloudsupport", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudsupport2_beta/README.md b/gen/cloudsupport2_beta/README.md index c7f939a53f..969509dae6 100644 --- a/gen/cloudsupport2_beta/README.md +++ b/gen/cloudsupport2_beta/README.md @@ -5,31 +5,31 @@ DO NOT EDIT ! --> The `google-cloudsupport2_beta` library allows access to all features of the *Google Cloud Support* service. -This documentation was generated from *Cloud Support* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *cloudsupport:v2beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Support* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *cloudsupport:v2beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Support* *v2_beta* API can be found at the [official documentation site](https://cloud.google.com/support/docs/apis). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/CloudSupport) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/CloudSupport) ... -* [attachments](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::Attachment) - * [*create*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::AttachmentCreateCall) -* [case classifications](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseClassification) - * [*search*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseClassificationSearchCall) -* [cases](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::Case) - * [*attachments list*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseAttachmentListCall), [*close*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseCloseCall), [*comments create*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseCommentCreateCall), [*comments list*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseCommentListCall), 
[*create*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseCreateCall), [*escalate*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseEscalateCall), [*get*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseGetCall), [*list*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseListCall), [*patch*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CasePatchCall) and [*search*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::CaseSearchCall) -* [media](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::Media) - * [*download*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::MediaDownloadCall) and [*upload*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::MediaUploadCall) +* [attachments](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::Attachment) + * [*create*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::AttachmentCreateCall) +* [case classifications](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseClassification) + * [*search*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseClassificationSearchCall) +* [cases](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::Case) + * [*attachments list*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseAttachmentListCall), [*close*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseCloseCall), [*comments 
create*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseCommentCreateCall), [*comments list*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseCommentListCall), [*create*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseCreateCall), [*escalate*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseEscalateCall), [*get*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseGetCall), [*list*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseListCall), [*patch*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CasePatchCall) and [*search*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::CaseSearchCall) +* [media](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::Media) + * [*download*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::MediaDownloadCall) and [*upload*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::MediaUploadCall) Upload supported by ... -* [*upload media*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::MediaUploadCall) +* [*upload media*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::MediaUploadCall) Download supported by ... -* [*download media*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/api::MediaDownloadCall) +* [*download media*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/api::MediaDownloadCall) @@ -37,17 +37,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/CloudSupport)** +* **[Hub](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/CloudSupport)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::Part)** + * **[Parts](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease 
browsing. @@ -144,17 +144,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -164,29 +164,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::RequestValue) and -[decodable](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::RequestValue) and +[decodable](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudsupport2_beta/5.0.2-beta-1+20230121/google_cloudsupport2_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudsupport2_beta/5.0.2+20230121/google_cloudsupport2_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudsupport2_beta/src/api.rs b/gen/cloudsupport2_beta/src/api.rs index febf80fb95..69a967227e 100644 --- a/gen/cloudsupport2_beta/src/api.rs +++ b/gen/cloudsupport2_beta/src/api.rs @@ -122,7 +122,7 @@ impl<'a, S> CloudSupport { CloudSupport { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudsupport.googleapis.com/".to_string(), _root_url: "https://cloudsupport.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> CloudSupport { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudsupport2_beta/src/client.rs b/gen/cloudsupport2_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudsupport2_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudsupport2_beta/src/lib.rs b/gen/cloudsupport2_beta/src/lib.rs index f328895e33..927a9f9264 100644 --- a/gen/cloudsupport2_beta/src/lib.rs +++ b/gen/cloudsupport2_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Support* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *cloudsupport:v2beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Support* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *cloudsupport:v2beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Support* *v2_beta* API can be found at the //! [official documentation site](https://cloud.google.com/support/docs/apis). diff --git a/gen/cloudtasks2-cli/Cargo.toml b/gen/cloudtasks2-cli/Cargo.toml index 19093f47c2..7dffdf4b0a 100644 --- a/gen/cloudtasks2-cli/Cargo.toml +++ b/gen/cloudtasks2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudtasks2-cli" -version = "4.0.1+20220212" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Tasks (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2-cli" @@ -20,13 +20,13 @@ name = "cloudtasks2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudtasks2] path = "../cloudtasks2" -version = "4.0.1+20220212" +version = "5.0.2+20230105" + diff --git a/gen/cloudtasks2-cli/README.md b/gen/cloudtasks2-cli/README.md index 975bfc0304..03c66fe53a 100644 --- a/gen/cloudtasks2-cli/README.md +++ b/gen/cloudtasks2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Tasks* API at revision *20220212*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Tasks* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash cloudtasks2 [options] diff --git a/gen/cloudtasks2-cli/mkdocs.yml b/gen/cloudtasks2-cli/mkdocs.yml index 99e257da29..c41664bb83 100644 --- a/gen/cloudtasks2-cli/mkdocs.yml +++ b/gen/cloudtasks2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Tasks v4.0.1+20220212 +site_name: Cloud Tasks v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-cloudtasks2-cli site_description: A complete library to interact with Cloud Tasks (protocol v2) @@ -7,26 +7,27 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-queues-create.md', 'Projects', 'Locations Queues Create'] -- ['projects_locations-queues-delete.md', 'Projects', 'Locations Queues Delete'] -- ['projects_locations-queues-get.md', 'Projects', 'Locations Queues Get'] -- ['projects_locations-queues-get-iam-policy.md', 'Projects', 'Locations Queues Get Iam Policy'] -- ['projects_locations-queues-list.md', 'Projects', 'Locations Queues List'] -- ['projects_locations-queues-patch.md', 'Projects', 'Locations Queues Patch'] -- ['projects_locations-queues-pause.md', 'Projects', 'Locations Queues Pause'] -- ['projects_locations-queues-purge.md', 'Projects', 'Locations Queues Purge'] -- ['projects_locations-queues-resume.md', 'Projects', 'Locations Queues Resume'] -- ['projects_locations-queues-set-iam-policy.md', 'Projects', 'Locations Queues Set Iam Policy'] -- ['projects_locations-queues-tasks-create.md', 'Projects', 'Locations Queues Tasks Create'] -- 
['projects_locations-queues-tasks-delete.md', 'Projects', 'Locations Queues Tasks Delete'] -- ['projects_locations-queues-tasks-get.md', 'Projects', 'Locations Queues Tasks Get'] -- ['projects_locations-queues-tasks-list.md', 'Projects', 'Locations Queues Tasks List'] -- ['projects_locations-queues-tasks-run.md', 'Projects', 'Locations Queues Tasks Run'] -- ['projects_locations-queues-test-iam-permissions.md', 'Projects', 'Locations Queues Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Queues Create': 'projects_locations-queues-create.md' + - 'Locations Queues Delete': 'projects_locations-queues-delete.md' + - 'Locations Queues Get': 'projects_locations-queues-get.md' + - 'Locations Queues Get Iam Policy': 'projects_locations-queues-get-iam-policy.md' + - 'Locations Queues List': 'projects_locations-queues-list.md' + - 'Locations Queues Patch': 'projects_locations-queues-patch.md' + - 'Locations Queues Pause': 'projects_locations-queues-pause.md' + - 'Locations Queues Purge': 'projects_locations-queues-purge.md' + - 'Locations Queues Resume': 'projects_locations-queues-resume.md' + - 'Locations Queues Set Iam Policy': 'projects_locations-queues-set-iam-policy.md' + - 'Locations Queues Tasks Create': 'projects_locations-queues-tasks-create.md' + - 'Locations Queues Tasks Delete': 'projects_locations-queues-tasks-delete.md' + - 'Locations Queues Tasks Get': 'projects_locations-queues-tasks-get.md' + - 'Locations Queues Tasks List': 'projects_locations-queues-tasks-list.md' + - 'Locations Queues Tasks Run': 'projects_locations-queues-tasks-run.md' + - 'Locations Queues Test Iam Permissions': 'projects_locations-queues-test-iam-permissions.md' theme: readthedocs diff --git a/gen/cloudtasks2-cli/src/client.rs b/gen/cloudtasks2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudtasks2-cli/src/client.rs +++ 
/dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - 
Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - 
num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, 
"int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - 
err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - 
DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref 
s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudtasks2-cli/src/main.rs b/gen/cloudtasks2-cli/src/main.rs index 1297c35445..0fe97ff2c9 100644 --- a/gen/cloudtasks2-cli/src/main.rs +++ b/gen/cloudtasks2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudtasks2::{api, Error, oauth2}; +use google_cloudtasks2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -464,7 +463,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -571,7 +570,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1197,7 +1196,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1679,7 +1678,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1841,7 +1840,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1991,7 +1990,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2019,7 +2018,7 @@ async fn main() { let mut app = App::new("cloudtasks2") .author("Sebastian Thiel ") - .version("4.0.1+20220212") + .version("5.0.2+20230105") .about("Manages the execution of large numbers of distributed requests.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudtasks2_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudtasks2/Cargo.toml b/gen/cloudtasks2/Cargo.toml index 12a779689c..297f16bfb8 100644 --- a/gen/cloudtasks2/Cargo.toml +++ b/gen/cloudtasks2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudtasks2" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Tasks (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2" homepage = "https://cloud.google.com/tasks/" -documentation = "https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-cloudtasks2/5.0.2+20230105" license = "MIT" keywords = ["cloudtasks", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudtasks2/README.md b/gen/cloudtasks2/README.md index d11c0d427c..8cec08bf57 100644 --- a/gen/cloudtasks2/README.md +++ b/gen/cloudtasks2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudtasks2` library allows access to all features of the *Google Cloud Tasks* service. -This documentation was generated from *Cloud Tasks* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Tasks* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Tasks* *v2* API can be found at the [official documentation site](https://cloud.google.com/tasks/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/CloudTasks) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/CloudTasks) ... * projects - * [*locations get*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationListCall), [*locations queues create*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueCreateCall), [*locations queues delete*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueDeleteCall), [*locations queues get*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueGetCall), [*locations queues get iam policy*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueGetIamPolicyCall), [*locations queues list*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueListCall), [*locations queues patch*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueuePatchCall), [*locations queues pause*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueuePauseCall), [*locations queues 
purge*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueuePurgeCall), [*locations queues resume*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueResumeCall), [*locations queues set iam policy*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueSetIamPolicyCall), [*locations queues tasks create*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskCreateCall), [*locations queues tasks delete*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskDeleteCall), [*locations queues tasks get*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskGetCall), [*locations queues tasks list*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskListCall), [*locations queues tasks run*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskRunCall) and [*locations queues test iam permissions*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/api::ProjectLocationQueueTestIamPermissionCall) + * [*locations get*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationListCall), [*locations queues create*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueCreateCall), [*locations queues delete*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueDeleteCall), [*locations queues get*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueGetCall), [*locations queues get iam 
policy*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueGetIamPolicyCall), [*locations queues list*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueListCall), [*locations queues patch*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueuePatchCall), [*locations queues pause*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueuePauseCall), [*locations queues purge*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueuePurgeCall), [*locations queues resume*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueResumeCall), [*locations queues set iam policy*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueSetIamPolicyCall), [*locations queues tasks create*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskCreateCall), [*locations queues tasks delete*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskDeleteCall), [*locations queues tasks get*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskGetCall), [*locations queues tasks list*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskListCall), [*locations queues tasks run*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueTaskRunCall) and [*locations queues test iam permissions*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/api::ProjectLocationQueueTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/CloudTasks)** +* **[Hub](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/CloudTasks)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::Part)** + * **[Parts](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::RequestValue) and -[decodable](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::RequestValue) and +[decodable](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudtasks2/5.0.2-beta-1+20230105/google_cloudtasks2/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudtasks2/5.0.2+20230105/google_cloudtasks2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudtasks2/src/api.rs b/gen/cloudtasks2/src/api.rs index d63028670a..135a1cadcb 100644 --- a/gen/cloudtasks2/src/api.rs +++ b/gen/cloudtasks2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudTasks { CloudTasks { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudtasks.googleapis.com/".to_string(), _root_url: "https://cloudtasks.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudTasks { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudtasks2/src/client.rs b/gen/cloudtasks2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudtasks2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudtasks2/src/lib.rs b/gen/cloudtasks2/src/lib.rs index c05f71181c..80d02f78e4 100644 --- a/gen/cloudtasks2/src/lib.rs +++ b/gen/cloudtasks2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Tasks* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Tasks* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Tasks* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/tasks/). diff --git a/gen/cloudtasks2_beta2-cli/Cargo.toml b/gen/cloudtasks2_beta2-cli/Cargo.toml index c70c139dfb..d2371b6e63 100644 --- a/gen/cloudtasks2_beta2-cli/Cargo.toml +++ b/gen/cloudtasks2_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudtasks2_beta2-cli" -version = "4.0.1+20220212" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Tasks (protocol v2beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2_beta2-cli" @@ -20,13 +20,13 @@ name = "cloudtasks2-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudtasks2_beta2] path = "../cloudtasks2_beta2" -version = "4.0.1+20220212" +version = "5.0.2+20230105" + diff --git a/gen/cloudtasks2_beta2-cli/README.md b/gen/cloudtasks2_beta2-cli/README.md index df413d4608..bca29e2584 100644 --- a/gen/cloudtasks2_beta2-cli/README.md +++ b/gen/cloudtasks2_beta2-cli/README.md @@ -25,10 +25,12 @@ 
Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Tasks* API at revision *20220212*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Tasks* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash cloudtasks2-beta2 [options] + api + queue-update (-r )... [-p ]... [-o ] projects locations-get [-p ]... [-o ] locations-list [-p ]... [-o ] @@ -43,6 +45,7 @@ cloudtasks2-beta2 [options] locations-queues-resume (-r )... [-p ]... [-o ] locations-queues-set-iam-policy (-r )... [-p ]... [-o ] locations-queues-tasks-acknowledge (-r )... [-p ]... [-o ] + locations-queues-tasks-buffer (-r )... [-p ]... [-o ] locations-queues-tasks-cancel-lease (-r )... [-p ]... [-o ] locations-queues-tasks-create (-r )... [-p ]... [-o ] locations-queues-tasks-delete [-p ]... [-o ] diff --git a/gen/cloudtasks2_beta2-cli/mkdocs.yml b/gen/cloudtasks2_beta2-cli/mkdocs.yml index d9802b6174..23f625c00a 100644 --- a/gen/cloudtasks2_beta2-cli/mkdocs.yml +++ b/gen/cloudtasks2_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Tasks v4.0.1+20220212 +site_name: Cloud Tasks v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-cloudtasks2_beta2-cli site_description: A complete library to interact with Cloud Tasks (protocol v2beta2) @@ -7,30 +7,34 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-queues-create.md', 'Projects', 'Locations Queues Create'] -- ['projects_locations-queues-delete.md', 'Projects', 'Locations Queues Delete'] -- ['projects_locations-queues-get.md', 'Projects', 'Locations Queues Get'] -- ['projects_locations-queues-get-iam-policy.md', 'Projects', 'Locations Queues Get Iam Policy'] -- 
['projects_locations-queues-list.md', 'Projects', 'Locations Queues List'] -- ['projects_locations-queues-patch.md', 'Projects', 'Locations Queues Patch'] -- ['projects_locations-queues-pause.md', 'Projects', 'Locations Queues Pause'] -- ['projects_locations-queues-purge.md', 'Projects', 'Locations Queues Purge'] -- ['projects_locations-queues-resume.md', 'Projects', 'Locations Queues Resume'] -- ['projects_locations-queues-set-iam-policy.md', 'Projects', 'Locations Queues Set Iam Policy'] -- ['projects_locations-queues-tasks-acknowledge.md', 'Projects', 'Locations Queues Tasks Acknowledge'] -- ['projects_locations-queues-tasks-cancel-lease.md', 'Projects', 'Locations Queues Tasks Cancel Lease'] -- ['projects_locations-queues-tasks-create.md', 'Projects', 'Locations Queues Tasks Create'] -- ['projects_locations-queues-tasks-delete.md', 'Projects', 'Locations Queues Tasks Delete'] -- ['projects_locations-queues-tasks-get.md', 'Projects', 'Locations Queues Tasks Get'] -- ['projects_locations-queues-tasks-lease.md', 'Projects', 'Locations Queues Tasks Lease'] -- ['projects_locations-queues-tasks-list.md', 'Projects', 'Locations Queues Tasks List'] -- ['projects_locations-queues-tasks-renew-lease.md', 'Projects', 'Locations Queues Tasks Renew Lease'] -- ['projects_locations-queues-tasks-run.md', 'Projects', 'Locations Queues Tasks Run'] -- ['projects_locations-queues-test-iam-permissions.md', 'Projects', 'Locations Queues Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Api': + - 'Queue Update': 'api_queue-update.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Queues Create': 'projects_locations-queues-create.md' + - 'Locations Queues Delete': 'projects_locations-queues-delete.md' + - 'Locations Queues Get': 'projects_locations-queues-get.md' + - 'Locations Queues Get Iam Policy': 'projects_locations-queues-get-iam-policy.md' + - 'Locations Queues List': 
'projects_locations-queues-list.md' + - 'Locations Queues Patch': 'projects_locations-queues-patch.md' + - 'Locations Queues Pause': 'projects_locations-queues-pause.md' + - 'Locations Queues Purge': 'projects_locations-queues-purge.md' + - 'Locations Queues Resume': 'projects_locations-queues-resume.md' + - 'Locations Queues Set Iam Policy': 'projects_locations-queues-set-iam-policy.md' + - 'Locations Queues Tasks Acknowledge': 'projects_locations-queues-tasks-acknowledge.md' + - 'Locations Queues Tasks Buffer': 'projects_locations-queues-tasks-buffer.md' + - 'Locations Queues Tasks Cancel Lease': 'projects_locations-queues-tasks-cancel-lease.md' + - 'Locations Queues Tasks Create': 'projects_locations-queues-tasks-create.md' + - 'Locations Queues Tasks Delete': 'projects_locations-queues-tasks-delete.md' + - 'Locations Queues Tasks Get': 'projects_locations-queues-tasks-get.md' + - 'Locations Queues Tasks Lease': 'projects_locations-queues-tasks-lease.md' + - 'Locations Queues Tasks List': 'projects_locations-queues-tasks-list.md' + - 'Locations Queues Tasks Renew Lease': 'projects_locations-queues-tasks-renew-lease.md' + - 'Locations Queues Tasks Run': 'projects_locations-queues-tasks-run.md' + - 'Locations Queues Test Iam Permissions': 'projects_locations-queues-test-iam-permissions.md' theme: readthedocs diff --git a/gen/cloudtasks2_beta2-cli/src/client.rs b/gen/cloudtasks2_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudtasks2_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; 
- -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudtasks2_beta2-cli/src/main.rs b/gen/cloudtasks2_beta2-cli/src/main.rs index 9ec9c9f2d3..e60bfac01b 100644 --- a/gen/cloudtasks2_beta2-cli/src/main.rs +++ b/gen/cloudtasks2_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudtasks2_beta2::{api, Error, oauth2}; +use google_cloudtasks2_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,96 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _api_queue_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "content-type" => Some(("contentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data" => Some(("data", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["content-type", "data"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::HttpBody = json::value::from_value(object).unwrap(); + let mut call = self.hub.api().queue_update(request); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "app-id" => { + call = call.app_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["app-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, 
output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or("")); @@ -113,7 +202,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -192,6 +281,17 @@ where "app-engine-http-target.app-engine-routing-override.instance" => Some(("appEngineHttpTarget.appEngineRoutingOverride.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-engine-http-target.app-engine-routing-override.service" => Some(("appEngineHttpTarget.appEngineRoutingOverride.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-engine-http-target.app-engine-routing-override.version" => Some(("appEngineHttpTarget.appEngineRoutingOverride.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.http-method" => Some(("httpTarget.httpMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.oauth-token.scope" => Some(("httpTarget.oauthToken.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.oauth-token.service-account-email" => Some(("httpTarget.oauthToken.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.oidc-token.audience" => Some(("httpTarget.oidcToken.audience", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "http-target.oidc-token.service-account-email" => Some(("httpTarget.oidcToken.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.host" => Some(("httpTarget.uriOverride.host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.path-override.path" => Some(("httpTarget.uriOverride.pathOverride.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.port" => Some(("httpTarget.uriOverride.port", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.query-override.query-params" => Some(("httpTarget.uriOverride.queryOverride.queryParams", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.scheme" => Some(("httpTarget.uriOverride.scheme", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.uri-override-enforce-mode" => Some(("httpTarget.uriOverride.uriOverrideEnforceMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "purge-time" => Some(("purgeTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rate-limits.max-burst-size" => Some(("rateLimits.maxBurstSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -212,7 +312,7 @@ where "task-ttl" => Some(("taskTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "tombstone-ttl" => Some(("tombstoneTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-target", "app-engine-routing-override", "concurrent-dispatches-count", "effective-execution-rate", "executed-last-minute-count", "host", "instance", "max-attempts", "max-backoff", 
"max-burst-size", "max-concurrent-tasks", "max-doublings", "max-retry-duration", "max-tasks-dispatched-per-second", "min-backoff", "name", "oldest-estimated-arrival-time", "purge-time", "rate-limits", "retry-config", "service", "state", "stats", "task-ttl", "tasks-count", "tombstone-ttl", "unlimited-attempts", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-target", "app-engine-routing-override", "audience", "concurrent-dispatches-count", "effective-execution-rate", "executed-last-minute-count", "host", "http-method", "http-target", "instance", "max-attempts", "max-backoff", "max-burst-size", "max-concurrent-tasks", "max-doublings", "max-retry-duration", "max-tasks-dispatched-per-second", "min-backoff", "name", "oauth-token", "oidc-token", "oldest-estimated-arrival-time", "path", "path-override", "port", "purge-time", "query-override", "query-params", "rate-limits", "retry-config", "scheme", "scope", "service", "service-account-email", "state", "stats", "task-ttl", "tasks-count", "tombstone-ttl", "unlimited-attempts", "uri-override", "uri-override-enforce-mode", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -331,7 +431,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -472,13 +572,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -557,6 +657,17 @@ where "app-engine-http-target.app-engine-routing-override.instance" => Some(("appEngineHttpTarget.appEngineRoutingOverride.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-engine-http-target.app-engine-routing-override.service" => Some(("appEngineHttpTarget.appEngineRoutingOverride.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-engine-http-target.app-engine-routing-override.version" => Some(("appEngineHttpTarget.appEngineRoutingOverride.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.http-method" => Some(("httpTarget.httpMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.oauth-token.scope" => Some(("httpTarget.oauthToken.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.oauth-token.service-account-email" => Some(("httpTarget.oauthToken.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.oidc-token.audience" => Some(("httpTarget.oidcToken.audience", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.oidc-token.service-account-email" => Some(("httpTarget.oidcToken.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.host" => Some(("httpTarget.uriOverride.host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.path-override.path" => Some(("httpTarget.uriOverride.pathOverride.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.port" => 
Some(("httpTarget.uriOverride.port", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.query-override.query-params" => Some(("httpTarget.uriOverride.queryOverride.queryParams", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.scheme" => Some(("httpTarget.uriOverride.scheme", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.uri-override-enforce-mode" => Some(("httpTarget.uriOverride.uriOverrideEnforceMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "purge-time" => Some(("purgeTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rate-limits.max-burst-size" => Some(("rateLimits.maxBurstSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -577,7 +688,7 @@ where "task-ttl" => Some(("taskTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "tombstone-ttl" => Some(("tombstoneTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-target", "app-engine-routing-override", "concurrent-dispatches-count", "effective-execution-rate", "executed-last-minute-count", "host", "instance", "max-attempts", "max-backoff", "max-burst-size", "max-concurrent-tasks", "max-doublings", "max-retry-duration", "max-tasks-dispatched-per-second", "min-backoff", "name", "oldest-estimated-arrival-time", "purge-time", "rate-limits", "retry-config", "service", "state", "stats", "task-ttl", "tasks-count", "tombstone-ttl", "unlimited-attempts", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-target", "app-engine-routing-override", "audience", "concurrent-dispatches-count", "effective-execution-rate", "executed-last-minute-count", "host", 
"http-method", "http-target", "instance", "max-attempts", "max-backoff", "max-burst-size", "max-concurrent-tasks", "max-doublings", "max-retry-duration", "max-tasks-dispatched-per-second", "min-backoff", "name", "oauth-token", "oidc-token", "oldest-estimated-arrival-time", "path", "path-override", "port", "purge-time", "query-override", "query-params", "rate-limits", "retry-config", "scheme", "scope", "service", "service-account-email", "state", "stats", "task-ttl", "tasks-count", "tombstone-ttl", "unlimited-attempts", "uri-override", "uri-override-enforce-mode", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -592,7 +703,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1064,6 +1175,92 @@ where } } + async fn _projects_locations_queues_tasks_buffer(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "body.content-type" => Some(("body.contentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "body.data" => Some(("body.data", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["body", "content-type", "data"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::BufferTaskRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_queues_tasks_buffer(request, opt.value_of("queue").unwrap_or(""), opt.value_of("task-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_queues_tasks_cancel_lease(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1183,6 +1380,14 @@ where "task.app-engine-http-request.payload" => Some(("task.appEngineHttpRequest.payload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "task.app-engine-http-request.relative-url" => Some(("task.appEngineHttpRequest.relativeUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "task.create-time" => Some(("task.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "task.http-request.body" => Some(("task.httpRequest.body", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "task.http-request.headers" => Some(("task.httpRequest.headers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "task.http-request.http-method" => Some(("task.httpRequest.httpMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "task.http-request.oauth-token.scope" => Some(("task.httpRequest.oauthToken.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "task.http-request.oauth-token.service-account-email" => Some(("task.httpRequest.oauthToken.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "task.http-request.oidc-token.audience" => Some(("task.httpRequest.oidcToken.audience", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "task.http-request.oidc-token.service-account-email" => Some(("task.httpRequest.oidcToken.serviceAccountEmail", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "task.http-request.url" => Some(("task.httpRequest.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "task.name" => Some(("task.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "task.pull-message.payload" => Some(("task.pullMessage.payload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "task.pull-message.tag" => Some(("task.pullMessage.tag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1201,7 +1406,7 @@ where "task.status.last-attempt-status.schedule-time" => Some(("task.status.lastAttemptStatus.scheduleTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "task.view" => Some(("task.view", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-request", "app-engine-routing", "attempt-dispatch-count", "attempt-response-count", "code", "create-time", "dispatch-time", "first-attempt-status", "headers", "host", "http-method", "instance", "last-attempt-status", "message", "name", "payload", "pull-message", "relative-url", "response-status", "response-time", "response-view", "schedule-time", "service", "status", "tag", "task", "version", "view"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-request", "app-engine-routing", "attempt-dispatch-count", "attempt-response-count", "audience", "body", "code", "create-time", "dispatch-time", "first-attempt-status", "headers", "host", "http-method", "http-request", "instance", "last-attempt-status", "message", "name", "oauth-token", "oidc-token", "payload", "pull-message", "relative-url", "response-status", "response-time", "response-view", "schedule-time", "scope", "service", "service-account-email", "status", "tag", "task", "url", "version", "view"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| 
v.to_string())))); None } @@ -1470,7 +1675,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1781,6 +1986,17 @@ where let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: Option = None; match self.opt.subcommand() { + ("api", Some(opt)) => { + match opt.subcommand() { + ("queue-update", Some(opt)) => { + call_result = self._api_queue_update(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("api".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("projects", Some(opt)) => { match opt.subcommand() { ("locations-get", Some(opt)) => { @@ -1822,6 +2038,9 @@ where ("locations-queues-tasks-acknowledge", Some(opt)) => { call_result = self._projects_locations_queues_tasks_acknowledge(opt, dry_run, &mut err).await; }, + ("locations-queues-tasks-buffer", Some(opt)) => { + call_result = self._projects_locations_queues_tasks_buffer(opt, dry_run, &mut err).await; + }, ("locations-queues-tasks-cancel-lease", Some(opt)) => { call_result = self._projects_locations_queues_tasks_cancel_lease(opt, dry_run, &mut err).await; }, @@ -1928,7 +2147,32 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-list', 'locations-queues-create', 'locations-queues-delete', 'locations-queues-get', 'locations-queues-get-iam-policy', 'locations-queues-list', 'locations-queues-patch', 'locations-queues-pause', 'locations-queues-purge', 'locations-queues-resume', 'locations-queues-set-iam-policy', 'locations-queues-tasks-acknowledge', 'locations-queues-tasks-cancel-lease', 'locations-queues-tasks-create', 'locations-queues-tasks-delete', 'locations-queues-tasks-get', 'locations-queues-tasks-lease', 
'locations-queues-tasks-list', 'locations-queues-tasks-renew-lease', 'locations-queues-tasks-run' and 'locations-queues-test-iam-permissions'", vec![ + ("api", "methods: 'queue-update'", vec![ + ("queue-update", + Some(r##"Update queue list by uploading a queue.yaml file. The queue.yaml file is supplied in the request body as a YAML encoded string. This method was added to support gcloud clients versions before 322.0.0. New clients should use CreateQueue instead of this method."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudtasks2_beta2_cli/api_queue-update", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + + ("projects", "methods: 'locations-get', 'locations-list', 'locations-queues-create', 'locations-queues-delete', 'locations-queues-get', 'locations-queues-get-iam-policy', 'locations-queues-list', 'locations-queues-patch', 'locations-queues-pause', 'locations-queues-purge', 'locations-queues-resume', 'locations-queues-set-iam-policy', 'locations-queues-tasks-acknowledge', 'locations-queues-tasks-buffer', 'locations-queues-tasks-cancel-lease', 'locations-queues-tasks-create', 'locations-queues-tasks-delete', 'locations-queues-tasks-get', 'locations-queues-tasks-lease', 'locations-queues-tasks-list', 'locations-queues-tasks-renew-lease', 'locations-queues-tasks-run' and 'locations-queues-test-iam-permissions'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_cloudtasks2_beta2_cli/projects_locations-get", @@ -2051,7 +2295,7 @@ async fn main() { 
vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2213,7 +2457,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2257,6 +2501,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-queues-tasks-buffer", + Some(r##"Creates and buffers a new task without the need to explicitly define a Task message. The queue must have HTTP target. To create the task with a custom ID, use the following format and set TASK_ID to your desired ID: projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer To create the task with an automatically generated ID, use the following format: projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. Note: This feature is in its experimental stage. You must request access to the API through the [Cloud Tasks BufferTask Experiment Signup form](https://forms.gle/X8Zr5hiXH5tTGFqh8)."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudtasks2_beta2_cli/projects_locations-queues-tasks-buffer", + vec![ + (Some(r##"queue"##), + None, + Some(r##"Required. The parent queue name. 
For example: projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` The queue must already exist."##), + Some(true), + Some(false)), + + (Some(r##"task-id"##), + None, + Some(r##"Optional. Task ID for the task being created. If not provided, a random task ID is assigned to the task."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2475,7 +2753,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2503,7 +2781,7 @@ async fn main() { let mut app = App::new("cloudtasks2-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20220212") + .version("5.0.2+20230105") .about("Manages the execution of large numbers of distributed requests.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudtasks2_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudtasks2_beta2/Cargo.toml b/gen/cloudtasks2_beta2/Cargo.toml index 4e49fe349d..2c603d3cd2 100644 --- a/gen/cloudtasks2_beta2/Cargo.toml +++ b/gen/cloudtasks2_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudtasks2_beta2" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Tasks (protocol v2beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2_beta2" homepage = "https://cloud.google.com/tasks/" -documentation = "https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105" license = "MIT" keywords = ["cloudtasks", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudtasks2_beta2/README.md b/gen/cloudtasks2_beta2/README.md index 2c3270b1f1..c0dddc1e2b 100644 --- a/gen/cloudtasks2_beta2/README.md +++ b/gen/cloudtasks2_beta2/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-cloudtasks2_beta2` library allows access to all features of the *Google Cloud Tasks* service. -This documentation was generated from *Cloud Tasks* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Tasks* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Tasks* *v2_beta2* API can be found at the [official documentation site](https://cloud.google.com/tasks/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/CloudTasks) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/CloudTasks) ... * api - * [*queue update*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ApiQueueUpdateCall) + * [*queue update*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ApiQueueUpdateCall) * projects - * [*locations get*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationListCall), [*locations queues create*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueCreateCall), [*locations queues delete*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueDeleteCall), [*locations queues get*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueGetCall), [*locations queues get iam policy*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueGetIamPolicyCall), [*locations queues 
list*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueListCall), [*locations queues patch*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueuePatchCall), [*locations queues pause*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueuePauseCall), [*locations queues purge*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueuePurgeCall), [*locations queues resume*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueResumeCall), [*locations queues set iam policy*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueSetIamPolicyCall), [*locations queues tasks acknowledge*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskAcknowledgeCall), [*locations queues tasks buffer*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskBufferCall), [*locations queues tasks cancel lease*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskCancelLeaseCall), [*locations queues tasks create*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskCreateCall), [*locations queues tasks delete*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskDeleteCall), [*locations queues tasks get*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskGetCall), [*locations queues tasks 
lease*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskLeaseCall), [*locations queues tasks list*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskListCall), [*locations queues tasks renew lease*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskRenewLeaseCall), [*locations queues tasks run*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskRunCall) and [*locations queues test iam permissions*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTestIamPermissionCall) + * [*locations get*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationListCall), [*locations queues create*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueCreateCall), [*locations queues delete*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueDeleteCall), [*locations queues get*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueGetCall), [*locations queues get iam policy*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueGetIamPolicyCall), [*locations queues list*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueListCall), [*locations queues patch*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueuePatchCall), [*locations queues 
pause*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueuePauseCall), [*locations queues purge*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueuePurgeCall), [*locations queues resume*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueResumeCall), [*locations queues set iam policy*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueSetIamPolicyCall), [*locations queues tasks acknowledge*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskAcknowledgeCall), [*locations queues tasks buffer*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskBufferCall), [*locations queues tasks cancel lease*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskCancelLeaseCall), [*locations queues tasks create*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskCreateCall), [*locations queues tasks delete*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskDeleteCall), [*locations queues tasks get*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskGetCall), [*locations queues tasks lease*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskLeaseCall), [*locations queues tasks list*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskListCall), [*locations queues tasks renew 
lease*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskRenewLeaseCall), [*locations queues tasks run*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTaskRunCall) and [*locations queues test iam permissions*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/api::ProjectLocationQueueTestIamPermissionCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/CloudTasks)** +* **[Hub](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/CloudTasks)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::Part)** + * 
**[Parts](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudtasks2_beta2/5.0.2-beta-1+20230105/google_cloudtasks2_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudtasks2_beta2/5.0.2+20230105/google_cloudtasks2_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudtasks2_beta2/src/api.rs b/gen/cloudtasks2_beta2/src/api.rs index 52a7866efb..4d5608ab46 100644 --- a/gen/cloudtasks2_beta2/src/api.rs +++ b/gen/cloudtasks2_beta2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudTasks { CloudTasks { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudtasks.googleapis.com/".to_string(), _root_url: "https://cloudtasks.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudTasks { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudtasks2_beta2/src/client.rs b/gen/cloudtasks2_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudtasks2_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudtasks2_beta2/src/lib.rs b/gen/cloudtasks2_beta2/src/lib.rs index ebb177514b..389b8b30af 100644 --- a/gen/cloudtasks2_beta2/src/lib.rs +++ b/gen/cloudtasks2_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Tasks* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Tasks* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Tasks* *v2_beta2* API can be found at the //! [official documentation site](https://cloud.google.com/tasks/). diff --git a/gen/cloudtasks2_beta3-cli/Cargo.toml b/gen/cloudtasks2_beta3-cli/Cargo.toml index 1ce137e41f..ae6a425758 100644 --- a/gen/cloudtasks2_beta3-cli/Cargo.toml +++ b/gen/cloudtasks2_beta3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudtasks2_beta3-cli" -version = "4.0.1+20220212" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Tasks (protocol v2beta3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2_beta3-cli" @@ -20,13 +20,13 @@ name = "cloudtasks2-beta3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudtasks2_beta3] path = "../cloudtasks2_beta3" -version = "4.0.1+20220212" +version = "5.0.2+20230105" + diff --git a/gen/cloudtasks2_beta3-cli/README.md b/gen/cloudtasks2_beta3-cli/README.md index fa90d8342f..e231daa546 100644 --- a/gen/cloudtasks2_beta3-cli/README.md +++ b/gen/cloudtasks2_beta3-cli/README.md @@ 
-25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Tasks* API at revision *20220212*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Tasks* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash cloudtasks2-beta3 [options] @@ -42,6 +42,7 @@ cloudtasks2-beta3 [options] locations-queues-purge (-r )... [-p ]... [-o ] locations-queues-resume (-r )... [-p ]... [-o ] locations-queues-set-iam-policy (-r )... [-p ]... [-o ] + locations-queues-tasks-buffer (-r )... [-p ]... [-o ] locations-queues-tasks-create (-r )... [-p ]... [-o ] locations-queues-tasks-delete [-p ]... [-o ] locations-queues-tasks-get [-p ]... [-o ] diff --git a/gen/cloudtasks2_beta3-cli/mkdocs.yml b/gen/cloudtasks2_beta3-cli/mkdocs.yml index fedd5ea4b4..830dadf280 100644 --- a/gen/cloudtasks2_beta3-cli/mkdocs.yml +++ b/gen/cloudtasks2_beta3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Tasks v4.0.1+20220212 +site_name: Cloud Tasks v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-cloudtasks2_beta3-cli site_description: A complete library to interact with Cloud Tasks (protocol v2beta3) @@ -7,26 +7,28 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-queues-create.md', 'Projects', 'Locations Queues Create'] -- ['projects_locations-queues-delete.md', 'Projects', 'Locations Queues Delete'] -- ['projects_locations-queues-get.md', 'Projects', 'Locations Queues Get'] -- ['projects_locations-queues-get-iam-policy.md', 'Projects', 'Locations Queues Get Iam Policy'] -- ['projects_locations-queues-list.md', 'Projects', 'Locations Queues List'] -- ['projects_locations-queues-patch.md', 'Projects', 
'Locations Queues Patch'] -- ['projects_locations-queues-pause.md', 'Projects', 'Locations Queues Pause'] -- ['projects_locations-queues-purge.md', 'Projects', 'Locations Queues Purge'] -- ['projects_locations-queues-resume.md', 'Projects', 'Locations Queues Resume'] -- ['projects_locations-queues-set-iam-policy.md', 'Projects', 'Locations Queues Set Iam Policy'] -- ['projects_locations-queues-tasks-create.md', 'Projects', 'Locations Queues Tasks Create'] -- ['projects_locations-queues-tasks-delete.md', 'Projects', 'Locations Queues Tasks Delete'] -- ['projects_locations-queues-tasks-get.md', 'Projects', 'Locations Queues Tasks Get'] -- ['projects_locations-queues-tasks-list.md', 'Projects', 'Locations Queues Tasks List'] -- ['projects_locations-queues-tasks-run.md', 'Projects', 'Locations Queues Tasks Run'] -- ['projects_locations-queues-test-iam-permissions.md', 'Projects', 'Locations Queues Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Queues Create': 'projects_locations-queues-create.md' + - 'Locations Queues Delete': 'projects_locations-queues-delete.md' + - 'Locations Queues Get': 'projects_locations-queues-get.md' + - 'Locations Queues Get Iam Policy': 'projects_locations-queues-get-iam-policy.md' + - 'Locations Queues List': 'projects_locations-queues-list.md' + - 'Locations Queues Patch': 'projects_locations-queues-patch.md' + - 'Locations Queues Pause': 'projects_locations-queues-pause.md' + - 'Locations Queues Purge': 'projects_locations-queues-purge.md' + - 'Locations Queues Resume': 'projects_locations-queues-resume.md' + - 'Locations Queues Set Iam Policy': 'projects_locations-queues-set-iam-policy.md' + - 'Locations Queues Tasks Buffer': 'projects_locations-queues-tasks-buffer.md' + - 'Locations Queues Tasks Create': 'projects_locations-queues-tasks-create.md' + - 'Locations Queues Tasks Delete': 
'projects_locations-queues-tasks-delete.md' + - 'Locations Queues Tasks Get': 'projects_locations-queues-tasks-get.md' + - 'Locations Queues Tasks List': 'projects_locations-queues-tasks-list.md' + - 'Locations Queues Tasks Run': 'projects_locations-queues-tasks-run.md' + - 'Locations Queues Test Iam Permissions': 'projects_locations-queues-test-iam-permissions.md' theme: readthedocs diff --git a/gen/cloudtasks2_beta3-cli/src/client.rs b/gen/cloudtasks2_beta3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudtasks2_beta3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudtasks2_beta3-cli/src/main.rs b/gen/cloudtasks2_beta3-cli/src/main.rs index 64247db54b..e780fa175e 100644 --- a/gen/cloudtasks2_beta3-cli/src/main.rs +++ b/gen/cloudtasks2_beta3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudtasks2_beta3::{api, Error, oauth2}; +use google_cloudtasks2_beta3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -192,6 +191,13 @@ where "app-engine-http-queue.app-engine-routing-override.instance" => Some(("appEngineHttpQueue.appEngineRoutingOverride.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-engine-http-queue.app-engine-routing-override.service" => Some(("appEngineHttpQueue.appEngineRoutingOverride.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-engine-http-queue.app-engine-routing-override.version" => Some(("appEngineHttpQueue.appEngineRoutingOverride.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.http-method" => Some(("httpTarget.httpMethod", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "http-target.uri-override.host" => Some(("httpTarget.uriOverride.host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.path-override.path" => Some(("httpTarget.uriOverride.pathOverride.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.port" => Some(("httpTarget.uriOverride.port", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.query-override.query-params" => Some(("httpTarget.uriOverride.queryOverride.queryParams", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.scheme" => Some(("httpTarget.uriOverride.scheme", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.uri-override-enforce-mode" => Some(("httpTarget.uriOverride.uriOverrideEnforceMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "purge-time" => Some(("purgeTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rate-limits.max-burst-size" => Some(("rateLimits.maxBurstSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -213,7 +219,7 @@ where "tombstone-ttl" => Some(("tombstoneTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-queue", "app-engine-routing-override", "concurrent-dispatches-count", "effective-execution-rate", "executed-last-minute-count", "host", "instance", "max-attempts", "max-backoff", "max-burst-size", "max-concurrent-dispatches", "max-dispatches-per-second", "max-doublings", "max-retry-duration", "min-backoff", "name", "oldest-estimated-arrival-time", "purge-time", 
"rate-limits", "retry-config", "sampling-ratio", "service", "stackdriver-logging-config", "state", "stats", "task-ttl", "tasks-count", "tombstone-ttl", "type", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-queue", "app-engine-routing-override", "concurrent-dispatches-count", "effective-execution-rate", "executed-last-minute-count", "host", "http-method", "http-target", "instance", "max-attempts", "max-backoff", "max-burst-size", "max-concurrent-dispatches", "max-dispatches-per-second", "max-doublings", "max-retry-duration", "min-backoff", "name", "oldest-estimated-arrival-time", "path", "path-override", "port", "purge-time", "query-override", "query-params", "rate-limits", "retry-config", "sampling-ratio", "scheme", "service", "stackdriver-logging-config", "state", "stats", "task-ttl", "tasks-count", "tombstone-ttl", "type", "uri-override", "uri-override-enforce-mode", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -332,7 +338,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -473,13 +479,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ 
-558,6 +564,13 @@ where "app-engine-http-queue.app-engine-routing-override.instance" => Some(("appEngineHttpQueue.appEngineRoutingOverride.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-engine-http-queue.app-engine-routing-override.service" => Some(("appEngineHttpQueue.appEngineRoutingOverride.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-engine-http-queue.app-engine-routing-override.version" => Some(("appEngineHttpQueue.appEngineRoutingOverride.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.http-method" => Some(("httpTarget.httpMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.host" => Some(("httpTarget.uriOverride.host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.path-override.path" => Some(("httpTarget.uriOverride.pathOverride.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.port" => Some(("httpTarget.uriOverride.port", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.query-override.query-params" => Some(("httpTarget.uriOverride.queryOverride.queryParams", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.scheme" => Some(("httpTarget.uriOverride.scheme", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-target.uri-override.uri-override-enforce-mode" => Some(("httpTarget.uriOverride.uriOverrideEnforceMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "purge-time" => Some(("purgeTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rate-limits.max-burst-size" => Some(("rateLimits.maxBurstSize", JsonTypeInfo { jtype: 
JsonType::Int, ctype: ComplexType::Pod })), @@ -579,7 +592,7 @@ where "tombstone-ttl" => Some(("tombstoneTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-queue", "app-engine-routing-override", "concurrent-dispatches-count", "effective-execution-rate", "executed-last-minute-count", "host", "instance", "max-attempts", "max-backoff", "max-burst-size", "max-concurrent-dispatches", "max-dispatches-per-second", "max-doublings", "max-retry-duration", "min-backoff", "name", "oldest-estimated-arrival-time", "purge-time", "rate-limits", "retry-config", "sampling-ratio", "service", "stackdriver-logging-config", "state", "stats", "task-ttl", "tasks-count", "tombstone-ttl", "type", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-http-queue", "app-engine-routing-override", "concurrent-dispatches-count", "effective-execution-rate", "executed-last-minute-count", "host", "http-method", "http-target", "instance", "max-attempts", "max-backoff", "max-burst-size", "max-concurrent-dispatches", "max-dispatches-per-second", "max-doublings", "max-retry-duration", "min-backoff", "name", "oldest-estimated-arrival-time", "path", "path-override", "port", "purge-time", "query-override", "query-params", "rate-limits", "retry-config", "sampling-ratio", "scheme", "service", "stackdriver-logging-config", "state", "stats", "task-ttl", "tasks-count", "tombstone-ttl", "type", "uri-override", "uri-override-enforce-mode", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -594,7 +607,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, 
err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -981,6 +994,92 @@ where } } + async fn _projects_locations_queues_tasks_buffer(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "body.content-type" => Some(("body.contentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "body.data" => Some(("body.data", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["body", "content-type", "data"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::BufferTaskRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_queues_tasks_buffer(request, opt.value_of("queue").unwrap_or(""), opt.value_of("task-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match 
key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_queues_tasks_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1222,7 +1321,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1484,6 +1583,9 @@ where ("locations-queues-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_queues_set_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-queues-tasks-buffer", Some(opt)) => { + call_result = 
self._projects_locations_queues_tasks_buffer(opt, dry_run, &mut err).await; + }, ("locations-queues-tasks-create", Some(opt)) => { call_result = self._projects_locations_queues_tasks_create(opt, dry_run, &mut err).await; }, @@ -1581,7 +1683,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-list', 'locations-queues-create', 'locations-queues-delete', 'locations-queues-get', 'locations-queues-get-iam-policy', 'locations-queues-list', 'locations-queues-patch', 'locations-queues-pause', 'locations-queues-purge', 'locations-queues-resume', 'locations-queues-set-iam-policy', 'locations-queues-tasks-create', 'locations-queues-tasks-delete', 'locations-queues-tasks-get', 'locations-queues-tasks-list', 'locations-queues-tasks-run' and 'locations-queues-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-get', 'locations-list', 'locations-queues-create', 'locations-queues-delete', 'locations-queues-get', 'locations-queues-get-iam-policy', 'locations-queues-list', 'locations-queues-patch', 'locations-queues-pause', 'locations-queues-purge', 'locations-queues-resume', 'locations-queues-set-iam-policy', 'locations-queues-tasks-buffer', 'locations-queues-tasks-create', 'locations-queues-tasks-delete', 'locations-queues-tasks-get', 'locations-queues-tasks-list', 'locations-queues-tasks-run' and 'locations-queues-test-iam-permissions'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_cloudtasks2_beta3_cli/projects_locations-get", @@ -1704,7 +1806,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1866,7 +1968,41 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-queues-tasks-buffer", + Some(r##"Creates and buffers a new task without the need to explicitly define a Task message. The queue must have HTTP target. To create the task with a custom ID, use the following format and set TASK_ID to your desired ID: projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer To create the task with an automatically generated ID, use the following format: projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. Note: This feature is in its experimental stage. You must request access to the API through the [Cloud Tasks BufferTask Experiment Signup form](https://forms.gle/X8Zr5hiXH5tTGFqh8)."##), + "Details at http://byron.github.io/google-apis-rs/google_cloudtasks2_beta3_cli/projects_locations-queues-tasks-buffer", + vec![ + (Some(r##"queue"##), + None, + Some(r##"Required. The parent queue name. 
For example: projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` The queue must already exist."##), + Some(true), + Some(false)), + + (Some(r##"task-id"##), + None, + Some(r##"Optional. Task ID for the task being created. If not provided, a random task ID is assigned to the task."##), Some(true), Some(false)), @@ -2016,7 +2152,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2044,7 +2180,7 @@ async fn main() { let mut app = App::new("cloudtasks2-beta3") .author("Sebastian Thiel ") - .version("4.0.1+20220212") + .version("5.0.2+20230105") .about("Manages the execution of large numbers of distributed requests.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudtasks2_beta3_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudtasks2_beta3/Cargo.toml b/gen/cloudtasks2_beta3/Cargo.toml index 53211793b1..fad9fc9e94 100644 --- a/gen/cloudtasks2_beta3/Cargo.toml +++ b/gen/cloudtasks2_beta3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudtasks2_beta3" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Tasks (protocol v2beta3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtasks2_beta3" homepage = "https://cloud.google.com/tasks/" -documentation = "https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105" license = "MIT" keywords = ["cloudtasks", "google", "protocol", "web", "api"] autobins = false 
diff --git a/gen/cloudtasks2_beta3/README.md b/gen/cloudtasks2_beta3/README.md index 561cf77924..cc87e478f0 100644 --- a/gen/cloudtasks2_beta3/README.md +++ b/gen/cloudtasks2_beta3/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudtasks2_beta3` library allows access to all features of the *Google Cloud Tasks* service. -This documentation was generated from *Cloud Tasks* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2beta3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Tasks* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2beta3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Tasks* *v2_beta3* API can be found at the [official documentation site](https://cloud.google.com/tasks/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/CloudTasks) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/CloudTasks) ... 
* projects - * [*locations get*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationListCall), [*locations queues create*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueCreateCall), [*locations queues delete*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueDeleteCall), [*locations queues get*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueGetCall), [*locations queues get iam policy*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueGetIamPolicyCall), [*locations queues list*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueListCall), [*locations queues patch*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueuePatchCall), [*locations queues pause*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueuePauseCall), [*locations queues purge*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueuePurgeCall), [*locations queues resume*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueResumeCall), [*locations queues set iam policy*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueSetIamPolicyCall), [*locations queues tasks 
buffer*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskBufferCall), [*locations queues tasks create*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskCreateCall), [*locations queues tasks delete*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskDeleteCall), [*locations queues tasks get*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskGetCall), [*locations queues tasks list*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskListCall), [*locations queues tasks run*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskRunCall) and [*locations queues test iam permissions*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTestIamPermissionCall) + * [*locations get*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationListCall), [*locations queues create*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueCreateCall), [*locations queues delete*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueDeleteCall), [*locations queues get*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueGetCall), [*locations queues get iam policy*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueGetIamPolicyCall), [*locations 
queues list*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueListCall), [*locations queues patch*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueuePatchCall), [*locations queues pause*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueuePauseCall), [*locations queues purge*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueuePurgeCall), [*locations queues resume*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueResumeCall), [*locations queues set iam policy*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueSetIamPolicyCall), [*locations queues tasks buffer*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskBufferCall), [*locations queues tasks create*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskCreateCall), [*locations queues tasks delete*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskDeleteCall), [*locations queues tasks get*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskGetCall), [*locations queues tasks list*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskListCall), [*locations queues tasks run*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTaskRunCall) and [*locations queues test iam permissions*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/api::ProjectLocationQueueTestIamPermissionCall) @@ -23,17 +23,17 @@ 
Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/CloudTasks)** +* **[Hub](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/CloudTasks)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::Part)** + * **[Parts](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with 
applicable traits to further categorize them and ease browsing. @@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::RequestValue) and -[decodable](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::RequestValue) and +[decodable](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudtasks2_beta3/5.0.2-beta-1+20230105/google_cloudtasks2_beta3/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudtasks2_beta3/5.0.2+20230105/google_cloudtasks2_beta3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudtasks2_beta3/src/api.rs b/gen/cloudtasks2_beta3/src/api.rs index 748f87aed7..65821f7a0f 100644 --- a/gen/cloudtasks2_beta3/src/api.rs +++ b/gen/cloudtasks2_beta3/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudTasks { CloudTasks { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudtasks.googleapis.com/".to_string(), _root_url: "https://cloudtasks.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudTasks { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudtasks2_beta3/src/client.rs b/gen/cloudtasks2_beta3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudtasks2_beta3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudtasks2_beta3/src/lib.rs b/gen/cloudtasks2_beta3/src/lib.rs index d16e281574..fed752632c 100644 --- a/gen/cloudtasks2_beta3/src/lib.rs +++ b/gen/cloudtasks2_beta3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Tasks* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2beta3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Tasks* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *cloudtasks:v2beta3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Tasks* *v2_beta3* API can be found at the //! [official documentation site](https://cloud.google.com/tasks/). diff --git a/gen/cloudtrace1-cli/Cargo.toml b/gen/cloudtrace1-cli/Cargo.toml index 6a4a66762c..421db34475 100644 --- a/gen/cloudtrace1-cli/Cargo.toml +++ b/gen/cloudtrace1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudtrace1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Trace (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtrace1-cli" @@ -20,13 +20,13 @@ name = "cloudtrace1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudtrace1] path = "../cloudtrace1" -version = "4.0.1+20220224" +version = "5.0.2+20230112" + diff --git a/gen/cloudtrace1-cli/README.md b/gen/cloudtrace1-cli/README.md index 0518cd278e..cf3f3607a3 100644 --- a/gen/cloudtrace1-cli/README.md +++ b/gen/cloudtrace1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Trace* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Trace* API at revision *20230112*. The CLI is at version *5.0.2*. ```bash cloudtrace1 [options] diff --git a/gen/cloudtrace1-cli/mkdocs.yml b/gen/cloudtrace1-cli/mkdocs.yml index 16df32e12f..f2c6d0da17 100644 --- a/gen/cloudtrace1-cli/mkdocs.yml +++ b/gen/cloudtrace1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Trace v4.0.1+20220224 +site_name: Cloud Trace v5.0.2+20230112 site_url: http://byron.github.io/google-apis-rs/google-cloudtrace1-cli site_description: A complete library to interact with Cloud Trace (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtrace1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_patch-traces.md', 'Projects', 'Patch Traces'] -- ['projects_traces-get.md', 'Projects', 'Traces Get'] -- ['projects_traces-list.md', 'Projects', 'Traces List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Patch Traces': 'projects_patch-traces.md' + - 'Traces Get': 'projects_traces-get.md' + - 'Traces List': 'projects_traces-list.md' theme: readthedocs diff --git a/gen/cloudtrace1-cli/src/client.rs b/gen/cloudtrace1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudtrace1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char 
= '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudtrace1-cli/src/main.rs b/gen/cloudtrace1-cli/src/main.rs index c829643bef..019efc960d 100644 --- a/gen/cloudtrace1-cli/src/main.rs +++ b/gen/cloudtrace1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudtrace1::{api, Error, oauth2}; +use google_cloudtrace1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -197,13 +196,13 @@ where call = call.view(value.unwrap_or("")); }, "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -212,7 +211,7 @@ where call = call.filter(value.unwrap_or("")); }, "end-time" => { - call = call.end_time(value.unwrap_or("")); + call = call.end_time( value.map(|v| arg_from_str(v, err, "end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -441,7 +440,7 @@ async fn main() { let mut app = App::new("cloudtrace1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230112") .about("Sends application trace data to Cloud Trace 
for viewing. Trace data is collected for all App Engine applications by default. Trace data from other applications can be provided using this API. This library is used to interact with the Cloud Trace API directly. If you are looking to instrument your application for Cloud Trace, we recommend using OpenTelemetry. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudtrace1_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudtrace1/Cargo.toml b/gen/cloudtrace1/Cargo.toml index c06333b809..bec441764f 100644 --- a/gen/cloudtrace1/Cargo.toml +++ b/gen/cloudtrace1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudtrace1" -version = "5.0.2-beta-1+20230112" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Trace (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtrace1" homepage = "https://cloud.google.com/trace" -documentation = "https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112" +documentation = "https://docs.rs/google-cloudtrace1/5.0.2+20230112" license = "MIT" keywords = ["cloudtrace", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudtrace1/README.md b/gen/cloudtrace1/README.md index 40fd68d31c..bd43cef29c 100644 --- a/gen/cloudtrace1/README.md +++ b/gen/cloudtrace1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudtrace1` library allows access to all features of the *Google Cloud Trace* service. -This documentation was generated from *Cloud Trace* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *cloudtrace:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Trace* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *cloudtrace:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Trace* *v1* API can be found at the [official documentation site](https://cloud.google.com/trace). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/CloudTrace) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/CloudTrace) ... * projects - * [*patch traces*](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/api::ProjectPatchTraceCall), [*traces get*](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/api::ProjectTraceGetCall) and [*traces list*](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/api::ProjectTraceListCall) + * [*patch traces*](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/api::ProjectPatchTraceCall), [*traces get*](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/api::ProjectTraceGetCall) and [*traces list*](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/api::ProjectTraceListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/CloudTrace)** +* **[Hub](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/CloudTrace)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::MethodsBuilder) which in turn - allow access to 
individual [*Call Builders*](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::Part)** + * **[Parts](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -118,17 +118,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -138,29 +138,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::RequestValue) and -[decodable](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::RequestValue) and +[decodable](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudtrace1/5.0.2-beta-1+20230112/google_cloudtrace1/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudtrace1/5.0.2+20230112/google_cloudtrace1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/cloudtrace1/src/api.rs b/gen/cloudtrace1/src/api.rs index 2a94767dd1..f1766cc8ed 100644 --- a/gen/cloudtrace1/src/api.rs +++ b/gen/cloudtrace1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> CloudTrace { CloudTrace { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudtrace.googleapis.com/".to_string(), _root_url: "https://cloudtrace.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> CloudTrace { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudtrace1/src/client.rs b/gen/cloudtrace1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudtrace1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudtrace1/src/lib.rs b/gen/cloudtrace1/src/lib.rs index a3c43c2cae..c76115ed0c 100644 --- a/gen/cloudtrace1/src/lib.rs +++ b/gen/cloudtrace1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Trace* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *cloudtrace:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Trace* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *cloudtrace:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Trace* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/trace). diff --git a/gen/cloudtrace2-cli/Cargo.toml b/gen/cloudtrace2-cli/Cargo.toml index b5dcb2845f..f2c1b0d974 100644 --- a/gen/cloudtrace2-cli/Cargo.toml +++ b/gen/cloudtrace2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-cloudtrace2-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Trace (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtrace2-cli" @@ -20,13 +20,13 @@ name = "cloudtrace2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-cloudtrace2] path = "../cloudtrace2" -version = "4.0.1+20220224" +version = "5.0.2+20230112" + diff --git a/gen/cloudtrace2-cli/README.md b/gen/cloudtrace2-cli/README.md index e49ddbe370..d85d5ccc43 100644 --- a/gen/cloudtrace2-cli/README.md +++ b/gen/cloudtrace2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # 
Usage -This documentation was generated from the *Cloud Trace* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Trace* API at revision *20230112*. The CLI is at version *5.0.2*. ```bash cloudtrace2 [options] diff --git a/gen/cloudtrace2-cli/mkdocs.yml b/gen/cloudtrace2-cli/mkdocs.yml index 6700204fad..76750f9636 100644 --- a/gen/cloudtrace2-cli/mkdocs.yml +++ b/gen/cloudtrace2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Trace v4.0.1+20220224 +site_name: Cloud Trace v5.0.2+20230112 site_url: http://byron.github.io/google-apis-rs/google-cloudtrace2-cli site_description: A complete library to interact with Cloud Trace (protocol v2) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtrace2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_traces-batch-write.md', 'Projects', 'Traces Batch Write'] -- ['projects_traces-spans-create-span.md', 'Projects', 'Traces Spans Create Span'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Traces Batch Write': 'projects_traces-batch-write.md' + - 'Traces Spans Create Span': 'projects_traces-spans-create-span.md' theme: readthedocs diff --git a/gen/cloudtrace2-cli/src/client.rs b/gen/cloudtrace2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/cloudtrace2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// 
Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/cloudtrace2-cli/src/main.rs b/gen/cloudtrace2-cli/src/main.rs index c90150a14e..584dffe07b 100644 --- a/gen/cloudtrace2-cli/src/main.rs +++ b/gen/cloudtrace2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_cloudtrace2::{api, Error, oauth2}; +use google_cloudtrace2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -392,7 +391,7 @@ async fn main() { let mut app = App::new("cloudtrace2") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230112") .about("Sends application trace data to Cloud Trace for viewing. Trace data is collected for all App Engine applications by default. Trace data from other applications can be provided using this API. This library is used to interact with the Cloud Trace API directly. If you are looking to instrument your application for Cloud Trace, we recommend using OpenTelemetry. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudtrace2_cli") .arg(Arg::with_name("url") diff --git a/gen/cloudtrace2/Cargo.toml b/gen/cloudtrace2/Cargo.toml index 3699a2bc15..58671ffb3c 100644 --- a/gen/cloudtrace2/Cargo.toml +++ b/gen/cloudtrace2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-cloudtrace2" -version = "5.0.2-beta-1+20230112" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Trace (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/cloudtrace2" homepage = "https://cloud.google.com/trace" -documentation = "https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112" +documentation = "https://docs.rs/google-cloudtrace2/5.0.2+20230112" license = "MIT" keywords = ["cloudtrace", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/cloudtrace2/README.md b/gen/cloudtrace2/README.md index f812c064de..62fe5279a8 100644 --- a/gen/cloudtrace2/README.md +++ b/gen/cloudtrace2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-cloudtrace2` library allows access to all features of the *Google Cloud Trace* service. -This documentation was generated from *Cloud Trace* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *cloudtrace:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Trace* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *cloudtrace:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Trace* *v2* API can be found at the [official documentation site](https://cloud.google.com/trace). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/CloudTrace) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/CloudTrace) ... * projects - * [*traces batch write*](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/api::ProjectTraceBatchWriteCall) and [*traces spans create span*](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/api::ProjectTraceSpanCreateSpanCall) + * [*traces batch write*](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/api::ProjectTraceBatchWriteCall) and [*traces spans create span*](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/api::ProjectTraceSpanCreateSpanCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/CloudTrace)** +* **[Hub](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/CloudTrace)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::CallBuilder) -* **[Resources](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::CallBuilder) +* **[Resources](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::Resource)** * primary types that you can apply *Activities* to * a collection of 
properties and *Parts* - * **[Parts](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::Part)** + * **[Parts](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::Delegate) to the -[Method Builder](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::Delegate) to the +[Method Builder](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure.
-The [delegate trait](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::RequestValue) and -[decodable](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::RequestValue) and +[decodable](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods.
+Using [method builders](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-cloudtrace2/5.0.2-beta-1+20230112/google_cloudtrace2/client::RequestValue) are moved +* [request values](https://docs.rs/google-cloudtrace2/5.0.2+20230112/google_cloudtrace2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/cloudtrace2/src/api.rs b/gen/cloudtrace2/src/api.rs index 77ea82b1c9..bff4816d73 100644 --- a/gen/cloudtrace2/src/api.rs +++ b/gen/cloudtrace2/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> CloudTrace { CloudTrace { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://cloudtrace.googleapis.com/".to_string(), _root_url: "https://cloudtrace.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudTrace { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/cloudtrace2/src/client.rs b/gen/cloudtrace2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/cloudtrace2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/cloudtrace2/src/lib.rs b/gen/cloudtrace2/src/lib.rs index 98fb5784c2..9b49b0b8f6 100644 --- a/gen/cloudtrace2/src/lib.rs +++ b/gen/cloudtrace2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Trace* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *cloudtrace:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Trace* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *cloudtrace:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Trace* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/trace). diff --git a/gen/clouduseraccountsvm_beta-cli/Cargo.toml b/gen/clouduseraccountsvm_beta-cli/Cargo.toml index c843e57ec0..d3577d6858 100644 --- a/gen/clouduseraccountsvm_beta-cli/Cargo.toml +++ b/gen/clouduseraccountsvm_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-clouduseraccountsvm_beta-cli" -version = "4.0.1+20160316" +version = "5.0.2+20160316" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud User Accounts (protocol vm_beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/clouduseraccountsvm_beta-cli" @@ -20,13 +20,13 @@ name = "clouduseraccountsvm-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-clouduseraccountsvm_beta] path = "../clouduseraccountsvm_beta" -version = "4.0.1+20160316" +version = "5.0.2+20160316" + diff --git a/gen/clouduseraccountsvm_beta-cli/README.md b/gen/clouduseraccountsvm_beta-cli/README.md index 61932aadd1..05f20e2d5f 100644 --- 
a/gen/clouduseraccountsvm_beta-cli/README.md +++ b/gen/clouduseraccountsvm_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud User Accounts* API at revision *20160316*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud User Accounts* API at revision *20160316*. The CLI is at version *5.0.2*. ```bash clouduseraccountsvm-beta [options] diff --git a/gen/clouduseraccountsvm_beta-cli/mkdocs.yml b/gen/clouduseraccountsvm_beta-cli/mkdocs.yml index bc12b7481d..0f3640074f 100644 --- a/gen/clouduseraccountsvm_beta-cli/mkdocs.yml +++ b/gen/clouduseraccountsvm_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud User Accounts v4.0.1+20160316 +site_name: Cloud User Accounts v5.0.2+20160316 site_url: http://byron.github.io/google-apis-rs/google-clouduseraccountsvm_beta-cli site_description: A complete library to interact with Cloud User Accounts (protocol vm_beta) @@ -7,25 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/clouduseraccount docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['global-accounts-operations_delete.md', 'Global Accounts Operations', 'Delete'] -- ['global-accounts-operations_get.md', 'Global Accounts Operations', 'Get'] -- ['global-accounts-operations_list.md', 'Global Accounts Operations', 'List'] -- ['groups_add-member.md', 'Groups', 'Add Member'] -- ['groups_delete.md', 'Groups', 'Delete'] -- ['groups_get.md', 'Groups', 'Get'] -- ['groups_insert.md', 'Groups', 'Insert'] -- ['groups_list.md', 'Groups', 'List'] -- ['groups_remove-member.md', 'Groups', 'Remove Member'] -- ['linux_get-authorized-keys-view.md', 'Linux', 'Get Authorized Keys View'] -- ['linux_get-linux-account-views.md', 'Linux', 'Get Linux Account Views'] -- ['users_add-public-key.md', 'Users', 'Add Public Key'] -- ['users_delete.md', 'Users', 'Delete'] -- ['users_get.md', 'Users', 'Get'] -- 
['users_insert.md', 'Users', 'Insert'] -- ['users_list.md', 'Users', 'List'] -- ['users_remove-public-key.md', 'Users', 'Remove Public Key'] +nav: +- Home: 'index.md' +- 'Global Accounts Operations': + - 'Delete': 'global-accounts-operations_delete.md' + - 'Get': 'global-accounts-operations_get.md' + - 'List': 'global-accounts-operations_list.md' +- 'Groups': + - 'Add Member': 'groups_add-member.md' + - 'Delete': 'groups_delete.md' + - 'Get': 'groups_get.md' + - 'Insert': 'groups_insert.md' + - 'List': 'groups_list.md' + - 'Remove Member': 'groups_remove-member.md' +- 'Linux': + - 'Get Authorized Keys View': 'linux_get-authorized-keys-view.md' + - 'Get Linux Account Views': 'linux_get-linux-account-views.md' +- 'Users': + - 'Add Public Key': 'users_add-public-key.md' + - 'Delete': 'users_delete.md' + - 'Get': 'users_get.md' + - 'Insert': 'users_insert.md' + - 'List': 'users_list.md' + - 'Remove Public Key': 'users_remove-public-key.md' theme: readthedocs diff --git a/gen/clouduseraccountsvm_beta-cli/src/client.rs b/gen/clouduseraccountsvm_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/clouduseraccountsvm_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: 
ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/clouduseraccountsvm_beta-cli/src/main.rs b/gen/clouduseraccountsvm_beta-cli/src/main.rs index bad6ca5324..428b19310d 100644 --- a/gen/clouduseraccountsvm_beta-cli/src/main.rs +++ b/gen/clouduseraccountsvm_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_clouduseraccountsvm_beta::{api, Error, oauth2}; +use google_clouduseraccountsvm_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -160,7 +159,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -505,7 +504,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -649,7 +648,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "login" => { - call = call.login(arg_from_str(value.unwrap_or("false"), err, "login", "boolean")); + call = call.login( value.map(|v| arg_from_str(v, err, "login", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -711,7 
+710,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1061,7 +1060,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1833,7 +1832,7 @@ async fn main() { let mut app = App::new("clouduseraccountsvm-beta") .author("Sebastian Thiel ") - .version("4.0.1+20160316") + .version("5.0.2+20160316") .about("Creates and manages users and groups for accessing Google Compute Engine virtual machines.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_clouduseraccountsvm_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/clouduseraccountsvm_beta/Cargo.toml b/gen/clouduseraccountsvm_beta/Cargo.toml index 6c3cfe33fc..1558e9b2df 100644 --- a/gen/clouduseraccountsvm_beta/Cargo.toml +++ b/gen/clouduseraccountsvm_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-clouduseraccountsvm_beta" -version = "5.0.2-beta-1+20160316" +version = "5.0.2+20160316" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud User Accounts (protocol vm_beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/clouduseraccountsvm_beta" homepage = "https://cloud.google.com/compute/docs/access/user-accounts/api/latest/" -documentation = "https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316" +documentation = "https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316" license = "MIT" keywords = ["clouduseraccounts", 
"google", "protocol", "web", "api"] autobins = false diff --git a/gen/clouduseraccountsvm_beta/README.md b/gen/clouduseraccountsvm_beta/README.md index 028985da15..b7ba559181 100644 --- a/gen/clouduseraccountsvm_beta/README.md +++ b/gen/clouduseraccountsvm_beta/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-clouduseraccountsvm_beta` library allows access to all features of the *Google Cloud User Accounts* service. -This documentation was generated from *Cloud User Accounts* crate version *5.0.2-beta-1+20160316*, where *20160316* is the exact revision of the *clouduseraccounts:vm_beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud User Accounts* crate version *5.0.2+20160316*, where *20160316* is the exact revision of the *clouduseraccounts:vm_beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud User Accounts* *vm_beta* API can be found at the [official documentation site](https://cloud.google.com/compute/docs/access/user-accounts/api/latest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/CloudUserAccounts) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/CloudUserAccounts) ... 
* global accounts operations - * [*delete*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GlobalAccountsOperationDeleteCall), [*get*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GlobalAccountsOperationGetCall) and [*list*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GlobalAccountsOperationListCall) -* [groups](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::Group) - * [*add member*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GroupAddMemberCall), [*delete*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GroupDeleteCall), [*get*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GroupGetCall), [*insert*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GroupInsertCall), [*list*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GroupListCall) and [*remove member*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::GroupRemoveMemberCall) + * [*delete*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GlobalAccountsOperationDeleteCall), [*get*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GlobalAccountsOperationGetCall) and [*list*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GlobalAccountsOperationListCall) +* 
[groups](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::Group) + * [*add member*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GroupAddMemberCall), [*delete*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GroupDeleteCall), [*get*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GroupGetCall), [*insert*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GroupInsertCall), [*list*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GroupListCall) and [*remove member*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::GroupRemoveMemberCall) * linux - * [*get authorized keys view*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::LinuxGetAuthorizedKeysViewCall) and [*get linux account views*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::LinuxGetLinuxAccountViewCall) -* [users](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::User) - * [*add public key*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::UserAddPublicKeyCall), [*delete*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::UserDeleteCall), [*get*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::UserGetCall), [*insert*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::UserInsertCall), 
[*list*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::UserListCall) and [*remove public key*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/api::UserRemovePublicKeyCall) + * [*get authorized keys view*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::LinuxGetAuthorizedKeysViewCall) and [*get linux account views*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::LinuxGetLinuxAccountViewCall) +* [users](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::User) + * [*add public key*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::UserAddPublicKeyCall), [*delete*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::UserDeleteCall), [*get*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::UserGetCall), [*insert*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::UserInsertCall), [*list*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::UserListCall) and [*remove public key*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/api::UserRemovePublicKeyCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/CloudUserAccounts)** +* **[Hub](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/CloudUserAccounts)** * a central object to maintain state and allow 
accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::Part)** + * **[Parts](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::CallBuilder)** +* **[Activities](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -138,17 +138,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -158,29 +158,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::RequestValue) and -[decodable](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::RequestValue) and +[decodable](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2-beta-1+20160316/google_clouduseraccountsvm_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-clouduseraccountsvm_beta/5.0.2+20160316/google_clouduseraccountsvm_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/clouduseraccountsvm_beta/src/api.rs b/gen/clouduseraccountsvm_beta/src/api.rs index 22f2564c6d..ae048f460d 100644 --- a/gen/clouduseraccountsvm_beta/src/api.rs +++ b/gen/clouduseraccountsvm_beta/src/api.rs @@ -137,7 +137,7 @@ impl<'a, S> CloudUserAccounts { CloudUserAccounts { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/clouduseraccounts/vm_beta/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -157,7 +157,7 @@ impl<'a, S> CloudUserAccounts { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/clouduseraccountsvm_beta/src/client.rs b/gen/clouduseraccountsvm_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/clouduseraccountsvm_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/clouduseraccountsvm_beta/src/lib.rs b/gen/clouduseraccountsvm_beta/src/lib.rs index 946dde4eba..d1badcc99c 100644 --- a/gen/clouduseraccountsvm_beta/src/lib.rs +++ b/gen/clouduseraccountsvm_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud User Accounts* crate version *5.0.2-beta-1+20160316*, where *20160316* is the exact revision of the *clouduseraccounts:vm_beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud User Accounts* crate version *5.0.2+20160316*, where *20160316* is the exact revision of the *clouduseraccounts:vm_beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud User Accounts* *vm_beta* API can be found at the //! [official documentation site](https://cloud.google.com/compute/docs/access/user-accounts/api/latest/). diff --git a/gen/commentanalyzer1_alpha1-cli/Cargo.toml b/gen/commentanalyzer1_alpha1-cli/Cargo.toml index c4b18ba0d8..08ad334f53 100644 --- a/gen/commentanalyzer1_alpha1-cli/Cargo.toml +++ b/gen/commentanalyzer1_alpha1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-commentanalyzer1_alpha1-cli" -version = "4.0.1+20200405" +version = "5.0.2+20200405" authors = ["Sebastian Thiel "] description = "A complete library to interact with Comment Analyzer (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/commentanalyzer1_alpha1-cli" @@ -20,13 +20,13 @@ name = "commentanalyzer1-alpha1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-commentanalyzer1_alpha1] path = "../commentanalyzer1_alpha1" -version = "4.0.1+20200405" +version = "5.0.2+20200405" + diff --git a/gen/commentanalyzer1_alpha1-cli/README.md 
b/gen/commentanalyzer1_alpha1-cli/README.md index 03051e4a12..eedc51b089 100644 --- a/gen/commentanalyzer1_alpha1-cli/README.md +++ b/gen/commentanalyzer1_alpha1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Comment Analyzer* API at revision *20200405*. The CLI is at version *4.0.1*. +This documentation was generated from the *Comment Analyzer* API at revision *20200405*. The CLI is at version *5.0.2*. ```bash commentanalyzer1-alpha1 [options] diff --git a/gen/commentanalyzer1_alpha1-cli/mkdocs.yml b/gen/commentanalyzer1_alpha1-cli/mkdocs.yml index 31ab2330f3..fae6b38358 100644 --- a/gen/commentanalyzer1_alpha1-cli/mkdocs.yml +++ b/gen/commentanalyzer1_alpha1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Comment Analyzer v4.0.1+20200405 +site_name: Comment Analyzer v5.0.2+20200405 site_url: http://byron.github.io/google-apis-rs/google-commentanalyzer1_alpha1-cli site_description: A complete library to interact with Comment Analyzer (protocol v1alpha1) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/commentanalyzer1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['comments_analyze.md', 'Comments', 'Analyze'] -- ['comments_suggestscore.md', 'Comments', 'Suggestscore'] +nav: +- Home: 'index.md' +- 'Comments': + - 'Analyze': 'comments_analyze.md' + - 'Suggestscore': 'comments_suggestscore.md' theme: readthedocs diff --git a/gen/commentanalyzer1_alpha1-cli/src/client.rs b/gen/commentanalyzer1_alpha1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/commentanalyzer1_alpha1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use 
std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/commentanalyzer1_alpha1-cli/src/main.rs b/gen/commentanalyzer1_alpha1-cli/src/main.rs index d11c19dcd6..08295fb796 100644 --- a/gen/commentanalyzer1_alpha1-cli/src/main.rs +++ b/gen/commentanalyzer1_alpha1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_commentanalyzer1_alpha1::{api, Error, oauth2}; +use google_commentanalyzer1_alpha1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -384,7 +383,7 @@ async fn main() { let mut app = App::new("commentanalyzer1-alpha1") .author("Sebastian Thiel ") - .version("4.0.1+20200405") + .version("5.0.2+20200405") .about("The Perspective Comment Analyzer API provides information about the potential impact of a comment on a conversation (e.g. it can provide a score for the \"toxicity\" of a comment). Users can leverage the \"SuggestCommentScore\" method to submit corrections to improve Perspective over time. 
Users can set the \"doNotStore\" flag to ensure that all submitted comments are automatically deleted after scores are returned.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_commentanalyzer1_alpha1_cli") .arg(Arg::with_name("url") diff --git a/gen/commentanalyzer1_alpha1/Cargo.toml b/gen/commentanalyzer1_alpha1/Cargo.toml index 79bd807b4f..5c75562af5 100644 --- a/gen/commentanalyzer1_alpha1/Cargo.toml +++ b/gen/commentanalyzer1_alpha1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-commentanalyzer1_alpha1" -version = "5.0.2-beta-1+20200405" +version = "5.0.2+20200405" authors = ["Sebastian Thiel "] description = "A complete library to interact with Comment Analyzer (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/commentanalyzer1_alpha1" homepage = "https://github.com/conversationai/perspectiveapi/blob/master/README.md" -documentation = "https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405" +documentation = "https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405" license = "MIT" keywords = ["commentanalyzer", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/commentanalyzer1_alpha1/README.md b/gen/commentanalyzer1_alpha1/README.md index 60a5ea4156..d04babcda6 100644 --- a/gen/commentanalyzer1_alpha1/README.md +++ b/gen/commentanalyzer1_alpha1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-commentanalyzer1_alpha1` library allows access to all features of the *Google Comment Analyzer* service. -This documentation was generated from *Comment Analyzer* crate version *5.0.2-beta-1+20200405*, where *20200405* is the exact revision of the *commentanalyzer:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Comment Analyzer* crate version *5.0.2+20200405*, where *20200405* is the exact revision of the *commentanalyzer:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Comment Analyzer* *v1_alpha1* API can be found at the [official documentation site](https://github.com/conversationai/perspectiveapi/blob/master/README.md). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/CommentAnalyzer) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/CommentAnalyzer) ... * comments - * [*analyze*](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/api::CommentAnalyzeCall) and [*suggestscore*](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/api::CommentSuggestscoreCall) + * [*analyze*](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/api::CommentAnalyzeCall) and [*suggestscore*](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/api::CommentSuggestscoreCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/CommentAnalyzer)** +* **[Hub](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/CommentAnalyzer)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::CallBuilder) -* **[Resources](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::CallBuilder) +* **[Resources](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::Part)** + * **[Parts](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::Delegate) to the -[Method Builder](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::Delegate) to the +[Method Builder](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::RequestValue) and -[decodable](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::RequestValue) and +[decodable](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2-beta-1+20200405/google_commentanalyzer1_alpha1/client::RequestValue) are moved +* [request values](https://docs.rs/google-commentanalyzer1_alpha1/5.0.2+20200405/google_commentanalyzer1_alpha1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/commentanalyzer1_alpha1/src/api.rs b/gen/commentanalyzer1_alpha1/src/api.rs index f0191d6d41..0f47415e7b 100644 --- a/gen/commentanalyzer1_alpha1/src/api.rs +++ b/gen/commentanalyzer1_alpha1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CommentAnalyzer { CommentAnalyzer { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://commentanalyzer.googleapis.com/".to_string(), _root_url: "https://commentanalyzer.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> CommentAnalyzer { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/commentanalyzer1_alpha1/src/client.rs b/gen/commentanalyzer1_alpha1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/commentanalyzer1_alpha1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/commentanalyzer1_alpha1/src/lib.rs b/gen/commentanalyzer1_alpha1/src/lib.rs index f9cce6b8e7..c9e41f54f5 100644 --- a/gen/commentanalyzer1_alpha1/src/lib.rs +++ b/gen/commentanalyzer1_alpha1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Comment Analyzer* crate version *5.0.2-beta-1+20200405*, where *20200405* is the exact revision of the *commentanalyzer:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Comment Analyzer* crate version *5.0.2+20200405*, where *20200405* is the exact revision of the *commentanalyzer:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Comment Analyzer* *v1_alpha1* API can be found at the //! [official documentation site](https://github.com/conversationai/perspectiveapi/blob/master/README.md). diff --git a/gen/composer1-cli/Cargo.toml b/gen/composer1-cli/Cargo.toml index fa73d9b992..62824db46d 100644 --- a/gen/composer1-cli/Cargo.toml +++ b/gen/composer1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-composer1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230107" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Composer (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/composer1-cli" @@ -20,13 +20,13 @@ name = "composer1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-composer1] path = "../composer1" -version = "4.0.1+20220224" +version = "5.0.2+20230107" + diff --git a/gen/composer1-cli/README.md b/gen/composer1-cli/README.md index f423588251..e40223ba51 100644 --- a/gen/composer1-cli/README.md +++ b/gen/composer1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Composer* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Composer* API at revision *20230107*. The CLI is at version *5.0.2*. ```bash composer1 [options] @@ -34,7 +34,9 @@ composer1 [options] locations-environments-delete [-p ]... [-o ] locations-environments-get [-p ]... [-o ] locations-environments-list [-p ]... [-o ] + locations-environments-load-snapshot (-r )... [-p ]... [-o ] locations-environments-patch (-r )... [-p ]... [-o ] + locations-environments-save-snapshot (-r )... [-p ]... [-o ] locations-image-versions-list [-p ]... [-o ] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... [-o ] diff --git a/gen/composer1-cli/mkdocs.yml b/gen/composer1-cli/mkdocs.yml index 270390f13b..8ff44cc9a9 100644 --- a/gen/composer1-cli/mkdocs.yml +++ b/gen/composer1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Composer v4.0.1+20220224 +site_name: Cloud Composer v5.0.2+20230107 site_url: http://byron.github.io/google-apis-rs/google-composer1-cli site_description: A complete library to interact with Cloud Composer (protocol v1) @@ -7,17 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/composer1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-environments-create.md', 'Projects', 'Locations Environments Create'] -- ['projects_locations-environments-delete.md', 'Projects', 'Locations Environments Delete'] -- ['projects_locations-environments-get.md', 'Projects', 'Locations Environments Get'] -- ['projects_locations-environments-list.md', 'Projects', 'Locations Environments List'] -- ['projects_locations-environments-patch.md', 'Projects', 'Locations Environments Patch'] -- ['projects_locations-image-versions-list.md', 'Projects', 'Locations Image Versions List'] -- 
['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Environments Create': 'projects_locations-environments-create.md' + - 'Locations Environments Delete': 'projects_locations-environments-delete.md' + - 'Locations Environments Get': 'projects_locations-environments-get.md' + - 'Locations Environments List': 'projects_locations-environments-list.md' + - 'Locations Environments Load Snapshot': 'projects_locations-environments-load-snapshot.md' + - 'Locations Environments Patch': 'projects_locations-environments-patch.md' + - 'Locations Environments Save Snapshot': 'projects_locations-environments-save-snapshot.md' + - 'Locations Image Versions List': 'projects_locations-image-versions-list.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/composer1-cli/src/client.rs b/gen/composer1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/composer1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), 
-// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/composer1-cli/src/main.rs b/gen/composer1-cli/src/main.rs index 4bf51d5d27..6f8174cf41 100644 --- a/gen/composer1-cli/src/main.rs +++ b/gen/composer1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_composer1::{api, Error, oauth2}; +use google_composer1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -83,7 +82,9 @@ where "config.maintenance-window.end-time" => Some(("config.maintenanceWindow.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.maintenance-window.recurrence" => Some(("config.maintenanceWindow.recurrence", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.maintenance-window.start-time" => Some(("config.maintenanceWindow.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.master-authorized-networks-config.enabled" => Some(("config.masterAuthorizedNetworksConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "config.node-config.disk-size-gb" => Some(("config.nodeConfig.diskSizeGb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "config.node-config.enable-ip-masq-agent" => Some(("config.nodeConfig.enableIpMasqAgent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "config.node-config.ip-allocation-policy.cluster-ipv4-cidr-block" => Some(("config.nodeConfig.ipAllocationPolicy.clusterIpv4CidrBlock", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.node-config.ip-allocation-policy.cluster-secondary-range-name" => Some(("config.nodeConfig.ipAllocationPolicy.clusterSecondaryRangeName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.node-config.ip-allocation-policy.services-ipv4-cidr-block" => Some(("config.nodeConfig.ipAllocationPolicy.servicesIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -102,11 +103,17 @@ where "config.private-environment-config.cloud-composer-network-ipv4-reserved-range" => Some(("config.privateEnvironmentConfig.cloudComposerNetworkIpv4ReservedRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.cloud-sql-ipv4-cidr-block" => Some(("config.privateEnvironmentConfig.cloudSqlIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.enable-private-environment" => Some(("config.privateEnvironmentConfig.enablePrivateEnvironment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.private-environment-config.enable-privately-used-public-ips" => Some(("config.privateEnvironmentConfig.enablePrivatelyUsedPublicIps", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.private-environment-config.networking-config.connection-type" => Some(("config.privateEnvironmentConfig.networkingConfig.connectionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.private-cluster-config.enable-private-endpoint" => Some(("config.privateEnvironmentConfig.privateClusterConfig.enablePrivateEndpoint", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "config.private-environment-config.private-cluster-config.master-ipv4-cidr-block" => Some(("config.privateEnvironmentConfig.privateClusterConfig.masterIpv4CidrBlock", JsonTypeInfo 
{ jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.private-cluster-config.master-ipv4-reserved-range" => Some(("config.privateEnvironmentConfig.privateClusterConfig.masterIpv4ReservedRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.web-server-ipv4-cidr-block" => Some(("config.privateEnvironmentConfig.webServerIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.web-server-ipv4-reserved-range" => Some(("config.privateEnvironmentConfig.webServerIpv4ReservedRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.recovery-config.scheduled-snapshots-config.enabled" => Some(("config.recoveryConfig.scheduledSnapshotsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.recovery-config.scheduled-snapshots-config.snapshot-creation-schedule" => Some(("config.recoveryConfig.scheduledSnapshotsConfig.snapshotCreationSchedule", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.recovery-config.scheduled-snapshots-config.snapshot-location" => Some(("config.recoveryConfig.scheduledSnapshotsConfig.snapshotLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.recovery-config.scheduled-snapshots-config.time-zone" => Some(("config.recoveryConfig.scheduledSnapshotsConfig.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.software-config.airflow-config-overrides" => Some(("config.softwareConfig.airflowConfigOverrides", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "config.software-config.env-variables" => Some(("config.softwareConfig.envVariables", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "config.software-config.image-version" => Some(("config.softwareConfig.imageVersion", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), @@ -133,7 +140,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uuid" => Some(("uuid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["airflow-config-overrides", "airflow-uri", "cloud-composer-connection-subnetwork", "cloud-composer-network-ipv4-cidr-block", "cloud-composer-network-ipv4-reserved-range", "cloud-sql-ipv4-cidr-block", "cluster-ipv4-cidr-block", "cluster-secondary-range-name", "config", "count", "cpu", "create-time", "dag-gcs-prefix", "database-config", "disk-size-gb", "enable-private-endpoint", "enable-private-environment", "encryption-config", "end-time", "env-variables", "environment-size", "gke-cluster", "image-version", "ip-allocation-policy", "kms-key-name", "labels", "location", "machine-type", "maintenance-window", "master-ipv4-cidr-block", "master-ipv4-reserved-range", "max-count", "memory-gb", "min-count", "name", "network", "node-config", "node-count", "oauth-scopes", "private-cluster-config", "private-environment-config", "pypi-packages", "python-version", "recurrence", "scheduler", "scheduler-count", "service-account", "services-ipv4-cidr-block", "services-secondary-range-name", "software-config", "start-time", "state", "storage-gb", "subnetwork", "tags", "update-time", "use-ip-aliases", "uuid", "web-server", "web-server-config", "web-server-ipv4-cidr-block", "web-server-ipv4-reserved-range", "worker", "workloads-config"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["airflow-config-overrides", "airflow-uri", "cloud-composer-connection-subnetwork", "cloud-composer-network-ipv4-cidr-block", "cloud-composer-network-ipv4-reserved-range", "cloud-sql-ipv4-cidr-block", "cluster-ipv4-cidr-block", "cluster-secondary-range-name", "config", "connection-type", "count", "cpu", "create-time", "dag-gcs-prefix", "database-config", 
"disk-size-gb", "enable-ip-masq-agent", "enable-private-endpoint", "enable-private-environment", "enable-privately-used-public-ips", "enabled", "encryption-config", "end-time", "env-variables", "environment-size", "gke-cluster", "image-version", "ip-allocation-policy", "kms-key-name", "labels", "location", "machine-type", "maintenance-window", "master-authorized-networks-config", "master-ipv4-cidr-block", "master-ipv4-reserved-range", "max-count", "memory-gb", "min-count", "name", "network", "networking-config", "node-config", "node-count", "oauth-scopes", "private-cluster-config", "private-environment-config", "pypi-packages", "python-version", "recovery-config", "recurrence", "scheduled-snapshots-config", "scheduler", "scheduler-count", "service-account", "services-ipv4-cidr-block", "services-secondary-range-name", "snapshot-creation-schedule", "snapshot-location", "software-config", "start-time", "state", "storage-gb", "subnetwork", "tags", "time-zone", "update-time", "use-ip-aliases", "uuid", "web-server", "web-server-config", "web-server-ipv4-cidr-block", "web-server-ipv4-reserved-range", "worker", "workloads-config"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -307,7 +314,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -356,6 +363,95 @@ where } } + async fn _projects_locations_environments_load_snapshot(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let 
last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "skip-airflow-overrides-setting" => Some(("skipAirflowOverridesSetting", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "skip-environment-variables-setting" => Some(("skipEnvironmentVariablesSetting", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "skip-gcs-data-copying" => Some(("skipGcsDataCopying", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "skip-pypi-packages-installation" => Some(("skipPypiPackagesInstallation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "snapshot-path" => Some(("snapshotPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["skip-airflow-overrides-setting", "skip-environment-variables-setting", "skip-gcs-data-copying", "skip-pypi-packages-installation", "snapshot-path"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::LoadSnapshotRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_environments_load_snapshot(request, opt.value_of("environment").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, 
value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_environments_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -388,7 +484,9 @@ where "config.maintenance-window.end-time" => Some(("config.maintenanceWindow.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.maintenance-window.recurrence" => Some(("config.maintenanceWindow.recurrence", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.maintenance-window.start-time" => Some(("config.maintenanceWindow.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.master-authorized-networks-config.enabled" => 
Some(("config.masterAuthorizedNetworksConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "config.node-config.disk-size-gb" => Some(("config.nodeConfig.diskSizeGb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "config.node-config.enable-ip-masq-agent" => Some(("config.nodeConfig.enableIpMasqAgent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "config.node-config.ip-allocation-policy.cluster-ipv4-cidr-block" => Some(("config.nodeConfig.ipAllocationPolicy.clusterIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.node-config.ip-allocation-policy.cluster-secondary-range-name" => Some(("config.nodeConfig.ipAllocationPolicy.clusterSecondaryRangeName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.node-config.ip-allocation-policy.services-ipv4-cidr-block" => Some(("config.nodeConfig.ipAllocationPolicy.servicesIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -407,11 +505,17 @@ where "config.private-environment-config.cloud-composer-network-ipv4-reserved-range" => Some(("config.privateEnvironmentConfig.cloudComposerNetworkIpv4ReservedRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.cloud-sql-ipv4-cidr-block" => Some(("config.privateEnvironmentConfig.cloudSqlIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.enable-private-environment" => Some(("config.privateEnvironmentConfig.enablePrivateEnvironment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.private-environment-config.enable-privately-used-public-ips" => Some(("config.privateEnvironmentConfig.enablePrivatelyUsedPublicIps", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.private-environment-config.networking-config.connection-type" 
=> Some(("config.privateEnvironmentConfig.networkingConfig.connectionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.private-cluster-config.enable-private-endpoint" => Some(("config.privateEnvironmentConfig.privateClusterConfig.enablePrivateEndpoint", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "config.private-environment-config.private-cluster-config.master-ipv4-cidr-block" => Some(("config.privateEnvironmentConfig.privateClusterConfig.masterIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.private-cluster-config.master-ipv4-reserved-range" => Some(("config.privateEnvironmentConfig.privateClusterConfig.masterIpv4ReservedRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.web-server-ipv4-cidr-block" => Some(("config.privateEnvironmentConfig.webServerIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.private-environment-config.web-server-ipv4-reserved-range" => Some(("config.privateEnvironmentConfig.webServerIpv4ReservedRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.recovery-config.scheduled-snapshots-config.enabled" => Some(("config.recoveryConfig.scheduledSnapshotsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.recovery-config.scheduled-snapshots-config.snapshot-creation-schedule" => Some(("config.recoveryConfig.scheduledSnapshotsConfig.snapshotCreationSchedule", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.recovery-config.scheduled-snapshots-config.snapshot-location" => Some(("config.recoveryConfig.scheduledSnapshotsConfig.snapshotLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.recovery-config.scheduled-snapshots-config.time-zone" => 
Some(("config.recoveryConfig.scheduledSnapshotsConfig.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.software-config.airflow-config-overrides" => Some(("config.softwareConfig.airflowConfigOverrides", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "config.software-config.env-variables" => Some(("config.softwareConfig.envVariables", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "config.software-config.image-version" => Some(("config.softwareConfig.imageVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -438,7 +542,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uuid" => Some(("uuid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["airflow-config-overrides", "airflow-uri", "cloud-composer-connection-subnetwork", "cloud-composer-network-ipv4-cidr-block", "cloud-composer-network-ipv4-reserved-range", "cloud-sql-ipv4-cidr-block", "cluster-ipv4-cidr-block", "cluster-secondary-range-name", "config", "count", "cpu", "create-time", "dag-gcs-prefix", "database-config", "disk-size-gb", "enable-private-endpoint", "enable-private-environment", "encryption-config", "end-time", "env-variables", "environment-size", "gke-cluster", "image-version", "ip-allocation-policy", "kms-key-name", "labels", "location", "machine-type", "maintenance-window", "master-ipv4-cidr-block", "master-ipv4-reserved-range", "max-count", "memory-gb", "min-count", "name", "network", "node-config", "node-count", "oauth-scopes", "private-cluster-config", "private-environment-config", "pypi-packages", "python-version", "recurrence", "scheduler", "scheduler-count", "service-account", "services-ipv4-cidr-block", "services-secondary-range-name", "software-config", "start-time", "state", "storage-gb", "subnetwork", "tags", "update-time", 
"use-ip-aliases", "uuid", "web-server", "web-server-config", "web-server-ipv4-cidr-block", "web-server-ipv4-reserved-range", "worker", "workloads-config"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["airflow-config-overrides", "airflow-uri", "cloud-composer-connection-subnetwork", "cloud-composer-network-ipv4-cidr-block", "cloud-composer-network-ipv4-reserved-range", "cloud-sql-ipv4-cidr-block", "cluster-ipv4-cidr-block", "cluster-secondary-range-name", "config", "connection-type", "count", "cpu", "create-time", "dag-gcs-prefix", "database-config", "disk-size-gb", "enable-ip-masq-agent", "enable-private-endpoint", "enable-private-environment", "enable-privately-used-public-ips", "enabled", "encryption-config", "end-time", "env-variables", "environment-size", "gke-cluster", "image-version", "ip-allocation-policy", "kms-key-name", "labels", "location", "machine-type", "maintenance-window", "master-authorized-networks-config", "master-ipv4-cidr-block", "master-ipv4-reserved-range", "max-count", "memory-gb", "min-count", "name", "network", "networking-config", "node-config", "node-count", "oauth-scopes", "private-cluster-config", "private-environment-config", "pypi-packages", "python-version", "recovery-config", "recurrence", "scheduled-snapshots-config", "scheduler", "scheduler-count", "service-account", "services-ipv4-cidr-block", "services-secondary-range-name", "snapshot-creation-schedule", "snapshot-location", "software-config", "start-time", "state", "storage-gb", "subnetwork", "tags", "time-zone", "update-time", "use-ip-aliases", "uuid", "web-server", "web-server-config", "web-server-ipv4-cidr-block", "web-server-ipv4-reserved-range", "worker", "workloads-config"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -453,7 +557,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); 
+ call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -502,6 +606,91 @@ where } } + async fn _projects_locations_environments_save_snapshot(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "snapshot-location" => Some(("snapshotLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["snapshot-location"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SaveSnapshotRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_environments_save_snapshot(request, opt.value_of("environment").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_image_versions_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_image_versions_list(opt.value_of("parent").unwrap_or("")); @@ -512,10 +701,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "include-past-releases" => { - call = call.include_past_releases(arg_from_str(value.unwrap_or("false"), err, "include-past-releases", "boolean")); + call = call.include_past_releases( value.map(|v| arg_from_str(v, err, "include-past-releases", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -678,7 +867,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -749,9 +938,15 @@ where ("locations-environments-list", Some(opt)) => { call_result = self._projects_locations_environments_list(opt, dry_run, &mut err).await; }, + ("locations-environments-load-snapshot", Some(opt)) => { + call_result = self._projects_locations_environments_load_snapshot(opt, dry_run, &mut err).await; + }, ("locations-environments-patch", Some(opt)) => { call_result = self._projects_locations_environments_patch(opt, dry_run, &mut err).await; }, + ("locations-environments-save-snapshot", Some(opt)) => { + call_result = self._projects_locations_environments_save_snapshot(opt, dry_run, &mut err).await; + }, 
("locations-image-versions-list", Some(opt)) => { call_result = self._projects_locations_image_versions_list(opt, dry_run, &mut err).await; }, @@ -843,7 +1038,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-environments-create', 'locations-environments-delete', 'locations-environments-get', 'locations-environments-list', 'locations-environments-patch', 'locations-image-versions-list', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-environments-create', 'locations-environments-delete', 'locations-environments-get', 'locations-environments-list', 'locations-environments-load-snapshot', 'locations-environments-patch', 'locations-environments-save-snapshot', 'locations-image-versions-list', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ ("locations-environments-create", Some(r##"Create a new environment."##), "Details at http://byron.github.io/google-apis-rs/google_composer1_cli/projects_locations-environments-create", @@ -932,6 +1127,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-environments-load-snapshot", + Some(r##"Loads a snapshot of a Cloud Composer environment. 
As a result of this operation, a snapshot of environment's specified in LoadSnapshotRequest is loaded into the environment."##), + "Details at http://byron.github.io/google-apis-rs/google_composer1_cli/projects_locations-environments-load-snapshot", + vec![ + (Some(r##"environment"##), + None, + Some(r##"The resource name of the target environment in the form: "projects/{projectId}/locations/{locationId}/environments/{environmentId}""##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -960,6 +1183,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-environments-save-snapshot", + Some(r##"Creates a snapshots of a Cloud Composer environment. 
As a result of this operation, snapshot of environment's state is stored in a location specified in the SaveSnapshotRequest."##), + "Details at http://byron.github.io/google-apis-rs/google_composer1_cli/projects_locations-environments-save-snapshot", + vec![ + (Some(r##"environment"##), + None, + Some(r##"The resource name of the source environment in the form: "projects/{projectId}/locations/{locationId}/environments/{environmentId}""##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1060,7 +1311,7 @@ async fn main() { let mut app = App::new("composer1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230107") .about("Manages Apache Airflow environments on Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_composer1_cli") .arg(Arg::with_name("url") diff --git a/gen/composer1/Cargo.toml b/gen/composer1/Cargo.toml index 573c218177..fb779368c5 100644 --- a/gen/composer1/Cargo.toml +++ b/gen/composer1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-composer1" -version = "5.0.2-beta-1+20230107" +version = "5.0.2+20230107" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Composer (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/composer1" homepage = "https://cloud.google.com/composer/" -documentation = "https://docs.rs/google-composer1/5.0.2-beta-1+20230107" +documentation = "https://docs.rs/google-composer1/5.0.2+20230107" license = "MIT" keywords = ["composer", "google", "protocol", "web", "api"] 
autobins = false diff --git a/gen/composer1/README.md b/gen/composer1/README.md index fad7577e3c..303f2582e3 100644 --- a/gen/composer1/README.md +++ b/gen/composer1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-composer1` library allows access to all features of the *Google Cloud Composer* service. -This documentation was generated from *Cloud Composer* crate version *5.0.2-beta-1+20230107*, where *20230107* is the exact revision of the *composer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Composer* crate version *5.0.2+20230107*, where *20230107* is the exact revision of the *composer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Composer* *v1* API can be found at the [official documentation site](https://cloud.google.com/composer/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/CloudComposer) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/CloudComposer) ... 
* projects - * [*locations environments create*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationEnvironmentCreateCall), [*locations environments delete*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationEnvironmentDeleteCall), [*locations environments get*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationEnvironmentGetCall), [*locations environments list*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationEnvironmentListCall), [*locations environments load snapshot*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationEnvironmentLoadSnapshotCall), [*locations environments patch*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationEnvironmentPatchCall), [*locations environments save snapshot*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationEnvironmentSaveSnapshotCall), [*locations image versions list*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationImageVersionListCall), [*locations operations delete*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/api::ProjectLocationOperationListCall) + * [*locations environments create*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationEnvironmentCreateCall), [*locations environments delete*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationEnvironmentDeleteCall), [*locations environments 
get*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationEnvironmentGetCall), [*locations environments list*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationEnvironmentListCall), [*locations environments load snapshot*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationEnvironmentLoadSnapshotCall), [*locations environments patch*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationEnvironmentPatchCall), [*locations environments save snapshot*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationEnvironmentSaveSnapshotCall), [*locations image versions list*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationImageVersionListCall), [*locations operations delete*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/CloudComposer)** +* **[Hub](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/CloudComposer)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::CallBuilder) +* **[Resources](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::Part)** + * **[Parts](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::Delegate) to the -[Method Builder](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::Delegate) to the +[Method Builder](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::RequestValue) and -[decodable](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::RequestValue) and +[decodable](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-composer1/5.0.2-beta-1+20230107/google_composer1/client::RequestValue) are moved +* [request values](https://docs.rs/google-composer1/5.0.2+20230107/google_composer1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/composer1/src/api.rs b/gen/composer1/src/api.rs index 3c7d1755f6..7c55820948 100644 --- a/gen/composer1/src/api.rs +++ b/gen/composer1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudComposer { CloudComposer { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://composer.googleapis.com/".to_string(), _root_url: "https://composer.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudComposer { } /// Set the user-agent header field to use in all requests to the server. 
- /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/composer1/src/client.rs b/gen/composer1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/composer1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/composer1/src/lib.rs b/gen/composer1/src/lib.rs index 1e1e5163e5..92d74fbaab 100644 --- a/gen/composer1/src/lib.rs +++ b/gen/composer1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Composer* crate version *5.0.2-beta-1+20230107*, where *20230107* is the exact revision of the *composer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Composer* crate version *5.0.2+20230107*, where *20230107* is the exact revision of the *composer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Composer* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/composer/). diff --git a/gen/compute1-cli/Cargo.toml b/gen/compute1-cli/Cargo.toml index 28f51cccf1..5e85114607 100644 --- a/gen/compute1-cli/Cargo.toml +++ b/gen/compute1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-compute1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with compute (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/compute1-cli" @@ -20,13 +20,13 @@ name = "compute1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-compute1] path = "../compute1" -version = "4.0.1+20220224" +version = "5.0.2+20230103" + diff --git a/gen/compute1-cli/README.md b/gen/compute1-cli/README.md index 9ee182facc..b9292fde04 100644 --- a/gen/compute1-cli/README.md +++ b/gen/compute1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *compute* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *compute* API at revision *20230103*. The CLI is at version *5.0.2*. ```bash compute1 [options] @@ -39,6 +39,7 @@ compute1 [options] get
[-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] + set-labels (-r )... [-p ]... [-o ] autoscalers aggregated-list [-p ]... [-o ] delete [-p ]... [-o ] @@ -64,10 +65,12 @@ compute1 [options] delete-signed-url-key [-p ]... [-o ] get [-p ]... [-o ] get-health (-r )... [-p ]... [-o ] + get-iam-policy [-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] set-edge-security-policy (-r )... [-p ]... [-o ] + set-iam-policy (-r )... [-p ]... [-o ] set-security-policy (-r )... [-p ]... [-o ] update (-r )... [-p ]... [-o ] disk-types @@ -135,6 +138,7 @@ compute1 [options] get
[-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] + set-labels (-r )... [-p ]... [-o ] global-forwarding-rules delete [-p ]... [-o ] get [-p ]... [-o ] @@ -295,6 +299,7 @@ compute1 [options] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] + set-labels (-r )... [-p ]... [-o ] interconnect-locations get [-p ]... [-o ] list [-p ]... [-o ] @@ -305,6 +310,7 @@ compute1 [options] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] + set-labels (-r )... [-p ]... [-o ] license-codes get [-p ]... [-o ] test-iam-permissions (-r )... [-p ]... [-o ] @@ -328,6 +334,21 @@ compute1 [options] aggregated-list [-p ]... [-o ] get [-p ]... [-o ] list [-p ]... [-o ] + network-attachments + aggregated-list [-p ]... [-o ] + delete [-p ]... [-o ] + get [-p ]... [-o ] + get-iam-policy [-p ]... [-o ] + insert (-r )... [-p ]... [-o ] + list [-p ]... [-o ] + set-iam-policy (-r )... [-p ]... [-o ] + test-iam-permissions (-r )... [-p ]... [-o ] + network-edge-security-services + aggregated-list [-p ]... [-o ] + delete [-p ]... [-o ] + get [-p ]... [-o ] + insert (-r )... [-p ]... [-o ] + patch (-r )... [-p ]... [-o ] network-endpoint-groups aggregated-list [-p ]... [-o ] attach-network-endpoints (-r )... [-p ]... [-o ] @@ -440,9 +461,11 @@ compute1 [options] delete [-p ]... [-o ] get [-p ]... [-o ] get-health (-r )... [-p ]... [-o ] + get-iam-policy [-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] + set-iam-policy (-r )... [-p ]... [-o ] update (-r )... [-p ]... [-o ] region-commitments aggregated-list [-p ]... [-o ] @@ -539,11 +562,24 @@ compute1 [options] get [-p ]... [-o ] list [-p ]... [-o ] wait [-p ]... [-o ] + region-security-policies + delete [-p ]... [-o ] + get [-p ]... [-o ] + insert (-r )... [-p ]... [-o ] + list [-p ]... [-o ] + patch (-r )... [-p ]... [-o ] region-ssl-certificates delete [-p ]... [-o ] get [-p ]... [-o ] insert (-r )... [-p ]... 
[-o ] list [-p ]... [-o ] + region-ssl-policies + delete [-p ]... [-o ] + get [-p ]... [-o ] + insert (-r )... [-p ]... [-o ] + list [-p ]... [-o ] + list-available-features [-p ]... [-o ] + patch (-r )... [-p ]... [-o ] region-target-http-proxies delete [-p ]... [-o ] get [-p ]... [-o ] @@ -555,8 +591,14 @@ compute1 [options] get [-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] + patch (-r )... [-p ]... [-o ] set-ssl-certificates (-r )... [-p ]... [-o ] set-url-map (-r )... [-p ]... [-o ] + region-target-tcp-proxies + delete [-p ]... [-o ] + get [-p ]... [-o ] + insert (-r )... [-p ]... [-o ] + list [-p ]... [-o ] region-url-maps delete [-p ]... [-o ] get [-p ]... [-o ] @@ -606,6 +648,7 @@ compute1 [options] list [-p ]... [-o ] security-policies add-rule (-r )... [-p ]... [-o ] + aggregated-list [-p ]... [-o ] delete [-p ]... [-o ] get [-p ]... [-o ] get-rule [-p ]... [-o ] @@ -615,6 +658,7 @@ compute1 [options] patch (-r )... [-p ]... [-o ] patch-rule (-r )... [-p ]... [-o ] remove-rule [-p ]... [-o ] + set-labels (-r )... [-p ]... [-o ] service-attachments aggregated-list [-p ]... [-o ] delete [-p ]... [-o ] @@ -641,6 +685,7 @@ compute1 [options] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] ssl-policies + aggregated-list [-p ]... [-o ] delete [-p ]... [-o ] get [-p ]... [-o ] insert (-r )... [-p ]... [-o ] @@ -681,6 +726,7 @@ compute1 [options] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] + set-certificate-map (-r )... [-p ]... [-o ] set-quic-override (-r )... [-p ]... [-o ] set-ssl-certificates (-r )... [-p ]... [-o ] set-ssl-policy (-r )... [-p ]... [-o ] @@ -709,10 +755,12 @@ compute1 [options] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] set-backend-service (-r )... [-p ]... [-o ] + set-certificate-map (-r )... [-p ]... [-o ] set-proxy-header (-r )... [-p ]... [-o ] set-ssl-certificates (-r )... [-p ]... [-o ] set-ssl-policy (-r )... [-p ]... [-o ] target-tcp-proxies + aggregated-list [-p ]... 
[-o ] delete [-p ]... [-o ] get [-p ]... [-o ] insert (-r )... [-p ]... [-o ] @@ -725,6 +773,7 @@ compute1 [options] get [-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] + set-labels (-r )... [-p ]... [-o ] url-maps aggregated-list [-p ]... [-o ] delete [-p ]... [-o ] @@ -750,6 +799,7 @@ compute1 [options] get [-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] + set-labels (-r )... [-p ]... [-o ] zone-operations delete [-p ]... get [-p ]... [-o ] diff --git a/gen/compute1-cli/mkdocs.yml b/gen/compute1-cli/mkdocs.yml index 7681441ff1..e4c9d54bda 100644 --- a/gen/compute1-cli/mkdocs.yml +++ b/gen/compute1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: compute v4.0.1+20220224 +site_name: compute v5.0.2+20230103 site_url: http://byron.github.io/google-apis-rs/google-compute1-cli site_description: A complete library to interact with compute (protocol v1) @@ -7,652 +7,787 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/compute1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accelerator-types_aggregated-list.md', 'Accelerator Types', 'Aggregated List'] -- ['accelerator-types_get.md', 'Accelerator Types', 'Get'] -- ['accelerator-types_list.md', 'Accelerator Types', 'List'] -- ['addresses_aggregated-list.md', 'Addresses', 'Aggregated List'] -- ['addresses_delete.md', 'Addresses', 'Delete'] -- ['addresses_get.md', 'Addresses', 'Get'] -- ['addresses_insert.md', 'Addresses', 'Insert'] -- ['addresses_list.md', 'Addresses', 'List'] -- ['autoscalers_aggregated-list.md', 'Autoscalers', 'Aggregated List'] -- ['autoscalers_delete.md', 'Autoscalers', 'Delete'] -- ['autoscalers_get.md', 'Autoscalers', 'Get'] -- ['autoscalers_insert.md', 'Autoscalers', 'Insert'] -- ['autoscalers_list.md', 'Autoscalers', 'List'] -- ['autoscalers_patch.md', 'Autoscalers', 'Patch'] -- ['autoscalers_update.md', 'Autoscalers', 'Update'] -- ['backend-buckets_add-signed-url-key.md', 'Backend Buckets', 'Add Signed Url Key'] -- 
['backend-buckets_delete.md', 'Backend Buckets', 'Delete'] -- ['backend-buckets_delete-signed-url-key.md', 'Backend Buckets', 'Delete Signed Url Key'] -- ['backend-buckets_get.md', 'Backend Buckets', 'Get'] -- ['backend-buckets_insert.md', 'Backend Buckets', 'Insert'] -- ['backend-buckets_list.md', 'Backend Buckets', 'List'] -- ['backend-buckets_patch.md', 'Backend Buckets', 'Patch'] -- ['backend-buckets_set-edge-security-policy.md', 'Backend Buckets', 'Set Edge Security Policy'] -- ['backend-buckets_update.md', 'Backend Buckets', 'Update'] -- ['backend-services_add-signed-url-key.md', 'Backend Services', 'Add Signed Url Key'] -- ['backend-services_aggregated-list.md', 'Backend Services', 'Aggregated List'] -- ['backend-services_delete.md', 'Backend Services', 'Delete'] -- ['backend-services_delete-signed-url-key.md', 'Backend Services', 'Delete Signed Url Key'] -- ['backend-services_get.md', 'Backend Services', 'Get'] -- ['backend-services_get-health.md', 'Backend Services', 'Get Health'] -- ['backend-services_insert.md', 'Backend Services', 'Insert'] -- ['backend-services_list.md', 'Backend Services', 'List'] -- ['backend-services_patch.md', 'Backend Services', 'Patch'] -- ['backend-services_set-edge-security-policy.md', 'Backend Services', 'Set Edge Security Policy'] -- ['backend-services_set-security-policy.md', 'Backend Services', 'Set Security Policy'] -- ['backend-services_update.md', 'Backend Services', 'Update'] -- ['disk-types_aggregated-list.md', 'Disk Types', 'Aggregated List'] -- ['disk-types_get.md', 'Disk Types', 'Get'] -- ['disk-types_list.md', 'Disk Types', 'List'] -- ['disks_add-resource-policies.md', 'Disks', 'Add Resource Policies'] -- ['disks_aggregated-list.md', 'Disks', 'Aggregated List'] -- ['disks_create-snapshot.md', 'Disks', 'Create Snapshot'] -- ['disks_delete.md', 'Disks', 'Delete'] -- ['disks_get.md', 'Disks', 'Get'] -- ['disks_get-iam-policy.md', 'Disks', 'Get Iam Policy'] -- ['disks_insert.md', 'Disks', 'Insert'] -- ['disks_list.md', 
'Disks', 'List'] -- ['disks_remove-resource-policies.md', 'Disks', 'Remove Resource Policies'] -- ['disks_resize.md', 'Disks', 'Resize'] -- ['disks_set-iam-policy.md', 'Disks', 'Set Iam Policy'] -- ['disks_set-labels.md', 'Disks', 'Set Labels'] -- ['disks_test-iam-permissions.md', 'Disks', 'Test Iam Permissions'] -- ['external-vpn-gateways_delete.md', 'External Vpn Gateways', 'Delete'] -- ['external-vpn-gateways_get.md', 'External Vpn Gateways', 'Get'] -- ['external-vpn-gateways_insert.md', 'External Vpn Gateways', 'Insert'] -- ['external-vpn-gateways_list.md', 'External Vpn Gateways', 'List'] -- ['external-vpn-gateways_set-labels.md', 'External Vpn Gateways', 'Set Labels'] -- ['external-vpn-gateways_test-iam-permissions.md', 'External Vpn Gateways', 'Test Iam Permissions'] -- ['firewall-policies_add-association.md', 'Firewall Policies', 'Add Association'] -- ['firewall-policies_add-rule.md', 'Firewall Policies', 'Add Rule'] -- ['firewall-policies_clone-rules.md', 'Firewall Policies', 'Clone Rules'] -- ['firewall-policies_delete.md', 'Firewall Policies', 'Delete'] -- ['firewall-policies_get.md', 'Firewall Policies', 'Get'] -- ['firewall-policies_get-association.md', 'Firewall Policies', 'Get Association'] -- ['firewall-policies_get-iam-policy.md', 'Firewall Policies', 'Get Iam Policy'] -- ['firewall-policies_get-rule.md', 'Firewall Policies', 'Get Rule'] -- ['firewall-policies_insert.md', 'Firewall Policies', 'Insert'] -- ['firewall-policies_list.md', 'Firewall Policies', 'List'] -- ['firewall-policies_list-associations.md', 'Firewall Policies', 'List Associations'] -- ['firewall-policies_move.md', 'Firewall Policies', 'Move'] -- ['firewall-policies_patch.md', 'Firewall Policies', 'Patch'] -- ['firewall-policies_patch-rule.md', 'Firewall Policies', 'Patch Rule'] -- ['firewall-policies_remove-association.md', 'Firewall Policies', 'Remove Association'] -- ['firewall-policies_remove-rule.md', 'Firewall Policies', 'Remove Rule'] -- 
['firewall-policies_set-iam-policy.md', 'Firewall Policies', 'Set Iam Policy'] -- ['firewall-policies_test-iam-permissions.md', 'Firewall Policies', 'Test Iam Permissions'] -- ['firewalls_delete.md', 'Firewalls', 'Delete'] -- ['firewalls_get.md', 'Firewalls', 'Get'] -- ['firewalls_insert.md', 'Firewalls', 'Insert'] -- ['firewalls_list.md', 'Firewalls', 'List'] -- ['firewalls_patch.md', 'Firewalls', 'Patch'] -- ['firewalls_update.md', 'Firewalls', 'Update'] -- ['forwarding-rules_aggregated-list.md', 'Forwarding Rules', 'Aggregated List'] -- ['forwarding-rules_delete.md', 'Forwarding Rules', 'Delete'] -- ['forwarding-rules_get.md', 'Forwarding Rules', 'Get'] -- ['forwarding-rules_insert.md', 'Forwarding Rules', 'Insert'] -- ['forwarding-rules_list.md', 'Forwarding Rules', 'List'] -- ['forwarding-rules_patch.md', 'Forwarding Rules', 'Patch'] -- ['forwarding-rules_set-labels.md', 'Forwarding Rules', 'Set Labels'] -- ['forwarding-rules_set-target.md', 'Forwarding Rules', 'Set Target'] -- ['global-addresses_delete.md', 'Global Addresses', 'Delete'] -- ['global-addresses_get.md', 'Global Addresses', 'Get'] -- ['global-addresses_insert.md', 'Global Addresses', 'Insert'] -- ['global-addresses_list.md', 'Global Addresses', 'List'] -- ['global-forwarding-rules_delete.md', 'Global Forwarding Rules', 'Delete'] -- ['global-forwarding-rules_get.md', 'Global Forwarding Rules', 'Get'] -- ['global-forwarding-rules_insert.md', 'Global Forwarding Rules', 'Insert'] -- ['global-forwarding-rules_list.md', 'Global Forwarding Rules', 'List'] -- ['global-forwarding-rules_patch.md', 'Global Forwarding Rules', 'Patch'] -- ['global-forwarding-rules_set-labels.md', 'Global Forwarding Rules', 'Set Labels'] -- ['global-forwarding-rules_set-target.md', 'Global Forwarding Rules', 'Set Target'] -- ['global-network-endpoint-groups_attach-network-endpoints.md', 'Global Network Endpoint Groups', 'Attach Network Endpoints'] -- ['global-network-endpoint-groups_delete.md', 'Global Network Endpoint 
Groups', 'Delete'] -- ['global-network-endpoint-groups_detach-network-endpoints.md', 'Global Network Endpoint Groups', 'Detach Network Endpoints'] -- ['global-network-endpoint-groups_get.md', 'Global Network Endpoint Groups', 'Get'] -- ['global-network-endpoint-groups_insert.md', 'Global Network Endpoint Groups', 'Insert'] -- ['global-network-endpoint-groups_list.md', 'Global Network Endpoint Groups', 'List'] -- ['global-network-endpoint-groups_list-network-endpoints.md', 'Global Network Endpoint Groups', 'List Network Endpoints'] -- ['global-operations_aggregated-list.md', 'Global Operations', 'Aggregated List'] -- ['global-operations_delete.md', 'Global Operations', 'Delete'] -- ['global-operations_get.md', 'Global Operations', 'Get'] -- ['global-operations_list.md', 'Global Operations', 'List'] -- ['global-operations_wait.md', 'Global Operations', 'Wait'] -- ['global-organization-operations_delete.md', 'Global Organization Operations', 'Delete'] -- ['global-organization-operations_get.md', 'Global Organization Operations', 'Get'] -- ['global-organization-operations_list.md', 'Global Organization Operations', 'List'] -- ['global-public-delegated-prefixes_delete.md', 'Global Public Delegated Prefixes', 'Delete'] -- ['global-public-delegated-prefixes_get.md', 'Global Public Delegated Prefixes', 'Get'] -- ['global-public-delegated-prefixes_insert.md', 'Global Public Delegated Prefixes', 'Insert'] -- ['global-public-delegated-prefixes_list.md', 'Global Public Delegated Prefixes', 'List'] -- ['global-public-delegated-prefixes_patch.md', 'Global Public Delegated Prefixes', 'Patch'] -- ['health-checks_aggregated-list.md', 'Health Checks', 'Aggregated List'] -- ['health-checks_delete.md', 'Health Checks', 'Delete'] -- ['health-checks_get.md', 'Health Checks', 'Get'] -- ['health-checks_insert.md', 'Health Checks', 'Insert'] -- ['health-checks_list.md', 'Health Checks', 'List'] -- ['health-checks_patch.md', 'Health Checks', 'Patch'] -- ['health-checks_update.md', 'Health 
Checks', 'Update'] -- ['http-health-checks_delete.md', 'Http Health Checks', 'Delete'] -- ['http-health-checks_get.md', 'Http Health Checks', 'Get'] -- ['http-health-checks_insert.md', 'Http Health Checks', 'Insert'] -- ['http-health-checks_list.md', 'Http Health Checks', 'List'] -- ['http-health-checks_patch.md', 'Http Health Checks', 'Patch'] -- ['http-health-checks_update.md', 'Http Health Checks', 'Update'] -- ['https-health-checks_delete.md', 'Https Health Checks', 'Delete'] -- ['https-health-checks_get.md', 'Https Health Checks', 'Get'] -- ['https-health-checks_insert.md', 'Https Health Checks', 'Insert'] -- ['https-health-checks_list.md', 'Https Health Checks', 'List'] -- ['https-health-checks_patch.md', 'Https Health Checks', 'Patch'] -- ['https-health-checks_update.md', 'Https Health Checks', 'Update'] -- ['image-family-views_get.md', 'Image Family Views', 'Get'] -- ['images_delete.md', 'Images', 'Delete'] -- ['images_deprecate.md', 'Images', 'Deprecate'] -- ['images_get.md', 'Images', 'Get'] -- ['images_get-from-family.md', 'Images', 'Get From Family'] -- ['images_get-iam-policy.md', 'Images', 'Get Iam Policy'] -- ['images_insert.md', 'Images', 'Insert'] -- ['images_list.md', 'Images', 'List'] -- ['images_patch.md', 'Images', 'Patch'] -- ['images_set-iam-policy.md', 'Images', 'Set Iam Policy'] -- ['images_set-labels.md', 'Images', 'Set Labels'] -- ['images_test-iam-permissions.md', 'Images', 'Test Iam Permissions'] -- ['instance-group-managers_abandon-instances.md', 'Instance Group Managers', 'Abandon Instances'] -- ['instance-group-managers_aggregated-list.md', 'Instance Group Managers', 'Aggregated List'] -- ['instance-group-managers_apply-updates-to-instances.md', 'Instance Group Managers', 'Apply Updates To Instances'] -- ['instance-group-managers_create-instances.md', 'Instance Group Managers', 'Create Instances'] -- ['instance-group-managers_delete.md', 'Instance Group Managers', 'Delete'] -- ['instance-group-managers_delete-instances.md', 'Instance 
Group Managers', 'Delete Instances'] -- ['instance-group-managers_delete-per-instance-configs.md', 'Instance Group Managers', 'Delete Per Instance Configs'] -- ['instance-group-managers_get.md', 'Instance Group Managers', 'Get'] -- ['instance-group-managers_insert.md', 'Instance Group Managers', 'Insert'] -- ['instance-group-managers_list.md', 'Instance Group Managers', 'List'] -- ['instance-group-managers_list-errors.md', 'Instance Group Managers', 'List Errors'] -- ['instance-group-managers_list-managed-instances.md', 'Instance Group Managers', 'List Managed Instances'] -- ['instance-group-managers_list-per-instance-configs.md', 'Instance Group Managers', 'List Per Instance Configs'] -- ['instance-group-managers_patch.md', 'Instance Group Managers', 'Patch'] -- ['instance-group-managers_patch-per-instance-configs.md', 'Instance Group Managers', 'Patch Per Instance Configs'] -- ['instance-group-managers_recreate-instances.md', 'Instance Group Managers', 'Recreate Instances'] -- ['instance-group-managers_resize.md', 'Instance Group Managers', 'Resize'] -- ['instance-group-managers_set-instance-template.md', 'Instance Group Managers', 'Set Instance Template'] -- ['instance-group-managers_set-target-pools.md', 'Instance Group Managers', 'Set Target Pools'] -- ['instance-group-managers_update-per-instance-configs.md', 'Instance Group Managers', 'Update Per Instance Configs'] -- ['instance-groups_add-instances.md', 'Instance Groups', 'Add Instances'] -- ['instance-groups_aggregated-list.md', 'Instance Groups', 'Aggregated List'] -- ['instance-groups_delete.md', 'Instance Groups', 'Delete'] -- ['instance-groups_get.md', 'Instance Groups', 'Get'] -- ['instance-groups_insert.md', 'Instance Groups', 'Insert'] -- ['instance-groups_list.md', 'Instance Groups', 'List'] -- ['instance-groups_list-instances.md', 'Instance Groups', 'List Instances'] -- ['instance-groups_remove-instances.md', 'Instance Groups', 'Remove Instances'] -- ['instance-groups_set-named-ports.md', 
'Instance Groups', 'Set Named Ports'] -- ['instance-templates_delete.md', 'Instance Templates', 'Delete'] -- ['instance-templates_get.md', 'Instance Templates', 'Get'] -- ['instance-templates_get-iam-policy.md', 'Instance Templates', 'Get Iam Policy'] -- ['instance-templates_insert.md', 'Instance Templates', 'Insert'] -- ['instance-templates_list.md', 'Instance Templates', 'List'] -- ['instance-templates_set-iam-policy.md', 'Instance Templates', 'Set Iam Policy'] -- ['instance-templates_test-iam-permissions.md', 'Instance Templates', 'Test Iam Permissions'] -- ['instances_add-access-config.md', 'Instances', 'Add Access Config'] -- ['instances_add-resource-policies.md', 'Instances', 'Add Resource Policies'] -- ['instances_aggregated-list.md', 'Instances', 'Aggregated List'] -- ['instances_attach-disk.md', 'Instances', 'Attach Disk'] -- ['instances_bulk-insert.md', 'Instances', 'Bulk Insert'] -- ['instances_delete.md', 'Instances', 'Delete'] -- ['instances_delete-access-config.md', 'Instances', 'Delete Access Config'] -- ['instances_detach-disk.md', 'Instances', 'Detach Disk'] -- ['instances_get.md', 'Instances', 'Get'] -- ['instances_get-effective-firewalls.md', 'Instances', 'Get Effective Firewalls'] -- ['instances_get-guest-attributes.md', 'Instances', 'Get Guest Attributes'] -- ['instances_get-iam-policy.md', 'Instances', 'Get Iam Policy'] -- ['instances_get-screenshot.md', 'Instances', 'Get Screenshot'] -- ['instances_get-serial-port-output.md', 'Instances', 'Get Serial Port Output'] -- ['instances_get-shielded-instance-identity.md', 'Instances', 'Get Shielded Instance Identity'] -- ['instances_insert.md', 'Instances', 'Insert'] -- ['instances_list.md', 'Instances', 'List'] -- ['instances_list-referrers.md', 'Instances', 'List Referrers'] -- ['instances_remove-resource-policies.md', 'Instances', 'Remove Resource Policies'] -- ['instances_reset.md', 'Instances', 'Reset'] -- ['instances_resume.md', 'Instances', 'Resume'] -- 
['instances_send-diagnostic-interrupt.md', 'Instances', 'Send Diagnostic Interrupt'] -- ['instances_set-deletion-protection.md', 'Instances', 'Set Deletion Protection'] -- ['instances_set-disk-auto-delete.md', 'Instances', 'Set Disk Auto Delete'] -- ['instances_set-iam-policy.md', 'Instances', 'Set Iam Policy'] -- ['instances_set-labels.md', 'Instances', 'Set Labels'] -- ['instances_set-machine-resources.md', 'Instances', 'Set Machine Resources'] -- ['instances_set-machine-type.md', 'Instances', 'Set Machine Type'] -- ['instances_set-metadata.md', 'Instances', 'Set Metadata'] -- ['instances_set-min-cpu-platform.md', 'Instances', 'Set Min Cpu Platform'] -- ['instances_set-scheduling.md', 'Instances', 'Set Scheduling'] -- ['instances_set-service-account.md', 'Instances', 'Set Service Account'] -- ['instances_set-shielded-instance-integrity-policy.md', 'Instances', 'Set Shielded Instance Integrity Policy'] -- ['instances_set-tags.md', 'Instances', 'Set Tags'] -- ['instances_simulate-maintenance-event.md', 'Instances', 'Simulate Maintenance Event'] -- ['instances_start.md', 'Instances', 'Start'] -- ['instances_start-with-encryption-key.md', 'Instances', 'Start With Encryption Key'] -- ['instances_stop.md', 'Instances', 'Stop'] -- ['instances_suspend.md', 'Instances', 'Suspend'] -- ['instances_test-iam-permissions.md', 'Instances', 'Test Iam Permissions'] -- ['instances_update.md', 'Instances', 'Update'] -- ['instances_update-access-config.md', 'Instances', 'Update Access Config'] -- ['instances_update-display-device.md', 'Instances', 'Update Display Device'] -- ['instances_update-network-interface.md', 'Instances', 'Update Network Interface'] -- ['instances_update-shielded-instance-config.md', 'Instances', 'Update Shielded Instance Config'] -- ['interconnect-attachments_aggregated-list.md', 'Interconnect Attachments', 'Aggregated List'] -- ['interconnect-attachments_delete.md', 'Interconnect Attachments', 'Delete'] -- ['interconnect-attachments_get.md', 'Interconnect 
Attachments', 'Get'] -- ['interconnect-attachments_insert.md', 'Interconnect Attachments', 'Insert'] -- ['interconnect-attachments_list.md', 'Interconnect Attachments', 'List'] -- ['interconnect-attachments_patch.md', 'Interconnect Attachments', 'Patch'] -- ['interconnect-locations_get.md', 'Interconnect Locations', 'Get'] -- ['interconnect-locations_list.md', 'Interconnect Locations', 'List'] -- ['interconnects_delete.md', 'Interconnects', 'Delete'] -- ['interconnects_get.md', 'Interconnects', 'Get'] -- ['interconnects_get-diagnostics.md', 'Interconnects', 'Get Diagnostics'] -- ['interconnects_insert.md', 'Interconnects', 'Insert'] -- ['interconnects_list.md', 'Interconnects', 'List'] -- ['interconnects_patch.md', 'Interconnects', 'Patch'] -- ['license-codes_get.md', 'License Codes', 'Get'] -- ['license-codes_test-iam-permissions.md', 'License Codes', 'Test Iam Permissions'] -- ['licenses_delete.md', 'Licenses', 'Delete'] -- ['licenses_get.md', 'Licenses', 'Get'] -- ['licenses_get-iam-policy.md', 'Licenses', 'Get Iam Policy'] -- ['licenses_insert.md', 'Licenses', 'Insert'] -- ['licenses_list.md', 'Licenses', 'List'] -- ['licenses_set-iam-policy.md', 'Licenses', 'Set Iam Policy'] -- ['licenses_test-iam-permissions.md', 'Licenses', 'Test Iam Permissions'] -- ['machine-images_delete.md', 'Machine Images', 'Delete'] -- ['machine-images_get.md', 'Machine Images', 'Get'] -- ['machine-images_get-iam-policy.md', 'Machine Images', 'Get Iam Policy'] -- ['machine-images_insert.md', 'Machine Images', 'Insert'] -- ['machine-images_list.md', 'Machine Images', 'List'] -- ['machine-images_set-iam-policy.md', 'Machine Images', 'Set Iam Policy'] -- ['machine-images_test-iam-permissions.md', 'Machine Images', 'Test Iam Permissions'] -- ['machine-types_aggregated-list.md', 'Machine Types', 'Aggregated List'] -- ['machine-types_get.md', 'Machine Types', 'Get'] -- ['machine-types_list.md', 'Machine Types', 'List'] -- ['network-endpoint-groups_aggregated-list.md', 'Network Endpoint 
Groups', 'Aggregated List'] -- ['network-endpoint-groups_attach-network-endpoints.md', 'Network Endpoint Groups', 'Attach Network Endpoints'] -- ['network-endpoint-groups_delete.md', 'Network Endpoint Groups', 'Delete'] -- ['network-endpoint-groups_detach-network-endpoints.md', 'Network Endpoint Groups', 'Detach Network Endpoints'] -- ['network-endpoint-groups_get.md', 'Network Endpoint Groups', 'Get'] -- ['network-endpoint-groups_insert.md', 'Network Endpoint Groups', 'Insert'] -- ['network-endpoint-groups_list.md', 'Network Endpoint Groups', 'List'] -- ['network-endpoint-groups_list-network-endpoints.md', 'Network Endpoint Groups', 'List Network Endpoints'] -- ['network-endpoint-groups_test-iam-permissions.md', 'Network Endpoint Groups', 'Test Iam Permissions'] -- ['network-firewall-policies_add-association.md', 'Network Firewall Policies', 'Add Association'] -- ['network-firewall-policies_add-rule.md', 'Network Firewall Policies', 'Add Rule'] -- ['network-firewall-policies_clone-rules.md', 'Network Firewall Policies', 'Clone Rules'] -- ['network-firewall-policies_delete.md', 'Network Firewall Policies', 'Delete'] -- ['network-firewall-policies_get.md', 'Network Firewall Policies', 'Get'] -- ['network-firewall-policies_get-association.md', 'Network Firewall Policies', 'Get Association'] -- ['network-firewall-policies_get-iam-policy.md', 'Network Firewall Policies', 'Get Iam Policy'] -- ['network-firewall-policies_get-rule.md', 'Network Firewall Policies', 'Get Rule'] -- ['network-firewall-policies_insert.md', 'Network Firewall Policies', 'Insert'] -- ['network-firewall-policies_list.md', 'Network Firewall Policies', 'List'] -- ['network-firewall-policies_patch.md', 'Network Firewall Policies', 'Patch'] -- ['network-firewall-policies_patch-rule.md', 'Network Firewall Policies', 'Patch Rule'] -- ['network-firewall-policies_remove-association.md', 'Network Firewall Policies', 'Remove Association'] -- ['network-firewall-policies_remove-rule.md', 'Network Firewall 
Policies', 'Remove Rule'] -- ['network-firewall-policies_set-iam-policy.md', 'Network Firewall Policies', 'Set Iam Policy'] -- ['network-firewall-policies_test-iam-permissions.md', 'Network Firewall Policies', 'Test Iam Permissions'] -- ['networks_add-peering.md', 'Networks', 'Add Peering'] -- ['networks_delete.md', 'Networks', 'Delete'] -- ['networks_get.md', 'Networks', 'Get'] -- ['networks_get-effective-firewalls.md', 'Networks', 'Get Effective Firewalls'] -- ['networks_insert.md', 'Networks', 'Insert'] -- ['networks_list.md', 'Networks', 'List'] -- ['networks_list-peering-routes.md', 'Networks', 'List Peering Routes'] -- ['networks_patch.md', 'Networks', 'Patch'] -- ['networks_remove-peering.md', 'Networks', 'Remove Peering'] -- ['networks_switch-to-custom-mode.md', 'Networks', 'Switch To Custom Mode'] -- ['networks_update-peering.md', 'Networks', 'Update Peering'] -- ['node-groups_add-nodes.md', 'Node Groups', 'Add Nodes'] -- ['node-groups_aggregated-list.md', 'Node Groups', 'Aggregated List'] -- ['node-groups_delete.md', 'Node Groups', 'Delete'] -- ['node-groups_delete-nodes.md', 'Node Groups', 'Delete Nodes'] -- ['node-groups_get.md', 'Node Groups', 'Get'] -- ['node-groups_get-iam-policy.md', 'Node Groups', 'Get Iam Policy'] -- ['node-groups_insert.md', 'Node Groups', 'Insert'] -- ['node-groups_list.md', 'Node Groups', 'List'] -- ['node-groups_list-nodes.md', 'Node Groups', 'List Nodes'] -- ['node-groups_patch.md', 'Node Groups', 'Patch'] -- ['node-groups_set-iam-policy.md', 'Node Groups', 'Set Iam Policy'] -- ['node-groups_set-node-template.md', 'Node Groups', 'Set Node Template'] -- ['node-groups_test-iam-permissions.md', 'Node Groups', 'Test Iam Permissions'] -- ['node-templates_aggregated-list.md', 'Node Templates', 'Aggregated List'] -- ['node-templates_delete.md', 'Node Templates', 'Delete'] -- ['node-templates_get.md', 'Node Templates', 'Get'] -- ['node-templates_get-iam-policy.md', 'Node Templates', 'Get Iam Policy'] -- ['node-templates_insert.md', 
'Node Templates', 'Insert'] -- ['node-templates_list.md', 'Node Templates', 'List'] -- ['node-templates_set-iam-policy.md', 'Node Templates', 'Set Iam Policy'] -- ['node-templates_test-iam-permissions.md', 'Node Templates', 'Test Iam Permissions'] -- ['node-types_aggregated-list.md', 'Node Types', 'Aggregated List'] -- ['node-types_get.md', 'Node Types', 'Get'] -- ['node-types_list.md', 'Node Types', 'List'] -- ['packet-mirrorings_aggregated-list.md', 'Packet Mirrorings', 'Aggregated List'] -- ['packet-mirrorings_delete.md', 'Packet Mirrorings', 'Delete'] -- ['packet-mirrorings_get.md', 'Packet Mirrorings', 'Get'] -- ['packet-mirrorings_insert.md', 'Packet Mirrorings', 'Insert'] -- ['packet-mirrorings_list.md', 'Packet Mirrorings', 'List'] -- ['packet-mirrorings_patch.md', 'Packet Mirrorings', 'Patch'] -- ['packet-mirrorings_test-iam-permissions.md', 'Packet Mirrorings', 'Test Iam Permissions'] -- ['projects_disable-xpn-host.md', 'Projects', 'Disable Xpn Host'] -- ['projects_disable-xpn-resource.md', 'Projects', 'Disable Xpn Resource'] -- ['projects_enable-xpn-host.md', 'Projects', 'Enable Xpn Host'] -- ['projects_enable-xpn-resource.md', 'Projects', 'Enable Xpn Resource'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_get-xpn-host.md', 'Projects', 'Get Xpn Host'] -- ['projects_get-xpn-resources.md', 'Projects', 'Get Xpn Resources'] -- ['projects_list-xpn-hosts.md', 'Projects', 'List Xpn Hosts'] -- ['projects_move-disk.md', 'Projects', 'Move Disk'] -- ['projects_move-instance.md', 'Projects', 'Move Instance'] -- ['projects_set-common-instance-metadata.md', 'Projects', 'Set Common Instance Metadata'] -- ['projects_set-default-network-tier.md', 'Projects', 'Set Default Network Tier'] -- ['projects_set-usage-export-bucket.md', 'Projects', 'Set Usage Export Bucket'] -- ['public-advertised-prefixes_delete.md', 'Public Advertised Prefixes', 'Delete'] -- ['public-advertised-prefixes_get.md', 'Public Advertised Prefixes', 'Get'] -- 
['public-advertised-prefixes_insert.md', 'Public Advertised Prefixes', 'Insert'] -- ['public-advertised-prefixes_list.md', 'Public Advertised Prefixes', 'List'] -- ['public-advertised-prefixes_patch.md', 'Public Advertised Prefixes', 'Patch'] -- ['public-delegated-prefixes_aggregated-list.md', 'Public Delegated Prefixes', 'Aggregated List'] -- ['public-delegated-prefixes_delete.md', 'Public Delegated Prefixes', 'Delete'] -- ['public-delegated-prefixes_get.md', 'Public Delegated Prefixes', 'Get'] -- ['public-delegated-prefixes_insert.md', 'Public Delegated Prefixes', 'Insert'] -- ['public-delegated-prefixes_list.md', 'Public Delegated Prefixes', 'List'] -- ['public-delegated-prefixes_patch.md', 'Public Delegated Prefixes', 'Patch'] -- ['region-autoscalers_delete.md', 'Region Autoscalers', 'Delete'] -- ['region-autoscalers_get.md', 'Region Autoscalers', 'Get'] -- ['region-autoscalers_insert.md', 'Region Autoscalers', 'Insert'] -- ['region-autoscalers_list.md', 'Region Autoscalers', 'List'] -- ['region-autoscalers_patch.md', 'Region Autoscalers', 'Patch'] -- ['region-autoscalers_update.md', 'Region Autoscalers', 'Update'] -- ['region-backend-services_delete.md', 'Region Backend Services', 'Delete'] -- ['region-backend-services_get.md', 'Region Backend Services', 'Get'] -- ['region-backend-services_get-health.md', 'Region Backend Services', 'Get Health'] -- ['region-backend-services_insert.md', 'Region Backend Services', 'Insert'] -- ['region-backend-services_list.md', 'Region Backend Services', 'List'] -- ['region-backend-services_patch.md', 'Region Backend Services', 'Patch'] -- ['region-backend-services_update.md', 'Region Backend Services', 'Update'] -- ['region-commitments_aggregated-list.md', 'Region Commitments', 'Aggregated List'] -- ['region-commitments_get.md', 'Region Commitments', 'Get'] -- ['region-commitments_insert.md', 'Region Commitments', 'Insert'] -- ['region-commitments_list.md', 'Region Commitments', 'List'] -- ['region-commitments_update.md', 
'Region Commitments', 'Update'] -- ['region-disk-types_get.md', 'Region Disk Types', 'Get'] -- ['region-disk-types_list.md', 'Region Disk Types', 'List'] -- ['region-disks_add-resource-policies.md', 'Region Disks', 'Add Resource Policies'] -- ['region-disks_create-snapshot.md', 'Region Disks', 'Create Snapshot'] -- ['region-disks_delete.md', 'Region Disks', 'Delete'] -- ['region-disks_get.md', 'Region Disks', 'Get'] -- ['region-disks_get-iam-policy.md', 'Region Disks', 'Get Iam Policy'] -- ['region-disks_insert.md', 'Region Disks', 'Insert'] -- ['region-disks_list.md', 'Region Disks', 'List'] -- ['region-disks_remove-resource-policies.md', 'Region Disks', 'Remove Resource Policies'] -- ['region-disks_resize.md', 'Region Disks', 'Resize'] -- ['region-disks_set-iam-policy.md', 'Region Disks', 'Set Iam Policy'] -- ['region-disks_set-labels.md', 'Region Disks', 'Set Labels'] -- ['region-disks_test-iam-permissions.md', 'Region Disks', 'Test Iam Permissions'] -- ['region-health-check-services_delete.md', 'Region Health Check Services', 'Delete'] -- ['region-health-check-services_get.md', 'Region Health Check Services', 'Get'] -- ['region-health-check-services_insert.md', 'Region Health Check Services', 'Insert'] -- ['region-health-check-services_list.md', 'Region Health Check Services', 'List'] -- ['region-health-check-services_patch.md', 'Region Health Check Services', 'Patch'] -- ['region-health-checks_delete.md', 'Region Health Checks', 'Delete'] -- ['region-health-checks_get.md', 'Region Health Checks', 'Get'] -- ['region-health-checks_insert.md', 'Region Health Checks', 'Insert'] -- ['region-health-checks_list.md', 'Region Health Checks', 'List'] -- ['region-health-checks_patch.md', 'Region Health Checks', 'Patch'] -- ['region-health-checks_update.md', 'Region Health Checks', 'Update'] -- ['region-instance-group-managers_abandon-instances.md', 'Region Instance Group Managers', 'Abandon Instances'] -- ['region-instance-group-managers_apply-updates-to-instances.md', 
'Region Instance Group Managers', 'Apply Updates To Instances'] -- ['region-instance-group-managers_create-instances.md', 'Region Instance Group Managers', 'Create Instances'] -- ['region-instance-group-managers_delete.md', 'Region Instance Group Managers', 'Delete'] -- ['region-instance-group-managers_delete-instances.md', 'Region Instance Group Managers', 'Delete Instances'] -- ['region-instance-group-managers_delete-per-instance-configs.md', 'Region Instance Group Managers', 'Delete Per Instance Configs'] -- ['region-instance-group-managers_get.md', 'Region Instance Group Managers', 'Get'] -- ['region-instance-group-managers_insert.md', 'Region Instance Group Managers', 'Insert'] -- ['region-instance-group-managers_list.md', 'Region Instance Group Managers', 'List'] -- ['region-instance-group-managers_list-errors.md', 'Region Instance Group Managers', 'List Errors'] -- ['region-instance-group-managers_list-managed-instances.md', 'Region Instance Group Managers', 'List Managed Instances'] -- ['region-instance-group-managers_list-per-instance-configs.md', 'Region Instance Group Managers', 'List Per Instance Configs'] -- ['region-instance-group-managers_patch.md', 'Region Instance Group Managers', 'Patch'] -- ['region-instance-group-managers_patch-per-instance-configs.md', 'Region Instance Group Managers', 'Patch Per Instance Configs'] -- ['region-instance-group-managers_recreate-instances.md', 'Region Instance Group Managers', 'Recreate Instances'] -- ['region-instance-group-managers_resize.md', 'Region Instance Group Managers', 'Resize'] -- ['region-instance-group-managers_set-instance-template.md', 'Region Instance Group Managers', 'Set Instance Template'] -- ['region-instance-group-managers_set-target-pools.md', 'Region Instance Group Managers', 'Set Target Pools'] -- ['region-instance-group-managers_update-per-instance-configs.md', 'Region Instance Group Managers', 'Update Per Instance Configs'] -- ['region-instance-groups_get.md', 'Region Instance Groups', 
'Get'] -- ['region-instance-groups_list.md', 'Region Instance Groups', 'List'] -- ['region-instance-groups_list-instances.md', 'Region Instance Groups', 'List Instances'] -- ['region-instance-groups_set-named-ports.md', 'Region Instance Groups', 'Set Named Ports'] -- ['region-instances_bulk-insert.md', 'Region Instances', 'Bulk Insert'] -- ['region-network-endpoint-groups_delete.md', 'Region Network Endpoint Groups', 'Delete'] -- ['region-network-endpoint-groups_get.md', 'Region Network Endpoint Groups', 'Get'] -- ['region-network-endpoint-groups_insert.md', 'Region Network Endpoint Groups', 'Insert'] -- ['region-network-endpoint-groups_list.md', 'Region Network Endpoint Groups', 'List'] -- ['region-network-firewall-policies_add-association.md', 'Region Network Firewall Policies', 'Add Association'] -- ['region-network-firewall-policies_add-rule.md', 'Region Network Firewall Policies', 'Add Rule'] -- ['region-network-firewall-policies_clone-rules.md', 'Region Network Firewall Policies', 'Clone Rules'] -- ['region-network-firewall-policies_delete.md', 'Region Network Firewall Policies', 'Delete'] -- ['region-network-firewall-policies_get.md', 'Region Network Firewall Policies', 'Get'] -- ['region-network-firewall-policies_get-association.md', 'Region Network Firewall Policies', 'Get Association'] -- ['region-network-firewall-policies_get-effective-firewalls.md', 'Region Network Firewall Policies', 'Get Effective Firewalls'] -- ['region-network-firewall-policies_get-iam-policy.md', 'Region Network Firewall Policies', 'Get Iam Policy'] -- ['region-network-firewall-policies_get-rule.md', 'Region Network Firewall Policies', 'Get Rule'] -- ['region-network-firewall-policies_insert.md', 'Region Network Firewall Policies', 'Insert'] -- ['region-network-firewall-policies_list.md', 'Region Network Firewall Policies', 'List'] -- ['region-network-firewall-policies_patch.md', 'Region Network Firewall Policies', 'Patch'] -- ['region-network-firewall-policies_patch-rule.md', 
'Region Network Firewall Policies', 'Patch Rule'] -- ['region-network-firewall-policies_remove-association.md', 'Region Network Firewall Policies', 'Remove Association'] -- ['region-network-firewall-policies_remove-rule.md', 'Region Network Firewall Policies', 'Remove Rule'] -- ['region-network-firewall-policies_set-iam-policy.md', 'Region Network Firewall Policies', 'Set Iam Policy'] -- ['region-network-firewall-policies_test-iam-permissions.md', 'Region Network Firewall Policies', 'Test Iam Permissions'] -- ['region-notification-endpoints_delete.md', 'Region Notification Endpoints', 'Delete'] -- ['region-notification-endpoints_get.md', 'Region Notification Endpoints', 'Get'] -- ['region-notification-endpoints_insert.md', 'Region Notification Endpoints', 'Insert'] -- ['region-notification-endpoints_list.md', 'Region Notification Endpoints', 'List'] -- ['region-operations_delete.md', 'Region Operations', 'Delete'] -- ['region-operations_get.md', 'Region Operations', 'Get'] -- ['region-operations_list.md', 'Region Operations', 'List'] -- ['region-operations_wait.md', 'Region Operations', 'Wait'] -- ['region-ssl-certificates_delete.md', 'Region Ssl Certificates', 'Delete'] -- ['region-ssl-certificates_get.md', 'Region Ssl Certificates', 'Get'] -- ['region-ssl-certificates_insert.md', 'Region Ssl Certificates', 'Insert'] -- ['region-ssl-certificates_list.md', 'Region Ssl Certificates', 'List'] -- ['region-target-http-proxies_delete.md', 'Region Target Http Proxies', 'Delete'] -- ['region-target-http-proxies_get.md', 'Region Target Http Proxies', 'Get'] -- ['region-target-http-proxies_insert.md', 'Region Target Http Proxies', 'Insert'] -- ['region-target-http-proxies_list.md', 'Region Target Http Proxies', 'List'] -- ['region-target-http-proxies_set-url-map.md', 'Region Target Http Proxies', 'Set Url Map'] -- ['region-target-https-proxies_delete.md', 'Region Target Https Proxies', 'Delete'] -- ['region-target-https-proxies_get.md', 'Region Target Https Proxies', 'Get'] 
-- ['region-target-https-proxies_insert.md', 'Region Target Https Proxies', 'Insert'] -- ['region-target-https-proxies_list.md', 'Region Target Https Proxies', 'List'] -- ['region-target-https-proxies_set-ssl-certificates.md', 'Region Target Https Proxies', 'Set Ssl Certificates'] -- ['region-target-https-proxies_set-url-map.md', 'Region Target Https Proxies', 'Set Url Map'] -- ['region-url-maps_delete.md', 'Region Url Maps', 'Delete'] -- ['region-url-maps_get.md', 'Region Url Maps', 'Get'] -- ['region-url-maps_insert.md', 'Region Url Maps', 'Insert'] -- ['region-url-maps_list.md', 'Region Url Maps', 'List'] -- ['region-url-maps_patch.md', 'Region Url Maps', 'Patch'] -- ['region-url-maps_update.md', 'Region Url Maps', 'Update'] -- ['region-url-maps_validate.md', 'Region Url Maps', 'Validate'] -- ['regions_get.md', 'Regions', 'Get'] -- ['regions_list.md', 'Regions', 'List'] -- ['reservations_aggregated-list.md', 'Reservations', 'Aggregated List'] -- ['reservations_delete.md', 'Reservations', 'Delete'] -- ['reservations_get.md', 'Reservations', 'Get'] -- ['reservations_get-iam-policy.md', 'Reservations', 'Get Iam Policy'] -- ['reservations_insert.md', 'Reservations', 'Insert'] -- ['reservations_list.md', 'Reservations', 'List'] -- ['reservations_resize.md', 'Reservations', 'Resize'] -- ['reservations_set-iam-policy.md', 'Reservations', 'Set Iam Policy'] -- ['reservations_test-iam-permissions.md', 'Reservations', 'Test Iam Permissions'] -- ['reservations_update.md', 'Reservations', 'Update'] -- ['resource-policies_aggregated-list.md', 'Resource Policies', 'Aggregated List'] -- ['resource-policies_delete.md', 'Resource Policies', 'Delete'] -- ['resource-policies_get.md', 'Resource Policies', 'Get'] -- ['resource-policies_get-iam-policy.md', 'Resource Policies', 'Get Iam Policy'] -- ['resource-policies_insert.md', 'Resource Policies', 'Insert'] -- ['resource-policies_list.md', 'Resource Policies', 'List'] -- ['resource-policies_set-iam-policy.md', 'Resource Policies', 
'Set Iam Policy'] -- ['resource-policies_test-iam-permissions.md', 'Resource Policies', 'Test Iam Permissions'] -- ['routers_aggregated-list.md', 'Routers', 'Aggregated List'] -- ['routers_delete.md', 'Routers', 'Delete'] -- ['routers_get.md', 'Routers', 'Get'] -- ['routers_get-nat-mapping-info.md', 'Routers', 'Get Nat Mapping Info'] -- ['routers_get-router-status.md', 'Routers', 'Get Router Status'] -- ['routers_insert.md', 'Routers', 'Insert'] -- ['routers_list.md', 'Routers', 'List'] -- ['routers_patch.md', 'Routers', 'Patch'] -- ['routers_preview.md', 'Routers', 'Preview'] -- ['routers_update.md', 'Routers', 'Update'] -- ['routes_delete.md', 'Routes', 'Delete'] -- ['routes_get.md', 'Routes', 'Get'] -- ['routes_insert.md', 'Routes', 'Insert'] -- ['routes_list.md', 'Routes', 'List'] -- ['security-policies_add-rule.md', 'Security Policies', 'Add Rule'] -- ['security-policies_delete.md', 'Security Policies', 'Delete'] -- ['security-policies_get.md', 'Security Policies', 'Get'] -- ['security-policies_get-rule.md', 'Security Policies', 'Get Rule'] -- ['security-policies_insert.md', 'Security Policies', 'Insert'] -- ['security-policies_list.md', 'Security Policies', 'List'] -- ['security-policies_list-preconfigured-expression-sets.md', 'Security Policies', 'List Preconfigured Expression Sets'] -- ['security-policies_patch.md', 'Security Policies', 'Patch'] -- ['security-policies_patch-rule.md', 'Security Policies', 'Patch Rule'] -- ['security-policies_remove-rule.md', 'Security Policies', 'Remove Rule'] -- ['service-attachments_aggregated-list.md', 'Service Attachments', 'Aggregated List'] -- ['service-attachments_delete.md', 'Service Attachments', 'Delete'] -- ['service-attachments_get.md', 'Service Attachments', 'Get'] -- ['service-attachments_get-iam-policy.md', 'Service Attachments', 'Get Iam Policy'] -- ['service-attachments_insert.md', 'Service Attachments', 'Insert'] -- ['service-attachments_list.md', 'Service Attachments', 'List'] -- 
['service-attachments_patch.md', 'Service Attachments', 'Patch'] -- ['service-attachments_set-iam-policy.md', 'Service Attachments', 'Set Iam Policy'] -- ['service-attachments_test-iam-permissions.md', 'Service Attachments', 'Test Iam Permissions'] -- ['snapshots_delete.md', 'Snapshots', 'Delete'] -- ['snapshots_get.md', 'Snapshots', 'Get'] -- ['snapshots_get-iam-policy.md', 'Snapshots', 'Get Iam Policy'] -- ['snapshots_insert.md', 'Snapshots', 'Insert'] -- ['snapshots_list.md', 'Snapshots', 'List'] -- ['snapshots_set-iam-policy.md', 'Snapshots', 'Set Iam Policy'] -- ['snapshots_set-labels.md', 'Snapshots', 'Set Labels'] -- ['snapshots_test-iam-permissions.md', 'Snapshots', 'Test Iam Permissions'] -- ['ssl-certificates_aggregated-list.md', 'Ssl Certificates', 'Aggregated List'] -- ['ssl-certificates_delete.md', 'Ssl Certificates', 'Delete'] -- ['ssl-certificates_get.md', 'Ssl Certificates', 'Get'] -- ['ssl-certificates_insert.md', 'Ssl Certificates', 'Insert'] -- ['ssl-certificates_list.md', 'Ssl Certificates', 'List'] -- ['ssl-policies_delete.md', 'Ssl Policies', 'Delete'] -- ['ssl-policies_get.md', 'Ssl Policies', 'Get'] -- ['ssl-policies_insert.md', 'Ssl Policies', 'Insert'] -- ['ssl-policies_list.md', 'Ssl Policies', 'List'] -- ['ssl-policies_list-available-features.md', 'Ssl Policies', 'List Available Features'] -- ['ssl-policies_patch.md', 'Ssl Policies', 'Patch'] -- ['subnetworks_aggregated-list.md', 'Subnetworks', 'Aggregated List'] -- ['subnetworks_delete.md', 'Subnetworks', 'Delete'] -- ['subnetworks_expand-ip-cidr-range.md', 'Subnetworks', 'Expand Ip Cidr Range'] -- ['subnetworks_get.md', 'Subnetworks', 'Get'] -- ['subnetworks_get-iam-policy.md', 'Subnetworks', 'Get Iam Policy'] -- ['subnetworks_insert.md', 'Subnetworks', 'Insert'] -- ['subnetworks_list.md', 'Subnetworks', 'List'] -- ['subnetworks_list-usable.md', 'Subnetworks', 'List Usable'] -- ['subnetworks_patch.md', 'Subnetworks', 'Patch'] -- ['subnetworks_set-iam-policy.md', 'Subnetworks', 'Set Iam 
Policy'] -- ['subnetworks_set-private-ip-google-access.md', 'Subnetworks', 'Set Private Ip Google Access'] -- ['subnetworks_test-iam-permissions.md', 'Subnetworks', 'Test Iam Permissions'] -- ['target-grpc-proxies_delete.md', 'Target Grpc Proxies', 'Delete'] -- ['target-grpc-proxies_get.md', 'Target Grpc Proxies', 'Get'] -- ['target-grpc-proxies_insert.md', 'Target Grpc Proxies', 'Insert'] -- ['target-grpc-proxies_list.md', 'Target Grpc Proxies', 'List'] -- ['target-grpc-proxies_patch.md', 'Target Grpc Proxies', 'Patch'] -- ['target-http-proxies_aggregated-list.md', 'Target Http Proxies', 'Aggregated List'] -- ['target-http-proxies_delete.md', 'Target Http Proxies', 'Delete'] -- ['target-http-proxies_get.md', 'Target Http Proxies', 'Get'] -- ['target-http-proxies_insert.md', 'Target Http Proxies', 'Insert'] -- ['target-http-proxies_list.md', 'Target Http Proxies', 'List'] -- ['target-http-proxies_patch.md', 'Target Http Proxies', 'Patch'] -- ['target-http-proxies_set-url-map.md', 'Target Http Proxies', 'Set Url Map'] -- ['target-https-proxies_aggregated-list.md', 'Target Https Proxies', 'Aggregated List'] -- ['target-https-proxies_delete.md', 'Target Https Proxies', 'Delete'] -- ['target-https-proxies_get.md', 'Target Https Proxies', 'Get'] -- ['target-https-proxies_insert.md', 'Target Https Proxies', 'Insert'] -- ['target-https-proxies_list.md', 'Target Https Proxies', 'List'] -- ['target-https-proxies_patch.md', 'Target Https Proxies', 'Patch'] -- ['target-https-proxies_set-quic-override.md', 'Target Https Proxies', 'Set Quic Override'] -- ['target-https-proxies_set-ssl-certificates.md', 'Target Https Proxies', 'Set Ssl Certificates'] -- ['target-https-proxies_set-ssl-policy.md', 'Target Https Proxies', 'Set Ssl Policy'] -- ['target-https-proxies_set-url-map.md', 'Target Https Proxies', 'Set Url Map'] -- ['target-instances_aggregated-list.md', 'Target Instances', 'Aggregated List'] -- ['target-instances_delete.md', 'Target Instances', 'Delete'] -- 
['target-instances_get.md', 'Target Instances', 'Get'] -- ['target-instances_insert.md', 'Target Instances', 'Insert'] -- ['target-instances_list.md', 'Target Instances', 'List'] -- ['target-pools_add-health-check.md', 'Target Pools', 'Add Health Check'] -- ['target-pools_add-instance.md', 'Target Pools', 'Add Instance'] -- ['target-pools_aggregated-list.md', 'Target Pools', 'Aggregated List'] -- ['target-pools_delete.md', 'Target Pools', 'Delete'] -- ['target-pools_get.md', 'Target Pools', 'Get'] -- ['target-pools_get-health.md', 'Target Pools', 'Get Health'] -- ['target-pools_insert.md', 'Target Pools', 'Insert'] -- ['target-pools_list.md', 'Target Pools', 'List'] -- ['target-pools_remove-health-check.md', 'Target Pools', 'Remove Health Check'] -- ['target-pools_remove-instance.md', 'Target Pools', 'Remove Instance'] -- ['target-pools_set-backup.md', 'Target Pools', 'Set Backup'] -- ['target-ssl-proxies_delete.md', 'Target Ssl Proxies', 'Delete'] -- ['target-ssl-proxies_get.md', 'Target Ssl Proxies', 'Get'] -- ['target-ssl-proxies_insert.md', 'Target Ssl Proxies', 'Insert'] -- ['target-ssl-proxies_list.md', 'Target Ssl Proxies', 'List'] -- ['target-ssl-proxies_set-backend-service.md', 'Target Ssl Proxies', 'Set Backend Service'] -- ['target-ssl-proxies_set-proxy-header.md', 'Target Ssl Proxies', 'Set Proxy Header'] -- ['target-ssl-proxies_set-ssl-certificates.md', 'Target Ssl Proxies', 'Set Ssl Certificates'] -- ['target-ssl-proxies_set-ssl-policy.md', 'Target Ssl Proxies', 'Set Ssl Policy'] -- ['target-tcp-proxies_delete.md', 'Target Tcp Proxies', 'Delete'] -- ['target-tcp-proxies_get.md', 'Target Tcp Proxies', 'Get'] -- ['target-tcp-proxies_insert.md', 'Target Tcp Proxies', 'Insert'] -- ['target-tcp-proxies_list.md', 'Target Tcp Proxies', 'List'] -- ['target-tcp-proxies_set-backend-service.md', 'Target Tcp Proxies', 'Set Backend Service'] -- ['target-tcp-proxies_set-proxy-header.md', 'Target Tcp Proxies', 'Set Proxy Header'] -- 
['target-vpn-gateways_aggregated-list.md', 'Target Vpn Gateways', 'Aggregated List'] -- ['target-vpn-gateways_delete.md', 'Target Vpn Gateways', 'Delete'] -- ['target-vpn-gateways_get.md', 'Target Vpn Gateways', 'Get'] -- ['target-vpn-gateways_insert.md', 'Target Vpn Gateways', 'Insert'] -- ['target-vpn-gateways_list.md', 'Target Vpn Gateways', 'List'] -- ['url-maps_aggregated-list.md', 'Url Maps', 'Aggregated List'] -- ['url-maps_delete.md', 'Url Maps', 'Delete'] -- ['url-maps_get.md', 'Url Maps', 'Get'] -- ['url-maps_insert.md', 'Url Maps', 'Insert'] -- ['url-maps_invalidate-cache.md', 'Url Maps', 'Invalidate Cache'] -- ['url-maps_list.md', 'Url Maps', 'List'] -- ['url-maps_patch.md', 'Url Maps', 'Patch'] -- ['url-maps_update.md', 'Url Maps', 'Update'] -- ['url-maps_validate.md', 'Url Maps', 'Validate'] -- ['vpn-gateways_aggregated-list.md', 'Vpn Gateways', 'Aggregated List'] -- ['vpn-gateways_delete.md', 'Vpn Gateways', 'Delete'] -- ['vpn-gateways_get.md', 'Vpn Gateways', 'Get'] -- ['vpn-gateways_get-status.md', 'Vpn Gateways', 'Get Status'] -- ['vpn-gateways_insert.md', 'Vpn Gateways', 'Insert'] -- ['vpn-gateways_list.md', 'Vpn Gateways', 'List'] -- ['vpn-gateways_set-labels.md', 'Vpn Gateways', 'Set Labels'] -- ['vpn-gateways_test-iam-permissions.md', 'Vpn Gateways', 'Test Iam Permissions'] -- ['vpn-tunnels_aggregated-list.md', 'Vpn Tunnels', 'Aggregated List'] -- ['vpn-tunnels_delete.md', 'Vpn Tunnels', 'Delete'] -- ['vpn-tunnels_get.md', 'Vpn Tunnels', 'Get'] -- ['vpn-tunnels_insert.md', 'Vpn Tunnels', 'Insert'] -- ['vpn-tunnels_list.md', 'Vpn Tunnels', 'List'] -- ['zone-operations_delete.md', 'Zone Operations', 'Delete'] -- ['zone-operations_get.md', 'Zone Operations', 'Get'] -- ['zone-operations_list.md', 'Zone Operations', 'List'] -- ['zone-operations_wait.md', 'Zone Operations', 'Wait'] -- ['zones_get.md', 'Zones', 'Get'] -- ['zones_list.md', 'Zones', 'List'] +nav: +- Home: 'index.md' +- 'Accelerator Types': + - 'Aggregated List': 
'accelerator-types_aggregated-list.md' + - 'Get': 'accelerator-types_get.md' + - 'List': 'accelerator-types_list.md' +- 'Addresses': + - 'Aggregated List': 'addresses_aggregated-list.md' + - 'Delete': 'addresses_delete.md' + - 'Get': 'addresses_get.md' + - 'Insert': 'addresses_insert.md' + - 'List': 'addresses_list.md' + - 'Set Labels': 'addresses_set-labels.md' +- 'Autoscalers': + - 'Aggregated List': 'autoscalers_aggregated-list.md' + - 'Delete': 'autoscalers_delete.md' + - 'Get': 'autoscalers_get.md' + - 'Insert': 'autoscalers_insert.md' + - 'List': 'autoscalers_list.md' + - 'Patch': 'autoscalers_patch.md' + - 'Update': 'autoscalers_update.md' +- 'Backend Buckets': + - 'Add Signed Url Key': 'backend-buckets_add-signed-url-key.md' + - 'Delete': 'backend-buckets_delete.md' + - 'Delete Signed Url Key': 'backend-buckets_delete-signed-url-key.md' + - 'Get': 'backend-buckets_get.md' + - 'Insert': 'backend-buckets_insert.md' + - 'List': 'backend-buckets_list.md' + - 'Patch': 'backend-buckets_patch.md' + - 'Set Edge Security Policy': 'backend-buckets_set-edge-security-policy.md' + - 'Update': 'backend-buckets_update.md' +- 'Backend Services': + - 'Add Signed Url Key': 'backend-services_add-signed-url-key.md' + - 'Aggregated List': 'backend-services_aggregated-list.md' + - 'Delete': 'backend-services_delete.md' + - 'Delete Signed Url Key': 'backend-services_delete-signed-url-key.md' + - 'Get': 'backend-services_get.md' + - 'Get Health': 'backend-services_get-health.md' + - 'Get Iam Policy': 'backend-services_get-iam-policy.md' + - 'Insert': 'backend-services_insert.md' + - 'List': 'backend-services_list.md' + - 'Patch': 'backend-services_patch.md' + - 'Set Edge Security Policy': 'backend-services_set-edge-security-policy.md' + - 'Set Iam Policy': 'backend-services_set-iam-policy.md' + - 'Set Security Policy': 'backend-services_set-security-policy.md' + - 'Update': 'backend-services_update.md' +- 'Disk Types': + - 'Aggregated List': 'disk-types_aggregated-list.md' + - 
'Get': 'disk-types_get.md' + - 'List': 'disk-types_list.md' +- 'Disks': + - 'Add Resource Policies': 'disks_add-resource-policies.md' + - 'Aggregated List': 'disks_aggregated-list.md' + - 'Create Snapshot': 'disks_create-snapshot.md' + - 'Delete': 'disks_delete.md' + - 'Get': 'disks_get.md' + - 'Get Iam Policy': 'disks_get-iam-policy.md' + - 'Insert': 'disks_insert.md' + - 'List': 'disks_list.md' + - 'Remove Resource Policies': 'disks_remove-resource-policies.md' + - 'Resize': 'disks_resize.md' + - 'Set Iam Policy': 'disks_set-iam-policy.md' + - 'Set Labels': 'disks_set-labels.md' + - 'Test Iam Permissions': 'disks_test-iam-permissions.md' +- 'External Vpn Gateways': + - 'Delete': 'external-vpn-gateways_delete.md' + - 'Get': 'external-vpn-gateways_get.md' + - 'Insert': 'external-vpn-gateways_insert.md' + - 'List': 'external-vpn-gateways_list.md' + - 'Set Labels': 'external-vpn-gateways_set-labels.md' + - 'Test Iam Permissions': 'external-vpn-gateways_test-iam-permissions.md' +- 'Firewall Policies': + - 'Add Association': 'firewall-policies_add-association.md' + - 'Add Rule': 'firewall-policies_add-rule.md' + - 'Clone Rules': 'firewall-policies_clone-rules.md' + - 'Delete': 'firewall-policies_delete.md' + - 'Get': 'firewall-policies_get.md' + - 'Get Association': 'firewall-policies_get-association.md' + - 'Get Iam Policy': 'firewall-policies_get-iam-policy.md' + - 'Get Rule': 'firewall-policies_get-rule.md' + - 'Insert': 'firewall-policies_insert.md' + - 'List': 'firewall-policies_list.md' + - 'List Associations': 'firewall-policies_list-associations.md' + - 'Move': 'firewall-policies_move.md' + - 'Patch': 'firewall-policies_patch.md' + - 'Patch Rule': 'firewall-policies_patch-rule.md' + - 'Remove Association': 'firewall-policies_remove-association.md' + - 'Remove Rule': 'firewall-policies_remove-rule.md' + - 'Set Iam Policy': 'firewall-policies_set-iam-policy.md' + - 'Test Iam Permissions': 'firewall-policies_test-iam-permissions.md' +- 'Firewalls': + - 'Delete': 
'firewalls_delete.md' + - 'Get': 'firewalls_get.md' + - 'Insert': 'firewalls_insert.md' + - 'List': 'firewalls_list.md' + - 'Patch': 'firewalls_patch.md' + - 'Update': 'firewalls_update.md' +- 'Forwarding Rules': + - 'Aggregated List': 'forwarding-rules_aggregated-list.md' + - 'Delete': 'forwarding-rules_delete.md' + - 'Get': 'forwarding-rules_get.md' + - 'Insert': 'forwarding-rules_insert.md' + - 'List': 'forwarding-rules_list.md' + - 'Patch': 'forwarding-rules_patch.md' + - 'Set Labels': 'forwarding-rules_set-labels.md' + - 'Set Target': 'forwarding-rules_set-target.md' +- 'Global Addresses': + - 'Delete': 'global-addresses_delete.md' + - 'Get': 'global-addresses_get.md' + - 'Insert': 'global-addresses_insert.md' + - 'List': 'global-addresses_list.md' + - 'Set Labels': 'global-addresses_set-labels.md' +- 'Global Forwarding Rules': + - 'Delete': 'global-forwarding-rules_delete.md' + - 'Get': 'global-forwarding-rules_get.md' + - 'Insert': 'global-forwarding-rules_insert.md' + - 'List': 'global-forwarding-rules_list.md' + - 'Patch': 'global-forwarding-rules_patch.md' + - 'Set Labels': 'global-forwarding-rules_set-labels.md' + - 'Set Target': 'global-forwarding-rules_set-target.md' +- 'Global Network Endpoint Groups': + - 'Attach Network Endpoints': 'global-network-endpoint-groups_attach-network-endpoints.md' + - 'Delete': 'global-network-endpoint-groups_delete.md' + - 'Detach Network Endpoints': 'global-network-endpoint-groups_detach-network-endpoints.md' + - 'Get': 'global-network-endpoint-groups_get.md' + - 'Insert': 'global-network-endpoint-groups_insert.md' + - 'List': 'global-network-endpoint-groups_list.md' + - 'List Network Endpoints': 'global-network-endpoint-groups_list-network-endpoints.md' +- 'Global Operations': + - 'Aggregated List': 'global-operations_aggregated-list.md' + - 'Delete': 'global-operations_delete.md' + - 'Get': 'global-operations_get.md' + - 'List': 'global-operations_list.md' + - 'Wait': 'global-operations_wait.md' +- 'Global 
Organization Operations': + - 'Delete': 'global-organization-operations_delete.md' + - 'Get': 'global-organization-operations_get.md' + - 'List': 'global-organization-operations_list.md' +- 'Global Public Delegated Prefixes': + - 'Delete': 'global-public-delegated-prefixes_delete.md' + - 'Get': 'global-public-delegated-prefixes_get.md' + - 'Insert': 'global-public-delegated-prefixes_insert.md' + - 'List': 'global-public-delegated-prefixes_list.md' + - 'Patch': 'global-public-delegated-prefixes_patch.md' +- 'Health Checks': + - 'Aggregated List': 'health-checks_aggregated-list.md' + - 'Delete': 'health-checks_delete.md' + - 'Get': 'health-checks_get.md' + - 'Insert': 'health-checks_insert.md' + - 'List': 'health-checks_list.md' + - 'Patch': 'health-checks_patch.md' + - 'Update': 'health-checks_update.md' +- 'Http Health Checks': + - 'Delete': 'http-health-checks_delete.md' + - 'Get': 'http-health-checks_get.md' + - 'Insert': 'http-health-checks_insert.md' + - 'List': 'http-health-checks_list.md' + - 'Patch': 'http-health-checks_patch.md' + - 'Update': 'http-health-checks_update.md' +- 'Https Health Checks': + - 'Delete': 'https-health-checks_delete.md' + - 'Get': 'https-health-checks_get.md' + - 'Insert': 'https-health-checks_insert.md' + - 'List': 'https-health-checks_list.md' + - 'Patch': 'https-health-checks_patch.md' + - 'Update': 'https-health-checks_update.md' +- 'Image Family Views': + - 'Get': 'image-family-views_get.md' +- 'Images': + - 'Delete': 'images_delete.md' + - 'Deprecate': 'images_deprecate.md' + - 'Get': 'images_get.md' + - 'Get From Family': 'images_get-from-family.md' + - 'Get Iam Policy': 'images_get-iam-policy.md' + - 'Insert': 'images_insert.md' + - 'List': 'images_list.md' + - 'Patch': 'images_patch.md' + - 'Set Iam Policy': 'images_set-iam-policy.md' + - 'Set Labels': 'images_set-labels.md' + - 'Test Iam Permissions': 'images_test-iam-permissions.md' +- 'Instance Group Managers': + - 'Abandon Instances': 
'instance-group-managers_abandon-instances.md' + - 'Aggregated List': 'instance-group-managers_aggregated-list.md' + - 'Apply Updates To Instances': 'instance-group-managers_apply-updates-to-instances.md' + - 'Create Instances': 'instance-group-managers_create-instances.md' + - 'Delete': 'instance-group-managers_delete.md' + - 'Delete Instances': 'instance-group-managers_delete-instances.md' + - 'Delete Per Instance Configs': 'instance-group-managers_delete-per-instance-configs.md' + - 'Get': 'instance-group-managers_get.md' + - 'Insert': 'instance-group-managers_insert.md' + - 'List': 'instance-group-managers_list.md' + - 'List Errors': 'instance-group-managers_list-errors.md' + - 'List Managed Instances': 'instance-group-managers_list-managed-instances.md' + - 'List Per Instance Configs': 'instance-group-managers_list-per-instance-configs.md' + - 'Patch': 'instance-group-managers_patch.md' + - 'Patch Per Instance Configs': 'instance-group-managers_patch-per-instance-configs.md' + - 'Recreate Instances': 'instance-group-managers_recreate-instances.md' + - 'Resize': 'instance-group-managers_resize.md' + - 'Set Instance Template': 'instance-group-managers_set-instance-template.md' + - 'Set Target Pools': 'instance-group-managers_set-target-pools.md' + - 'Update Per Instance Configs': 'instance-group-managers_update-per-instance-configs.md' +- 'Instance Groups': + - 'Add Instances': 'instance-groups_add-instances.md' + - 'Aggregated List': 'instance-groups_aggregated-list.md' + - 'Delete': 'instance-groups_delete.md' + - 'Get': 'instance-groups_get.md' + - 'Insert': 'instance-groups_insert.md' + - 'List': 'instance-groups_list.md' + - 'List Instances': 'instance-groups_list-instances.md' + - 'Remove Instances': 'instance-groups_remove-instances.md' + - 'Set Named Ports': 'instance-groups_set-named-ports.md' +- 'Instance Templates': + - 'Delete': 'instance-templates_delete.md' + - 'Get': 'instance-templates_get.md' + - 'Get Iam Policy': 
'instance-templates_get-iam-policy.md' + - 'Insert': 'instance-templates_insert.md' + - 'List': 'instance-templates_list.md' + - 'Set Iam Policy': 'instance-templates_set-iam-policy.md' + - 'Test Iam Permissions': 'instance-templates_test-iam-permissions.md' +- 'Instances': + - 'Add Access Config': 'instances_add-access-config.md' + - 'Add Resource Policies': 'instances_add-resource-policies.md' + - 'Aggregated List': 'instances_aggregated-list.md' + - 'Attach Disk': 'instances_attach-disk.md' + - 'Bulk Insert': 'instances_bulk-insert.md' + - 'Delete': 'instances_delete.md' + - 'Delete Access Config': 'instances_delete-access-config.md' + - 'Detach Disk': 'instances_detach-disk.md' + - 'Get': 'instances_get.md' + - 'Get Effective Firewalls': 'instances_get-effective-firewalls.md' + - 'Get Guest Attributes': 'instances_get-guest-attributes.md' + - 'Get Iam Policy': 'instances_get-iam-policy.md' + - 'Get Screenshot': 'instances_get-screenshot.md' + - 'Get Serial Port Output': 'instances_get-serial-port-output.md' + - 'Get Shielded Instance Identity': 'instances_get-shielded-instance-identity.md' + - 'Insert': 'instances_insert.md' + - 'List': 'instances_list.md' + - 'List Referrers': 'instances_list-referrers.md' + - 'Remove Resource Policies': 'instances_remove-resource-policies.md' + - 'Reset': 'instances_reset.md' + - 'Resume': 'instances_resume.md' + - 'Send Diagnostic Interrupt': 'instances_send-diagnostic-interrupt.md' + - 'Set Deletion Protection': 'instances_set-deletion-protection.md' + - 'Set Disk Auto Delete': 'instances_set-disk-auto-delete.md' + - 'Set Iam Policy': 'instances_set-iam-policy.md' + - 'Set Labels': 'instances_set-labels.md' + - 'Set Machine Resources': 'instances_set-machine-resources.md' + - 'Set Machine Type': 'instances_set-machine-type.md' + - 'Set Metadata': 'instances_set-metadata.md' + - 'Set Min Cpu Platform': 'instances_set-min-cpu-platform.md' + - 'Set Scheduling': 'instances_set-scheduling.md' + - 'Set Service Account': 
'instances_set-service-account.md' + - 'Set Shielded Instance Integrity Policy': 'instances_set-shielded-instance-integrity-policy.md' + - 'Set Tags': 'instances_set-tags.md' + - 'Simulate Maintenance Event': 'instances_simulate-maintenance-event.md' + - 'Start': 'instances_start.md' + - 'Start With Encryption Key': 'instances_start-with-encryption-key.md' + - 'Stop': 'instances_stop.md' + - 'Suspend': 'instances_suspend.md' + - 'Test Iam Permissions': 'instances_test-iam-permissions.md' + - 'Update': 'instances_update.md' + - 'Update Access Config': 'instances_update-access-config.md' + - 'Update Display Device': 'instances_update-display-device.md' + - 'Update Network Interface': 'instances_update-network-interface.md' + - 'Update Shielded Instance Config': 'instances_update-shielded-instance-config.md' +- 'Interconnect Attachments': + - 'Aggregated List': 'interconnect-attachments_aggregated-list.md' + - 'Delete': 'interconnect-attachments_delete.md' + - 'Get': 'interconnect-attachments_get.md' + - 'Insert': 'interconnect-attachments_insert.md' + - 'List': 'interconnect-attachments_list.md' + - 'Patch': 'interconnect-attachments_patch.md' + - 'Set Labels': 'interconnect-attachments_set-labels.md' +- 'Interconnect Locations': + - 'Get': 'interconnect-locations_get.md' + - 'List': 'interconnect-locations_list.md' +- 'Interconnects': + - 'Delete': 'interconnects_delete.md' + - 'Get': 'interconnects_get.md' + - 'Get Diagnostics': 'interconnects_get-diagnostics.md' + - 'Insert': 'interconnects_insert.md' + - 'List': 'interconnects_list.md' + - 'Patch': 'interconnects_patch.md' + - 'Set Labels': 'interconnects_set-labels.md' +- 'License Codes': + - 'Get': 'license-codes_get.md' + - 'Test Iam Permissions': 'license-codes_test-iam-permissions.md' +- 'Licenses': + - 'Delete': 'licenses_delete.md' + - 'Get': 'licenses_get.md' + - 'Get Iam Policy': 'licenses_get-iam-policy.md' + - 'Insert': 'licenses_insert.md' + - 'List': 'licenses_list.md' + - 'Set Iam Policy': 
'licenses_set-iam-policy.md' + - 'Test Iam Permissions': 'licenses_test-iam-permissions.md' +- 'Machine Images': + - 'Delete': 'machine-images_delete.md' + - 'Get': 'machine-images_get.md' + - 'Get Iam Policy': 'machine-images_get-iam-policy.md' + - 'Insert': 'machine-images_insert.md' + - 'List': 'machine-images_list.md' + - 'Set Iam Policy': 'machine-images_set-iam-policy.md' + - 'Test Iam Permissions': 'machine-images_test-iam-permissions.md' +- 'Machine Types': + - 'Aggregated List': 'machine-types_aggregated-list.md' + - 'Get': 'machine-types_get.md' + - 'List': 'machine-types_list.md' +- 'Network Attachments': + - 'Aggregated List': 'network-attachments_aggregated-list.md' + - 'Delete': 'network-attachments_delete.md' + - 'Get': 'network-attachments_get.md' + - 'Get Iam Policy': 'network-attachments_get-iam-policy.md' + - 'Insert': 'network-attachments_insert.md' + - 'List': 'network-attachments_list.md' + - 'Set Iam Policy': 'network-attachments_set-iam-policy.md' + - 'Test Iam Permissions': 'network-attachments_test-iam-permissions.md' +- 'Network Edge Security Services': + - 'Aggregated List': 'network-edge-security-services_aggregated-list.md' + - 'Delete': 'network-edge-security-services_delete.md' + - 'Get': 'network-edge-security-services_get.md' + - 'Insert': 'network-edge-security-services_insert.md' + - 'Patch': 'network-edge-security-services_patch.md' +- 'Network Endpoint Groups': + - 'Aggregated List': 'network-endpoint-groups_aggregated-list.md' + - 'Attach Network Endpoints': 'network-endpoint-groups_attach-network-endpoints.md' + - 'Delete': 'network-endpoint-groups_delete.md' + - 'Detach Network Endpoints': 'network-endpoint-groups_detach-network-endpoints.md' + - 'Get': 'network-endpoint-groups_get.md' + - 'Insert': 'network-endpoint-groups_insert.md' + - 'List': 'network-endpoint-groups_list.md' + - 'List Network Endpoints': 'network-endpoint-groups_list-network-endpoints.md' + - 'Test Iam Permissions': 
'network-endpoint-groups_test-iam-permissions.md' +- 'Network Firewall Policies': + - 'Add Association': 'network-firewall-policies_add-association.md' + - 'Add Rule': 'network-firewall-policies_add-rule.md' + - 'Clone Rules': 'network-firewall-policies_clone-rules.md' + - 'Delete': 'network-firewall-policies_delete.md' + - 'Get': 'network-firewall-policies_get.md' + - 'Get Association': 'network-firewall-policies_get-association.md' + - 'Get Iam Policy': 'network-firewall-policies_get-iam-policy.md' + - 'Get Rule': 'network-firewall-policies_get-rule.md' + - 'Insert': 'network-firewall-policies_insert.md' + - 'List': 'network-firewall-policies_list.md' + - 'Patch': 'network-firewall-policies_patch.md' + - 'Patch Rule': 'network-firewall-policies_patch-rule.md' + - 'Remove Association': 'network-firewall-policies_remove-association.md' + - 'Remove Rule': 'network-firewall-policies_remove-rule.md' + - 'Set Iam Policy': 'network-firewall-policies_set-iam-policy.md' + - 'Test Iam Permissions': 'network-firewall-policies_test-iam-permissions.md' +- 'Networks': + - 'Add Peering': 'networks_add-peering.md' + - 'Delete': 'networks_delete.md' + - 'Get': 'networks_get.md' + - 'Get Effective Firewalls': 'networks_get-effective-firewalls.md' + - 'Insert': 'networks_insert.md' + - 'List': 'networks_list.md' + - 'List Peering Routes': 'networks_list-peering-routes.md' + - 'Patch': 'networks_patch.md' + - 'Remove Peering': 'networks_remove-peering.md' + - 'Switch To Custom Mode': 'networks_switch-to-custom-mode.md' + - 'Update Peering': 'networks_update-peering.md' +- 'Node Groups': + - 'Add Nodes': 'node-groups_add-nodes.md' + - 'Aggregated List': 'node-groups_aggregated-list.md' + - 'Delete': 'node-groups_delete.md' + - 'Delete Nodes': 'node-groups_delete-nodes.md' + - 'Get': 'node-groups_get.md' + - 'Get Iam Policy': 'node-groups_get-iam-policy.md' + - 'Insert': 'node-groups_insert.md' + - 'List': 'node-groups_list.md' + - 'List Nodes': 'node-groups_list-nodes.md' + - 
'Patch': 'node-groups_patch.md' + - 'Set Iam Policy': 'node-groups_set-iam-policy.md' + - 'Set Node Template': 'node-groups_set-node-template.md' + - 'Test Iam Permissions': 'node-groups_test-iam-permissions.md' +- 'Node Templates': + - 'Aggregated List': 'node-templates_aggregated-list.md' + - 'Delete': 'node-templates_delete.md' + - 'Get': 'node-templates_get.md' + - 'Get Iam Policy': 'node-templates_get-iam-policy.md' + - 'Insert': 'node-templates_insert.md' + - 'List': 'node-templates_list.md' + - 'Set Iam Policy': 'node-templates_set-iam-policy.md' + - 'Test Iam Permissions': 'node-templates_test-iam-permissions.md' +- 'Node Types': + - 'Aggregated List': 'node-types_aggregated-list.md' + - 'Get': 'node-types_get.md' + - 'List': 'node-types_list.md' +- 'Packet Mirrorings': + - 'Aggregated List': 'packet-mirrorings_aggregated-list.md' + - 'Delete': 'packet-mirrorings_delete.md' + - 'Get': 'packet-mirrorings_get.md' + - 'Insert': 'packet-mirrorings_insert.md' + - 'List': 'packet-mirrorings_list.md' + - 'Patch': 'packet-mirrorings_patch.md' + - 'Test Iam Permissions': 'packet-mirrorings_test-iam-permissions.md' +- 'Projects': + - 'Disable Xpn Host': 'projects_disable-xpn-host.md' + - 'Disable Xpn Resource': 'projects_disable-xpn-resource.md' + - 'Enable Xpn Host': 'projects_enable-xpn-host.md' + - 'Enable Xpn Resource': 'projects_enable-xpn-resource.md' + - 'Get': 'projects_get.md' + - 'Get Xpn Host': 'projects_get-xpn-host.md' + - 'Get Xpn Resources': 'projects_get-xpn-resources.md' + - 'List Xpn Hosts': 'projects_list-xpn-hosts.md' + - 'Move Disk': 'projects_move-disk.md' + - 'Move Instance': 'projects_move-instance.md' + - 'Set Common Instance Metadata': 'projects_set-common-instance-metadata.md' + - 'Set Default Network Tier': 'projects_set-default-network-tier.md' + - 'Set Usage Export Bucket': 'projects_set-usage-export-bucket.md' +- 'Public Advertised Prefixes': + - 'Delete': 'public-advertised-prefixes_delete.md' + - 'Get': 
'public-advertised-prefixes_get.md' + - 'Insert': 'public-advertised-prefixes_insert.md' + - 'List': 'public-advertised-prefixes_list.md' + - 'Patch': 'public-advertised-prefixes_patch.md' +- 'Public Delegated Prefixes': + - 'Aggregated List': 'public-delegated-prefixes_aggregated-list.md' + - 'Delete': 'public-delegated-prefixes_delete.md' + - 'Get': 'public-delegated-prefixes_get.md' + - 'Insert': 'public-delegated-prefixes_insert.md' + - 'List': 'public-delegated-prefixes_list.md' + - 'Patch': 'public-delegated-prefixes_patch.md' +- 'Region Autoscalers': + - 'Delete': 'region-autoscalers_delete.md' + - 'Get': 'region-autoscalers_get.md' + - 'Insert': 'region-autoscalers_insert.md' + - 'List': 'region-autoscalers_list.md' + - 'Patch': 'region-autoscalers_patch.md' + - 'Update': 'region-autoscalers_update.md' +- 'Region Backend Services': + - 'Delete': 'region-backend-services_delete.md' + - 'Get': 'region-backend-services_get.md' + - 'Get Health': 'region-backend-services_get-health.md' + - 'Get Iam Policy': 'region-backend-services_get-iam-policy.md' + - 'Insert': 'region-backend-services_insert.md' + - 'List': 'region-backend-services_list.md' + - 'Patch': 'region-backend-services_patch.md' + - 'Set Iam Policy': 'region-backend-services_set-iam-policy.md' + - 'Update': 'region-backend-services_update.md' +- 'Region Commitments': + - 'Aggregated List': 'region-commitments_aggregated-list.md' + - 'Get': 'region-commitments_get.md' + - 'Insert': 'region-commitments_insert.md' + - 'List': 'region-commitments_list.md' + - 'Update': 'region-commitments_update.md' +- 'Region Disk Types': + - 'Get': 'region-disk-types_get.md' + - 'List': 'region-disk-types_list.md' +- 'Region Disks': + - 'Add Resource Policies': 'region-disks_add-resource-policies.md' + - 'Create Snapshot': 'region-disks_create-snapshot.md' + - 'Delete': 'region-disks_delete.md' + - 'Get': 'region-disks_get.md' + - 'Get Iam Policy': 'region-disks_get-iam-policy.md' + - 'Insert': 
'region-disks_insert.md' + - 'List': 'region-disks_list.md' + - 'Remove Resource Policies': 'region-disks_remove-resource-policies.md' + - 'Resize': 'region-disks_resize.md' + - 'Set Iam Policy': 'region-disks_set-iam-policy.md' + - 'Set Labels': 'region-disks_set-labels.md' + - 'Test Iam Permissions': 'region-disks_test-iam-permissions.md' +- 'Region Health Check Services': + - 'Delete': 'region-health-check-services_delete.md' + - 'Get': 'region-health-check-services_get.md' + - 'Insert': 'region-health-check-services_insert.md' + - 'List': 'region-health-check-services_list.md' + - 'Patch': 'region-health-check-services_patch.md' +- 'Region Health Checks': + - 'Delete': 'region-health-checks_delete.md' + - 'Get': 'region-health-checks_get.md' + - 'Insert': 'region-health-checks_insert.md' + - 'List': 'region-health-checks_list.md' + - 'Patch': 'region-health-checks_patch.md' + - 'Update': 'region-health-checks_update.md' +- 'Region Instance Group Managers': + - 'Abandon Instances': 'region-instance-group-managers_abandon-instances.md' + - 'Apply Updates To Instances': 'region-instance-group-managers_apply-updates-to-instances.md' + - 'Create Instances': 'region-instance-group-managers_create-instances.md' + - 'Delete': 'region-instance-group-managers_delete.md' + - 'Delete Instances': 'region-instance-group-managers_delete-instances.md' + - 'Delete Per Instance Configs': 'region-instance-group-managers_delete-per-instance-configs.md' + - 'Get': 'region-instance-group-managers_get.md' + - 'Insert': 'region-instance-group-managers_insert.md' + - 'List': 'region-instance-group-managers_list.md' + - 'List Errors': 'region-instance-group-managers_list-errors.md' + - 'List Managed Instances': 'region-instance-group-managers_list-managed-instances.md' + - 'List Per Instance Configs': 'region-instance-group-managers_list-per-instance-configs.md' + - 'Patch': 'region-instance-group-managers_patch.md' + - 'Patch Per Instance Configs': 
'region-instance-group-managers_patch-per-instance-configs.md' + - 'Recreate Instances': 'region-instance-group-managers_recreate-instances.md' + - 'Resize': 'region-instance-group-managers_resize.md' + - 'Set Instance Template': 'region-instance-group-managers_set-instance-template.md' + - 'Set Target Pools': 'region-instance-group-managers_set-target-pools.md' + - 'Update Per Instance Configs': 'region-instance-group-managers_update-per-instance-configs.md' +- 'Region Instance Groups': + - 'Get': 'region-instance-groups_get.md' + - 'List': 'region-instance-groups_list.md' + - 'List Instances': 'region-instance-groups_list-instances.md' + - 'Set Named Ports': 'region-instance-groups_set-named-ports.md' +- 'Region Instances': + - 'Bulk Insert': 'region-instances_bulk-insert.md' +- 'Region Network Endpoint Groups': + - 'Delete': 'region-network-endpoint-groups_delete.md' + - 'Get': 'region-network-endpoint-groups_get.md' + - 'Insert': 'region-network-endpoint-groups_insert.md' + - 'List': 'region-network-endpoint-groups_list.md' +- 'Region Network Firewall Policies': + - 'Add Association': 'region-network-firewall-policies_add-association.md' + - 'Add Rule': 'region-network-firewall-policies_add-rule.md' + - 'Clone Rules': 'region-network-firewall-policies_clone-rules.md' + - 'Delete': 'region-network-firewall-policies_delete.md' + - 'Get': 'region-network-firewall-policies_get.md' + - 'Get Association': 'region-network-firewall-policies_get-association.md' + - 'Get Effective Firewalls': 'region-network-firewall-policies_get-effective-firewalls.md' + - 'Get Iam Policy': 'region-network-firewall-policies_get-iam-policy.md' + - 'Get Rule': 'region-network-firewall-policies_get-rule.md' + - 'Insert': 'region-network-firewall-policies_insert.md' + - 'List': 'region-network-firewall-policies_list.md' + - 'Patch': 'region-network-firewall-policies_patch.md' + - 'Patch Rule': 'region-network-firewall-policies_patch-rule.md' + - 'Remove Association': 
'region-network-firewall-policies_remove-association.md' + - 'Remove Rule': 'region-network-firewall-policies_remove-rule.md' + - 'Set Iam Policy': 'region-network-firewall-policies_set-iam-policy.md' + - 'Test Iam Permissions': 'region-network-firewall-policies_test-iam-permissions.md' +- 'Region Notification Endpoints': + - 'Delete': 'region-notification-endpoints_delete.md' + - 'Get': 'region-notification-endpoints_get.md' + - 'Insert': 'region-notification-endpoints_insert.md' + - 'List': 'region-notification-endpoints_list.md' +- 'Region Operations': + - 'Delete': 'region-operations_delete.md' + - 'Get': 'region-operations_get.md' + - 'List': 'region-operations_list.md' + - 'Wait': 'region-operations_wait.md' +- 'Region Security Policies': + - 'Delete': 'region-security-policies_delete.md' + - 'Get': 'region-security-policies_get.md' + - 'Insert': 'region-security-policies_insert.md' + - 'List': 'region-security-policies_list.md' + - 'Patch': 'region-security-policies_patch.md' +- 'Region Ssl Certificates': + - 'Delete': 'region-ssl-certificates_delete.md' + - 'Get': 'region-ssl-certificates_get.md' + - 'Insert': 'region-ssl-certificates_insert.md' + - 'List': 'region-ssl-certificates_list.md' +- 'Region Ssl Policies': + - 'Delete': 'region-ssl-policies_delete.md' + - 'Get': 'region-ssl-policies_get.md' + - 'Insert': 'region-ssl-policies_insert.md' + - 'List': 'region-ssl-policies_list.md' + - 'List Available Features': 'region-ssl-policies_list-available-features.md' + - 'Patch': 'region-ssl-policies_patch.md' +- 'Region Target Http Proxies': + - 'Delete': 'region-target-http-proxies_delete.md' + - 'Get': 'region-target-http-proxies_get.md' + - 'Insert': 'region-target-http-proxies_insert.md' + - 'List': 'region-target-http-proxies_list.md' + - 'Set Url Map': 'region-target-http-proxies_set-url-map.md' +- 'Region Target Https Proxies': + - 'Delete': 'region-target-https-proxies_delete.md' + - 'Get': 'region-target-https-proxies_get.md' + - 'Insert': 
'region-target-https-proxies_insert.md' + - 'List': 'region-target-https-proxies_list.md' + - 'Patch': 'region-target-https-proxies_patch.md' + - 'Set Ssl Certificates': 'region-target-https-proxies_set-ssl-certificates.md' + - 'Set Url Map': 'region-target-https-proxies_set-url-map.md' +- 'Region Target Tcp Proxies': + - 'Delete': 'region-target-tcp-proxies_delete.md' + - 'Get': 'region-target-tcp-proxies_get.md' + - 'Insert': 'region-target-tcp-proxies_insert.md' + - 'List': 'region-target-tcp-proxies_list.md' +- 'Region Url Maps': + - 'Delete': 'region-url-maps_delete.md' + - 'Get': 'region-url-maps_get.md' + - 'Insert': 'region-url-maps_insert.md' + - 'List': 'region-url-maps_list.md' + - 'Patch': 'region-url-maps_patch.md' + - 'Update': 'region-url-maps_update.md' + - 'Validate': 'region-url-maps_validate.md' +- 'Regions': + - 'Get': 'regions_get.md' + - 'List': 'regions_list.md' +- 'Reservations': + - 'Aggregated List': 'reservations_aggregated-list.md' + - 'Delete': 'reservations_delete.md' + - 'Get': 'reservations_get.md' + - 'Get Iam Policy': 'reservations_get-iam-policy.md' + - 'Insert': 'reservations_insert.md' + - 'List': 'reservations_list.md' + - 'Resize': 'reservations_resize.md' + - 'Set Iam Policy': 'reservations_set-iam-policy.md' + - 'Test Iam Permissions': 'reservations_test-iam-permissions.md' + - 'Update': 'reservations_update.md' +- 'Resource Policies': + - 'Aggregated List': 'resource-policies_aggregated-list.md' + - 'Delete': 'resource-policies_delete.md' + - 'Get': 'resource-policies_get.md' + - 'Get Iam Policy': 'resource-policies_get-iam-policy.md' + - 'Insert': 'resource-policies_insert.md' + - 'List': 'resource-policies_list.md' + - 'Set Iam Policy': 'resource-policies_set-iam-policy.md' + - 'Test Iam Permissions': 'resource-policies_test-iam-permissions.md' +- 'Routers': + - 'Aggregated List': 'routers_aggregated-list.md' + - 'Delete': 'routers_delete.md' + - 'Get': 'routers_get.md' + - 'Get Nat Mapping Info': 
'routers_get-nat-mapping-info.md' + - 'Get Router Status': 'routers_get-router-status.md' + - 'Insert': 'routers_insert.md' + - 'List': 'routers_list.md' + - 'Patch': 'routers_patch.md' + - 'Preview': 'routers_preview.md' + - 'Update': 'routers_update.md' +- 'Routes': + - 'Delete': 'routes_delete.md' + - 'Get': 'routes_get.md' + - 'Insert': 'routes_insert.md' + - 'List': 'routes_list.md' +- 'Security Policies': + - 'Add Rule': 'security-policies_add-rule.md' + - 'Aggregated List': 'security-policies_aggregated-list.md' + - 'Delete': 'security-policies_delete.md' + - 'Get': 'security-policies_get.md' + - 'Get Rule': 'security-policies_get-rule.md' + - 'Insert': 'security-policies_insert.md' + - 'List': 'security-policies_list.md' + - 'List Preconfigured Expression Sets': 'security-policies_list-preconfigured-expression-sets.md' + - 'Patch': 'security-policies_patch.md' + - 'Patch Rule': 'security-policies_patch-rule.md' + - 'Remove Rule': 'security-policies_remove-rule.md' + - 'Set Labels': 'security-policies_set-labels.md' +- 'Service Attachments': + - 'Aggregated List': 'service-attachments_aggregated-list.md' + - 'Delete': 'service-attachments_delete.md' + - 'Get': 'service-attachments_get.md' + - 'Get Iam Policy': 'service-attachments_get-iam-policy.md' + - 'Insert': 'service-attachments_insert.md' + - 'List': 'service-attachments_list.md' + - 'Patch': 'service-attachments_patch.md' + - 'Set Iam Policy': 'service-attachments_set-iam-policy.md' + - 'Test Iam Permissions': 'service-attachments_test-iam-permissions.md' +- 'Snapshots': + - 'Delete': 'snapshots_delete.md' + - 'Get': 'snapshots_get.md' + - 'Get Iam Policy': 'snapshots_get-iam-policy.md' + - 'Insert': 'snapshots_insert.md' + - 'List': 'snapshots_list.md' + - 'Set Iam Policy': 'snapshots_set-iam-policy.md' + - 'Set Labels': 'snapshots_set-labels.md' + - 'Test Iam Permissions': 'snapshots_test-iam-permissions.md' +- 'Ssl Certificates': + - 'Aggregated List': 'ssl-certificates_aggregated-list.md' + - 
'Delete': 'ssl-certificates_delete.md' + - 'Get': 'ssl-certificates_get.md' + - 'Insert': 'ssl-certificates_insert.md' + - 'List': 'ssl-certificates_list.md' +- 'Ssl Policies': + - 'Aggregated List': 'ssl-policies_aggregated-list.md' + - 'Delete': 'ssl-policies_delete.md' + - 'Get': 'ssl-policies_get.md' + - 'Insert': 'ssl-policies_insert.md' + - 'List': 'ssl-policies_list.md' + - 'List Available Features': 'ssl-policies_list-available-features.md' + - 'Patch': 'ssl-policies_patch.md' +- 'Subnetworks': + - 'Aggregated List': 'subnetworks_aggregated-list.md' + - 'Delete': 'subnetworks_delete.md' + - 'Expand Ip Cidr Range': 'subnetworks_expand-ip-cidr-range.md' + - 'Get': 'subnetworks_get.md' + - 'Get Iam Policy': 'subnetworks_get-iam-policy.md' + - 'Insert': 'subnetworks_insert.md' + - 'List': 'subnetworks_list.md' + - 'List Usable': 'subnetworks_list-usable.md' + - 'Patch': 'subnetworks_patch.md' + - 'Set Iam Policy': 'subnetworks_set-iam-policy.md' + - 'Set Private Ip Google Access': 'subnetworks_set-private-ip-google-access.md' + - 'Test Iam Permissions': 'subnetworks_test-iam-permissions.md' +- 'Target Grpc Proxies': + - 'Delete': 'target-grpc-proxies_delete.md' + - 'Get': 'target-grpc-proxies_get.md' + - 'Insert': 'target-grpc-proxies_insert.md' + - 'List': 'target-grpc-proxies_list.md' + - 'Patch': 'target-grpc-proxies_patch.md' +- 'Target Http Proxies': + - 'Aggregated List': 'target-http-proxies_aggregated-list.md' + - 'Delete': 'target-http-proxies_delete.md' + - 'Get': 'target-http-proxies_get.md' + - 'Insert': 'target-http-proxies_insert.md' + - 'List': 'target-http-proxies_list.md' + - 'Patch': 'target-http-proxies_patch.md' + - 'Set Url Map': 'target-http-proxies_set-url-map.md' +- 'Target Https Proxies': + - 'Aggregated List': 'target-https-proxies_aggregated-list.md' + - 'Delete': 'target-https-proxies_delete.md' + - 'Get': 'target-https-proxies_get.md' + - 'Insert': 'target-https-proxies_insert.md' + - 'List': 'target-https-proxies_list.md' + - 
'Patch': 'target-https-proxies_patch.md' + - 'Set Certificate Map': 'target-https-proxies_set-certificate-map.md' + - 'Set Quic Override': 'target-https-proxies_set-quic-override.md' + - 'Set Ssl Certificates': 'target-https-proxies_set-ssl-certificates.md' + - 'Set Ssl Policy': 'target-https-proxies_set-ssl-policy.md' + - 'Set Url Map': 'target-https-proxies_set-url-map.md' +- 'Target Instances': + - 'Aggregated List': 'target-instances_aggregated-list.md' + - 'Delete': 'target-instances_delete.md' + - 'Get': 'target-instances_get.md' + - 'Insert': 'target-instances_insert.md' + - 'List': 'target-instances_list.md' +- 'Target Pools': + - 'Add Health Check': 'target-pools_add-health-check.md' + - 'Add Instance': 'target-pools_add-instance.md' + - 'Aggregated List': 'target-pools_aggregated-list.md' + - 'Delete': 'target-pools_delete.md' + - 'Get': 'target-pools_get.md' + - 'Get Health': 'target-pools_get-health.md' + - 'Insert': 'target-pools_insert.md' + - 'List': 'target-pools_list.md' + - 'Remove Health Check': 'target-pools_remove-health-check.md' + - 'Remove Instance': 'target-pools_remove-instance.md' + - 'Set Backup': 'target-pools_set-backup.md' +- 'Target Ssl Proxies': + - 'Delete': 'target-ssl-proxies_delete.md' + - 'Get': 'target-ssl-proxies_get.md' + - 'Insert': 'target-ssl-proxies_insert.md' + - 'List': 'target-ssl-proxies_list.md' + - 'Set Backend Service': 'target-ssl-proxies_set-backend-service.md' + - 'Set Certificate Map': 'target-ssl-proxies_set-certificate-map.md' + - 'Set Proxy Header': 'target-ssl-proxies_set-proxy-header.md' + - 'Set Ssl Certificates': 'target-ssl-proxies_set-ssl-certificates.md' + - 'Set Ssl Policy': 'target-ssl-proxies_set-ssl-policy.md' +- 'Target Tcp Proxies': + - 'Aggregated List': 'target-tcp-proxies_aggregated-list.md' + - 'Delete': 'target-tcp-proxies_delete.md' + - 'Get': 'target-tcp-proxies_get.md' + - 'Insert': 'target-tcp-proxies_insert.md' + - 'List': 'target-tcp-proxies_list.md' + - 'Set Backend Service': 
'target-tcp-proxies_set-backend-service.md' + - 'Set Proxy Header': 'target-tcp-proxies_set-proxy-header.md' +- 'Target Vpn Gateways': + - 'Aggregated List': 'target-vpn-gateways_aggregated-list.md' + - 'Delete': 'target-vpn-gateways_delete.md' + - 'Get': 'target-vpn-gateways_get.md' + - 'Insert': 'target-vpn-gateways_insert.md' + - 'List': 'target-vpn-gateways_list.md' + - 'Set Labels': 'target-vpn-gateways_set-labels.md' +- 'Url Maps': + - 'Aggregated List': 'url-maps_aggregated-list.md' + - 'Delete': 'url-maps_delete.md' + - 'Get': 'url-maps_get.md' + - 'Insert': 'url-maps_insert.md' + - 'Invalidate Cache': 'url-maps_invalidate-cache.md' + - 'List': 'url-maps_list.md' + - 'Patch': 'url-maps_patch.md' + - 'Update': 'url-maps_update.md' + - 'Validate': 'url-maps_validate.md' +- 'Vpn Gateways': + - 'Aggregated List': 'vpn-gateways_aggregated-list.md' + - 'Delete': 'vpn-gateways_delete.md' + - 'Get': 'vpn-gateways_get.md' + - 'Get Status': 'vpn-gateways_get-status.md' + - 'Insert': 'vpn-gateways_insert.md' + - 'List': 'vpn-gateways_list.md' + - 'Set Labels': 'vpn-gateways_set-labels.md' + - 'Test Iam Permissions': 'vpn-gateways_test-iam-permissions.md' +- 'Vpn Tunnels': + - 'Aggregated List': 'vpn-tunnels_aggregated-list.md' + - 'Delete': 'vpn-tunnels_delete.md' + - 'Get': 'vpn-tunnels_get.md' + - 'Insert': 'vpn-tunnels_insert.md' + - 'List': 'vpn-tunnels_list.md' + - 'Set Labels': 'vpn-tunnels_set-labels.md' +- 'Zone Operations': + - 'Delete': 'zone-operations_delete.md' + - 'Get': 'zone-operations_get.md' + - 'List': 'zone-operations_list.md' + - 'Wait': 'zone-operations_wait.md' +- 'Zones': + - 'Get': 'zones_get.md' + - 'List': 'zones_list.md' theme: readthedocs diff --git a/gen/compute1-cli/src/client.rs b/gen/compute1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/compute1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use 
crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/compute1-cli/src/main.rs b/gen/compute1-cli/src/main.rs index ebe04dabc7..7a6fa3b089 100644 --- a/gen/compute1-cli/src/main.rs +++ b/gen/compute1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_compute1::{api, Error, oauth2}; +use google_compute1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,7 +57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -67,10 +66,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -181,7 +180,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -190,7 +189,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -249,7 +248,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -258,10 +257,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -450,6 +449,7 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ip-version" => 
Some(("ipVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ipv6-endpoint-type" => Some(("ipv6EndpointType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -462,7 +462,7 @@ where "subnetwork" => Some(("subnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "users" => Some(("users", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "address-type", "creation-timestamp", "description", "id", "ip-version", "kind", "name", "network", "network-tier", "prefix-length", "purpose", "region", "self-link", "status", "subnetwork", "users"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "address-type", "creation-timestamp", "description", "id", "ip-version", "ipv6-endpoint-type", "kind", "name", "network", "network-tier", "prefix-length", "purpose", "region", "self-link", "status", "subnetwork", "users"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -533,7 +533,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -542,7 +542,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -594,6 +594,96 @@ where } } + async fn _addresses_set_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["label-fingerprint", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RegionSetLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.addresses().set_labels(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + 
match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _autoscalers_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.autoscalers().aggregated_list(opt.value_of("project").unwrap_or("")); @@ -601,7 +691,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -610,10 +700,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -890,7 +980,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| 
arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -899,7 +989,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1466,6 +1556,7 @@ where "cdn-policy.serve-while-stale" => Some(("cdnPolicy.serveWhileStale", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cdn-policy.signed-url-cache-max-age-sec" => Some(("cdnPolicy.signedUrlCacheMaxAgeSec", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cdn-policy.signed-url-key-names" => Some(("cdnPolicy.signedUrlKeyNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-response-headers" => Some(("customResponseHeaders", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1476,7 +1567,7 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket-name", "cache-key-policy", "cache-mode", "cdn-policy", "client-ttl", "creation-timestamp", "custom-response-headers", "default-ttl", "description", "edge-security-policy", "enable-cdn", "id", "include-http-headers", "kind", "max-ttl", "name", "negative-caching", 
"query-string-whitelist", "request-coalescing", "self-link", "serve-while-stale", "signed-url-cache-max-age-sec", "signed-url-key-names"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket-name", "cache-key-policy", "cache-mode", "cdn-policy", "client-ttl", "compression-mode", "creation-timestamp", "custom-response-headers", "default-ttl", "description", "edge-security-policy", "enable-cdn", "id", "include-http-headers", "kind", "max-ttl", "name", "negative-caching", "query-string-whitelist", "request-coalescing", "self-link", "serve-while-stale", "signed-url-cache-max-age-sec", "signed-url-key-names"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1547,7 +1638,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -1556,7 +1647,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1643,6 +1734,7 @@ where "cdn-policy.serve-while-stale" => Some(("cdnPolicy.serveWhileStale", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cdn-policy.signed-url-cache-max-age-sec" => Some(("cdnPolicy.signedUrlCacheMaxAgeSec", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cdn-policy.signed-url-key-names" => Some(("cdnPolicy.signedUrlKeyNames", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), + "compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-response-headers" => Some(("customResponseHeaders", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1653,7 +1745,7 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket-name", "cache-key-policy", "cache-mode", "cdn-policy", "client-ttl", "creation-timestamp", "custom-response-headers", "default-ttl", "description", "edge-security-policy", "enable-cdn", "id", "include-http-headers", "kind", "max-ttl", "name", "negative-caching", "query-string-whitelist", "request-coalescing", "self-link", "serve-while-stale", "signed-url-cache-max-age-sec", "signed-url-key-names"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket-name", "cache-key-policy", "cache-mode", "cdn-policy", "client-ttl", "compression-mode", "creation-timestamp", "custom-response-headers", "default-ttl", "description", "edge-security-policy", "enable-cdn", "id", "include-http-headers", "kind", "max-ttl", "name", "negative-caching", "query-string-whitelist", "request-coalescing", "self-link", "serve-while-stale", "signed-url-cache-max-age-sec", "signed-url-key-names"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1841,6 +1933,7 @@ where "cdn-policy.serve-while-stale" => Some(("cdnPolicy.serveWhileStale", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"cdn-policy.signed-url-cache-max-age-sec" => Some(("cdnPolicy.signedUrlCacheMaxAgeSec", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cdn-policy.signed-url-key-names" => Some(("cdnPolicy.signedUrlKeyNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-response-headers" => Some(("customResponseHeaders", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1851,7 +1944,7 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket-name", "cache-key-policy", "cache-mode", "cdn-policy", "client-ttl", "creation-timestamp", "custom-response-headers", "default-ttl", "description", "edge-security-policy", "enable-cdn", "id", "include-http-headers", "kind", "max-ttl", "name", "negative-caching", "query-string-whitelist", "request-coalescing", "self-link", "serve-while-stale", "signed-url-cache-max-age-sec", "signed-url-key-names"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket-name", "cache-key-policy", "cache-mode", "cdn-policy", "client-ttl", "compression-mode", "creation-timestamp", "custom-response-headers", "default-ttl", "description", "edge-security-policy", "enable-cdn", "id", "include-http-headers", "kind", "max-ttl", "name", "negative-caching", "query-string-whitelist", "request-coalescing", "self-link", "serve-while-stale", "signed-url-cache-max-age-sec", "signed-url-key-names"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2012,7 +2105,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -2021,10 +2114,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2325,6 +2418,62 @@ where } } + async fn _backend_services_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.backend_services().get_iam_policy(opt.value_of("project").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _backend_services_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2370,6 +2519,7 @@ where "circuit-breakers.max-requests" => Some(("circuitBreakers.maxRequests", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-requests-per-connection" => Some(("circuitBreakers.maxRequestsPerConnection", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-retries" => Some(("circuitBreakers.maxRetries", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-draining.draining-timeout-sec" => Some(("connectionDraining.drainingTimeoutSec", JsonTypeInfo { jtype: 
JsonType::Int, ctype: ComplexType::Pod })), "connection-tracking-policy.connection-persistence-on-unhealthy-backends" => Some(("connectionTrackingPolicy.connectionPersistenceOnUnhealthyBackends", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-tracking-policy.enable-strong-affinity" => Some(("connectionTrackingPolicy.enableStrongAffinity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2427,11 +2577,12 @@ where "security-settings.client-tls-policy" => Some(("securitySettings.clientTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings.subject-alt-names" => Some(("securitySettings.subjectAltNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-bindings" => Some(("serviceBindings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "session-affinity" => Some(("sessionAffinity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subsetting.policy" => Some(("subsetting.policy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "timeout-sec" => Some(("timeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", 
"enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "compression-mode", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", 
"enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "service-bindings", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2502,7 +2653,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -2511,7 +2662,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, 
"max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2608,6 +2759,7 @@ where "circuit-breakers.max-requests" => Some(("circuitBreakers.maxRequests", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-requests-per-connection" => Some(("circuitBreakers.maxRequestsPerConnection", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-retries" => Some(("circuitBreakers.maxRetries", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-draining.draining-timeout-sec" => Some(("connectionDraining.drainingTimeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "connection-tracking-policy.connection-persistence-on-unhealthy-backends" => Some(("connectionTrackingPolicy.connectionPersistenceOnUnhealthyBackends", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-tracking-policy.enable-strong-affinity" => Some(("connectionTrackingPolicy.enableStrongAffinity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2665,11 +2817,12 @@ where "security-settings.client-tls-policy" => Some(("securitySettings.clientTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings.subject-alt-names" => Some(("securitySettings.subjectAltNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-bindings" => Some(("serviceBindings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "session-affinity" => Some(("sessionAffinity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subsetting.policy" => Some(("subsetting.policy", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "timeout-sec" => Some(("timeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", 
"success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "compression-mode", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "service-bindings", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", 
"ttl"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2822,6 +2975,93 @@ where } } + async fn _backend_services_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GlobalSetPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.backend_services().set_iam_policy(request, opt.value_of("project").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _backend_services_set_security_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2956,6 +3196,7 @@ where "circuit-breakers.max-requests" => Some(("circuitBreakers.maxRequests", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-requests-per-connection" => Some(("circuitBreakers.maxRequestsPerConnection", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-retries" => Some(("circuitBreakers.maxRetries", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + 
"compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-draining.draining-timeout-sec" => Some(("connectionDraining.drainingTimeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "connection-tracking-policy.connection-persistence-on-unhealthy-backends" => Some(("connectionTrackingPolicy.connectionPersistenceOnUnhealthyBackends", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-tracking-policy.enable-strong-affinity" => Some(("connectionTrackingPolicy.enableStrongAffinity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3013,11 +3254,12 @@ where "security-settings.client-tls-policy" => Some(("securitySettings.clientTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings.subject-alt-names" => Some(("securitySettings.subjectAltNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-bindings" => Some(("serviceBindings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "session-affinity" => Some(("sessionAffinity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subsetting.policy" => Some(("subsetting.policy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "timeout-sec" => Some(("timeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", 
"custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "compression-mode", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", 
"disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "service-bindings", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3088,7 +3330,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ 
-3097,10 +3339,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3211,7 +3453,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -3220,7 +3462,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3368,7 +3610,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -3377,10 +3619,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, 
"max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3455,8 +3697,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "architecture" => Some(("architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auto-created" => Some(("autoCreated", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "chain-name" => Some(("chainName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creation-size-bytes" => Some(("creationSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-size-gb" => Some(("diskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3476,6 +3720,7 @@ where "snapshot-encryption-key.raw-key" => Some(("snapshotEncryptionKey.rawKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snapshot-encryption-key.rsa-encrypted-key" => Some(("snapshotEncryptionKey.rsaEncryptedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snapshot-encryption-key.sha256" => Some(("snapshotEncryptionKey.sha256", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snapshot-type" => Some(("snapshotType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk" => Some(("sourceDisk", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-encryption-key.kms-key-name" => Some(("sourceDiskEncryptionKey.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-encryption-key.kms-key-service-account" => Some(("sourceDiskEncryptionKey.kmsKeyServiceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3483,12 +3728,14 @@ where "source-disk-encryption-key.rsa-encrypted-key" => Some(("sourceDiskEncryptionKey.rsaEncryptedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-encryption-key.sha256" => Some(("sourceDiskEncryptionKey.sha256", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-id" => Some(("sourceDiskId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-snapshot-schedule-policy" => Some(("sourceSnapshotSchedulePolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-snapshot-schedule-policy-id" => Some(("sourceSnapshotSchedulePolicyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-bytes" => Some(("storageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-bytes-status" => Some(("storageBytesStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-locations" => Some(("storageLocations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-created", "chain-name", "creation-timestamp", "description", "disk-size-gb", "download-bytes", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "location-hint", "name", "raw-key", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha256", "snapshot-encryption-key", 
"source-disk", "source-disk-encryption-key", "source-disk-id", "status", "storage-bytes", "storage-bytes-status", "storage-locations"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["architecture", "auto-created", "chain-name", "creation-size-bytes", "creation-timestamp", "description", "disk-size-gb", "download-bytes", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "location-hint", "name", "raw-key", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha256", "snapshot-encryption-key", "snapshot-type", "source-disk", "source-disk-encryption-key", "source-disk-id", "source-snapshot-schedule-policy", "source-snapshot-schedule-policy-id", "status", "storage-bytes", "storage-bytes-status", "storage-locations"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3506,7 +3753,7 @@ where call = call.request_id(value.unwrap_or("")); }, "guest-flush" => { - call = call.guest_flush(arg_from_str(value.unwrap_or("false"), err, "guest-flush", "boolean")); + call = call.guest_flush( value.map(|v| arg_from_str(v, err, "guest-flush", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3670,7 +3917,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3742,6 +3989,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "architecture" => Some(("architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-encryption-key.kms-key-name" => Some(("diskEncryptionKey.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3760,6 +4008,7 @@ where "location-hint" => Some(("locationHint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "options" => Some(("options", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "params.resource-manager-tags" => Some(("params.resourceManagerTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "physical-block-size-bytes" => Some(("physicalBlockSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "provisioned-iops" => Some(("provisionedIops", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3790,7 +4039,7 @@ where "users" => Some(("users", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "disk-encryption-key", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "last-attach-timestamp", "last-detach-timestamp", "license-codes", "licenses", "location-hint", "name", "options", "physical-block-size-bytes", "provisioned-iops", "raw-key", "region", "replica-zones", "resource-policies", "rsa-encrypted-key", "satisfies-pzs", 
"self-link", "sha256", "size-gb", "source-disk", "source-disk-id", "source-image", "source-image-encryption-key", "source-image-id", "source-snapshot", "source-snapshot-encryption-key", "source-snapshot-id", "source-storage-object", "status", "type", "users", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["architecture", "creation-timestamp", "description", "disk-encryption-key", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "last-attach-timestamp", "last-detach-timestamp", "license-codes", "licenses", "location-hint", "name", "options", "params", "physical-block-size-bytes", "provisioned-iops", "raw-key", "region", "replica-zones", "resource-manager-tags", "resource-policies", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha256", "size-gb", "source-disk", "source-disk-id", "source-image", "source-image-encryption-key", "source-image-id", "source-snapshot", "source-snapshot-encryption-key", "source-snapshot-id", "source-storage-object", "status", "type", "users", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3864,7 +4113,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -3873,7 +4122,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4577,7 +4826,7 @@ 
where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -4586,7 +4835,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4856,7 +5105,7 @@ where call = call.request_id(value.unwrap_or("")); }, "replace-existing-association" => { - call = call.replace_existing_association(arg_from_str(value.unwrap_or("false"), err, "replace-existing-association", "boolean")); + call = call.replace_existing_association( value.map(|v| arg_from_str(v, err, "replace-existing-association", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4937,11 +5186,12 @@ where "match.dest-ip-ranges" => Some(("match.destIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "match.src-ip-ranges" => Some(("match.srcIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rule-name" => Some(("ruleName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rule-tuple-count" => Some(("ruleTupleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "target-resources" => Some(("targetResources", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "target-service-accounts" => Some(("targetServiceAccounts", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-name", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5235,7 +5485,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5291,7 +5541,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5451,7 +5701,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "parent-id" => { call = call.parent_id(value.unwrap_or("")); @@ -5463,7 +5713,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5763,11 +6013,12 @@ where "match.dest-ip-ranges" => Some(("match.destIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "match.src-ip-ranges" => Some(("match.srcIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rule-name" => Some(("ruleName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rule-tuple-count" => Some(("ruleTupleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "target-resources" => Some(("targetResources", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "target-service-accounts" => Some(("targetServiceAccounts", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-name", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5785,7 +6036,7 @@ where call = call.request_id(value.unwrap_or("")); }, "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5903,7 +6154,7 @@ where call = call.request_id(value.unwrap_or("")); }, "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6345,7 +6596,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -6354,7 +6605,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6625,7 +6876,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -6634,10 +6885,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, 
"include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6838,6 +7089,7 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-tier" => Some(("networkTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "no-automate-dns-zone" => Some(("noAutomateDnsZone", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "port-range" => Some(("portRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ports" => Some(("ports", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "psc-connection-id" => Some(("pscConnectionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -6849,7 +7101,7 @@ where "subnetwork" => Some(("subnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "target" => Some(("target", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ip-address", "ip-protocol", "all-ports", "allow-global-access", "backend-service", "creation-timestamp", "description", "fingerprint", "id", "ip-version", "is-mirroring-collector", "kind", "label-fingerprint", "labels", "load-balancing-scheme", "name", "network", "network-tier", "port-range", "ports", "psc-connection-id", "psc-connection-status", "region", "self-link", "service-label", "service-name", "subnetwork", "target"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ip-address", "ip-protocol", "all-ports", "allow-global-access", "backend-service", "creation-timestamp", "description", "fingerprint", "id", "ip-version", "is-mirroring-collector", "kind", "label-fingerprint", "labels", 
"load-balancing-scheme", "name", "network", "network-tier", "no-automate-dns-zone", "port-range", "ports", "psc-connection-id", "psc-connection-status", "region", "self-link", "service-label", "service-name", "subnetwork", "target"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6920,7 +7172,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -6929,7 +7181,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7022,6 +7274,7 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-tier" => Some(("networkTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "no-automate-dns-zone" => Some(("noAutomateDnsZone", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "port-range" => Some(("portRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ports" => Some(("ports", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "psc-connection-id" => Some(("pscConnectionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -7033,7 +7286,7 @@ where "subnetwork" => Some(("subnetwork", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "target" => Some(("target", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ip-address", "ip-protocol", "all-ports", "allow-global-access", "backend-service", "creation-timestamp", "description", "fingerprint", "id", "ip-version", "is-mirroring-collector", "kind", "label-fingerprint", "labels", "load-balancing-scheme", "name", "network", "network-tier", "port-range", "ports", "psc-connection-id", "psc-connection-status", "region", "self-link", "service-label", "service-name", "subnetwork", "target"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ip-address", "ip-protocol", "all-ports", "allow-global-access", "backend-service", "creation-timestamp", "description", "fingerprint", "id", "ip-version", "is-mirroring-collector", "kind", "label-fingerprint", "labels", "load-balancing-scheme", "name", "network", "network-tier", "no-automate-dns-zone", "port-range", "ports", "psc-connection-id", "psc-connection-status", "region", "self-link", "service-label", "service-name", "subnetwork", "target"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -7413,6 +7666,7 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ip-version" => Some(("ipVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ipv6-endpoint-type" => Some(("ipv6EndpointType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), @@ -7425,7 +7679,7 @@ where "subnetwork" => Some(("subnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "users" => Some(("users", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "address-type", "creation-timestamp", "description", "id", "ip-version", "kind", "name", "network", "network-tier", "prefix-length", "purpose", "region", "self-link", "status", "subnetwork", "users"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "address-type", "creation-timestamp", "description", "id", "ip-version", "ipv6-endpoint-type", "kind", "name", "network", "network-tier", "prefix-length", "purpose", "region", "self-link", "status", "subnetwork", "users"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -7496,7 +7750,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -7505,7 +7759,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7557,6 +7811,92 @@ where } } + async fn _global_addresses_set_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = 
json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["label-fingerprint", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GlobalSetLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.global_addresses().set_labels(request, opt.value_of("project").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v 
} )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _global_forwarding_rules_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.global_forwarding_rules().delete(opt.value_of("project").unwrap_or(""), opt.value_of("forwarding-rule").unwrap_or("")); @@ -7706,6 +8046,7 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-tier" => Some(("networkTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "no-automate-dns-zone" => Some(("noAutomateDnsZone", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "port-range" => Some(("portRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ports" => Some(("ports", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "psc-connection-id" => Some(("pscConnectionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -7717,7 +8058,7 @@ where "subnetwork" => Some(("subnetwork", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "target" => Some(("target", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ip-address", "ip-protocol", "all-ports", "allow-global-access", "backend-service", "creation-timestamp", "description", "fingerprint", "id", "ip-version", "is-mirroring-collector", "kind", "label-fingerprint", "labels", "load-balancing-scheme", "name", "network", "network-tier", "port-range", "ports", "psc-connection-id", "psc-connection-status", "region", "self-link", "service-label", "service-name", "subnetwork", "target"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ip-address", "ip-protocol", "all-ports", "allow-global-access", "backend-service", "creation-timestamp", "description", "fingerprint", "id", "ip-version", "is-mirroring-collector", "kind", "label-fingerprint", "labels", "load-balancing-scheme", "name", "network", "network-tier", "no-automate-dns-zone", "port-range", "ports", "psc-connection-id", "psc-connection-status", "region", "self-link", "service-label", "service-name", "subnetwork", "target"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -7788,7 +8129,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -7797,7 +8138,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, 
"max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7890,6 +8231,7 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-tier" => Some(("networkTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "no-automate-dns-zone" => Some(("noAutomateDnsZone", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "port-range" => Some(("portRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ports" => Some(("ports", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "psc-connection-id" => Some(("pscConnectionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -7901,7 +8243,7 @@ where "subnetwork" => Some(("subnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "target" => Some(("target", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ip-address", "ip-protocol", "all-ports", "allow-global-access", "backend-service", "creation-timestamp", "description", "fingerprint", "id", "ip-version", "is-mirroring-collector", "kind", "label-fingerprint", "labels", "load-balancing-scheme", "name", "network", "network-tier", "port-range", "ports", "psc-connection-id", "psc-connection-status", "region", "self-link", "service-label", "service-name", "subnetwork", "target"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ip-address", "ip-protocol", "all-ports", "allow-global-access", "backend-service", "creation-timestamp", "description", "fingerprint", "id", "ip-version", "is-mirroring-collector", "kind", "label-fingerprint", "labels", "load-balancing-scheme", "name", "network", "network-tier", "no-automate-dns-zone", "port-range", "ports", "psc-connection-id", 
"psc-connection-status", "region", "self-link", "service-label", "service-name", "subnetwork", "target"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -8464,6 +8806,9 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-endpoint-type" => Some(("networkEndpointType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.consumer-psc-address" => Some(("pscData.consumerPscAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.psc-connection-id" => Some(("pscData.pscConnectionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.psc-connection-status" => Some(("pscData.pscConnectionStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "psc-target-service" => Some(("pscTargetService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -8471,7 +8816,7 @@ where "subnetwork" => Some(("subnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "app-engine", "cloud-function", "cloud-run", "creation-timestamp", "default-port", "description", "function", "id", "kind", "name", "network", "network-endpoint-type", "psc-target-service", "region", "self-link", "service", "size", "subnetwork", "tag", "url-mask", "version", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "app-engine", 
"cloud-function", "cloud-run", "consumer-psc-address", "creation-timestamp", "default-port", "description", "function", "id", "kind", "name", "network", "network-endpoint-type", "psc-connection-id", "psc-connection-status", "psc-data", "psc-target-service", "region", "self-link", "service", "size", "subnetwork", "tag", "url-mask", "version", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -8542,7 +8887,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -8551,7 +8896,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8610,7 +8955,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -8619,7 +8964,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", 
"uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8678,7 +9023,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -8687,10 +9032,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8845,7 +9190,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -8854,7 +9199,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -9069,7 +9414,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "parent-id" => { call = call.parent_id(value.unwrap_or("")); @@ -9081,7 +9426,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -9348,7 +9693,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -9357,7 +9702,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -9516,7 +9861,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -9525,10 +9870,10 @@ where call = 
call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -9833,7 +10178,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -9842,7 +10187,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -10386,7 +10731,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -10395,7 +10740,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + 
call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -10865,7 +11210,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -10874,7 +11219,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -11440,7 +11785,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11512,6 +11857,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "architecture" => Some(("architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "archive-size-bytes" => Some(("archiveSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deprecated.deleted" => Some(("deprecated.deleted", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -11566,7 +11912,7 @@ where "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-locations" => Some(("storageLocations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-size-bytes", "container-type", "content", "creation-timestamp", "deleted", "deprecated", "description", "disk-size-gb", "family", "file-type", "id", "image-encryption-key", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "name", "obsolete", "pk", "raw-disk", "raw-key", "replacement", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha1-checksum", "sha256", "shielded-instance-initial-state", "source", "source-disk", "source-disk-encryption-key", "source-disk-id", "source-image", "source-image-encryption-key", "source-image-id", "source-snapshot", "source-snapshot-encryption-key", "source-snapshot-id", "source-type", "state", "status", "storage-locations"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["architecture", "archive-size-bytes", "container-type", "content", "creation-timestamp", "deleted", "deprecated", "description", "disk-size-gb", "family", "file-type", "id", "image-encryption-key", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "name", "obsolete", "pk", "raw-disk", "raw-key", "replacement", "rsa-encrypted-key", "satisfies-pzs", 
"self-link", "sha1-checksum", "sha256", "shielded-instance-initial-state", "source", "source-disk", "source-disk-encryption-key", "source-disk-id", "source-image", "source-image-encryption-key", "source-image-id", "source-snapshot", "source-snapshot-encryption-key", "source-snapshot-id", "source-type", "state", "status", "storage-locations"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -11584,7 +11930,7 @@ where call = call.request_id(value.unwrap_or("")); }, "force-create" => { - call = call.force_create(arg_from_str(value.unwrap_or("false"), err, "force-create", "boolean")); + call = call.force_create( value.map(|v| arg_from_str(v, err, "force-create", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -11640,7 +11986,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -11649,7 +11995,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -11724,6 +12070,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "architecture" => Some(("architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "archive-size-bytes" => Some(("archiveSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deprecated.deleted" => Some(("deprecated.deleted", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -11778,7 +12125,7 @@ where "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-locations" => Some(("storageLocations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-size-bytes", "container-type", "content", "creation-timestamp", "deleted", "deprecated", "description", "disk-size-gb", "family", "file-type", "id", "image-encryption-key", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "name", "obsolete", "pk", "raw-disk", "raw-key", "replacement", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha1-checksum", "sha256", "shielded-instance-initial-state", "source", "source-disk", "source-disk-encryption-key", "source-disk-id", "source-image", "source-image-encryption-key", "source-image-id", "source-snapshot", "source-snapshot-encryption-key", "source-snapshot-id", "source-type", "state", "status", "storage-locations"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["architecture", "archive-size-bytes", "container-type", "content", "creation-timestamp", "deleted", "deprecated", "description", "disk-size-gb", "family", "file-type", "id", "image-encryption-key", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "name", "obsolete", "pk", "raw-disk", "raw-key", "replacement", "rsa-encrypted-key", "satisfies-pzs", 
"self-link", "sha1-checksum", "sha256", "shielded-instance-initial-state", "source", "source-disk", "source-disk-encryption-key", "source-disk-id", "source-image", "source-image-encryption-key", "source-image-id", "source-snapshot", "source-snapshot-encryption-key", "source-snapshot-id", "source-type", "state", "status", "storage-locations"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -12196,7 +12543,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -12205,10 +12552,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -12764,6 +13111,7 @@ where "instance-group" => Some(("instanceGroup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-template" => Some(("instanceTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "list-managed-instances-results" => Some(("listManagedInstancesResults", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -12787,7 +13135,7 @@ where "update-policy.type" => Some(("updatePolicy.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoning", "all-effective", "autoscaler", "base-instance-name", "calculated", "creating", "creating-without-retries", "creation-timestamp", "current-actions", "deleting", "description", "distribution-policy", "fingerprint", "fixed", "has-stateful-config", "id", "instance-group", "instance-redistribution-type", "instance-template", "is-reached", "is-stable", "kind", "max-surge", "max-unavailable", "minimal-action", "most-disruptive-allowed-action", "name", "none", "per-instance-configs", "percent", "recreating", "refreshing", "region", "replacement-method", "restarting", "resuming", "self-link", "starting", "stateful", "status", "stopping", "suspending", "target-pools", "target-shape", "target-size", "type", "update-policy", "verifying", "version-target", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoning", "all-effective", "autoscaler", "base-instance-name", "calculated", "creating", "creating-without-retries", "creation-timestamp", "current-actions", "deleting", "description", "distribution-policy", "fingerprint", "fixed", "has-stateful-config", "id", "instance-group", "instance-redistribution-type", "instance-template", "is-reached", "is-stable", "kind", "list-managed-instances-results", "max-surge", "max-unavailable", "minimal-action", "most-disruptive-allowed-action", "name", "none", "per-instance-configs", 
"percent", "recreating", "refreshing", "region", "replacement-method", "restarting", "resuming", "self-link", "starting", "stateful", "status", "stopping", "suspending", "target-pools", "target-shape", "target-size", "type", "update-policy", "verifying", "version-target", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -12858,7 +13206,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -12867,7 +13215,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -12926,7 +13274,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -12935,7 +13283,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ 
-12994,7 +13342,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -13003,7 +13351,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -13062,7 +13410,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -13071,7 +13419,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -13168,6 +13516,7 @@ where "instance-group" => Some(("instanceGroup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-template" => Some(("instanceTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "list-managed-instances-results" => 
Some(("listManagedInstancesResults", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -13191,7 +13540,7 @@ where "update-policy.type" => Some(("updatePolicy.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoning", "all-effective", "autoscaler", "base-instance-name", "calculated", "creating", "creating-without-retries", "creation-timestamp", "current-actions", "deleting", "description", "distribution-policy", "fingerprint", "fixed", "has-stateful-config", "id", "instance-group", "instance-redistribution-type", "instance-template", "is-reached", "is-stable", "kind", "max-surge", "max-unavailable", "minimal-action", "most-disruptive-allowed-action", "name", "none", "per-instance-configs", "percent", "recreating", "refreshing", "region", "replacement-method", "restarting", "resuming", "self-link", "starting", "stateful", "status", "stopping", "suspending", "target-pools", "target-shape", "target-size", "type", "update-policy", "verifying", "version-target", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoning", "all-effective", "autoscaler", "base-instance-name", "calculated", "creating", "creating-without-retries", "creation-timestamp", "current-actions", "deleting", "description", "distribution-policy", "fingerprint", "fixed", "has-stateful-config", "id", "instance-group", "instance-redistribution-type", "instance-template", "is-reached", "is-stable", "kind", "list-managed-instances-results", "max-surge", "max-unavailable", "minimal-action", 
"most-disruptive-allowed-action", "name", "none", "per-instance-configs", "percent", "recreating", "refreshing", "region", "replacement-method", "restarting", "resuming", "self-link", "starting", "stateful", "status", "stopping", "suspending", "target-pools", "target-shape", "target-size", "type", "update-policy", "verifying", "version-target", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -13851,7 +14200,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -13860,10 +14209,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -14130,7 +14479,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -14139,7 
+14488,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -14231,7 +14580,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -14240,7 +14589,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -14584,7 +14933,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14664,9 +15013,11 @@ where "properties.advanced-machine-features.enable-nested-virtualization" => Some(("properties.advancedMachineFeatures.enableNestedVirtualization", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "properties.advanced-machine-features.enable-uefi-networking" => Some(("properties.advancedMachineFeatures.enableUefiNetworking", JsonTypeInfo { jtype: JsonType::Boolean, 
ctype: ComplexType::Pod })), "properties.advanced-machine-features.threads-per-core" => Some(("properties.advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "properties.advanced-machine-features.visible-core-count" => Some(("properties.advancedMachineFeatures.visibleCoreCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "properties.can-ip-forward" => Some(("properties.canIpForward", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "properties.confidential-instance-config.enable-confidential-compute" => Some(("properties.confidentialInstanceConfig.enableConfidentialCompute", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "properties.description" => Some(("properties.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "properties.key-revocation-action-type" => Some(("properties.keyRevocationActionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "properties.labels" => Some(("properties.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "properties.machine-type" => Some(("properties.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "properties.metadata.fingerprint" => Some(("properties.metadata.fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -14694,7 +15045,7 @@ where "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-instance" => Some(("sourceInstance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "creation-timestamp", "description", "enable-confidential-compute", "enable-integrity-monitoring", 
"enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "id", "instance-termination-action", "items", "key", "kind", "labels", "location-hint", "machine-type", "metadata", "min-cpu-platform", "min-node-cpus", "name", "network-performance-config", "on-host-maintenance", "preemptible", "private-ipv6-google-access", "properties", "provisioning-model", "reservation-affinity", "resource-manager-tags", "resource-policies", "scheduling", "self-link", "shielded-instance-config", "source-instance", "tags", "threads-per-core", "total-egress-bandwidth-tier", "values"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "creation-timestamp", "description", "enable-confidential-compute", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "id", "instance-termination-action", "items", "key", "key-revocation-action-type", "kind", "labels", "location-hint", "machine-type", "metadata", "min-cpu-platform", "min-node-cpus", "name", "network-performance-config", "on-host-maintenance", "preemptible", "private-ipv6-google-access", "properties", "provisioning-model", "reservation-affinity", "resource-manager-tags", "resource-policies", "scheduling", "self-link", "shielded-instance-config", "source-instance", "tags", "threads-per-core", "total-egress-bandwidth-tier", "values", "visible-core-count"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -14765,7 +15116,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( 
value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -14774,7 +15125,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -15191,7 +15542,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -15200,10 +15551,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -15278,6 +15629,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "architecture" => Some(("architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auto-delete" => Some(("autoDelete", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "boot" => Some(("boot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device-name" => Some(("deviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -15287,7 +15639,9 @@ where "disk-encryption-key.rsa-encrypted-key" => Some(("diskEncryptionKey.rsaEncryptedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-encryption-key.sha256" => Some(("diskEncryptionKey.sha256", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-size-gb" => Some(("diskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "force-attach" => Some(("forceAttach", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "index" => Some(("index", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "initialize-params.architecture" => Some(("initializeParams.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "initialize-params.description" => Some(("initializeParams.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "initialize-params.disk-name" => Some(("initializeParams.diskName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "initialize-params.disk-size-gb" => Some(("initializeParams.diskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -15296,6 +15650,7 @@ where "initialize-params.licenses" => Some(("initializeParams.licenses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "initialize-params.on-update-action" => Some(("initializeParams.onUpdateAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "initialize-params.provisioned-iops" => 
Some(("initializeParams.provisionedIops", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "initialize-params.resource-manager-tags" => Some(("initializeParams.resourceManagerTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "initialize-params.resource-policies" => Some(("initializeParams.resourcePolicies", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "initialize-params.source-image" => Some(("initializeParams.sourceImage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "initialize-params.source-image-encryption-key.kms-key-name" => Some(("initializeParams.sourceImageEncryptionKey.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -15318,7 +15673,7 @@ where "source" => Some(("source", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete", "boot", "content", "description", "device-name", "disk-encryption-key", "disk-name", "disk-size-gb", "disk-type", "file-type", "index", "initialize-params", "interface", "kind", "kms-key-name", "kms-key-service-account", "labels", "licenses", "mode", "on-update-action", "pk", "provisioned-iops", "raw-key", "resource-policies", "rsa-encrypted-key", "sha256", "shielded-instance-initial-state", "source", "source-image", "source-image-encryption-key", "source-snapshot", "source-snapshot-encryption-key", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["architecture", "auto-delete", "boot", "content", "description", "device-name", "disk-encryption-key", "disk-name", "disk-size-gb", "disk-type", "file-type", "force-attach", "index", "initialize-params", "interface", "kind", "kms-key-name", "kms-key-service-account", "labels", "licenses", "mode", "on-update-action", "pk", "provisioned-iops", "raw-key", 
"resource-manager-tags", "resource-policies", "rsa-encrypted-key", "sha256", "shielded-instance-initial-state", "source", "source-image", "source-image-encryption-key", "source-snapshot", "source-snapshot-encryption-key", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -15336,7 +15691,7 @@ where call = call.request_id(value.unwrap_or("")); }, "force-attach" => { - call = call.force_attach(arg_from_str(value.unwrap_or("false"), err, "force-attach", "boolean")); + call = call.force_attach( value.map(|v| arg_from_str(v, err, "force-attach", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15412,9 +15767,11 @@ where "instance-properties.advanced-machine-features.enable-nested-virtualization" => Some(("instanceProperties.advancedMachineFeatures.enableNestedVirtualization", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.advanced-machine-features.enable-uefi-networking" => Some(("instanceProperties.advancedMachineFeatures.enableUefiNetworking", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.advanced-machine-features.threads-per-core" => Some(("instanceProperties.advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "instance-properties.advanced-machine-features.visible-core-count" => Some(("instanceProperties.advancedMachineFeatures.visibleCoreCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "instance-properties.can-ip-forward" => Some(("instanceProperties.canIpForward", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.confidential-instance-config.enable-confidential-compute" => Some(("instanceProperties.confidentialInstanceConfig.enableConfidentialCompute", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"instance-properties.description" => Some(("instanceProperties.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "instance-properties.key-revocation-action-type" => Some(("instanceProperties.keyRevocationActionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-properties.labels" => Some(("instanceProperties.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "instance-properties.machine-type" => Some(("instanceProperties.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-properties.metadata.fingerprint" => Some(("instanceProperties.metadata.fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -15439,11 +15796,12 @@ where "instance-properties.shielded-instance-config.enable-vtpm" => Some(("instanceProperties.shieldedInstanceConfig.enableVtpm", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.tags.fingerprint" => Some(("instanceProperties.tags.fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-properties.tags.items" => Some(("instanceProperties.tags.items", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "location-policy.target-shape" => Some(("locationPolicy.targetShape", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "min-count" => Some(("minCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "name-pattern" => Some(("namePattern", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-instance-template" => Some(("sourceInstanceTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "count", "description", 
"enable-confidential-compute", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "instance-properties", "instance-termination-action", "items", "key", "kind", "labels", "location-hint", "machine-type", "metadata", "min-count", "min-cpu-platform", "min-node-cpus", "name-pattern", "network-performance-config", "on-host-maintenance", "preemptible", "private-ipv6-google-access", "provisioning-model", "reservation-affinity", "resource-manager-tags", "resource-policies", "scheduling", "shielded-instance-config", "source-instance-template", "tags", "threads-per-core", "total-egress-bandwidth-tier", "values"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "count", "description", "enable-confidential-compute", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "instance-properties", "instance-termination-action", "items", "key", "key-revocation-action-type", "kind", "labels", "location-hint", "location-policy", "machine-type", "metadata", "min-count", "min-cpu-platform", "min-node-cpus", "name-pattern", "network-performance-config", "on-host-maintenance", "preemptible", "private-ipv6-google-access", "provisioning-model", "reservation-affinity", "resource-manager-tags", "resource-policies", "scheduling", "shielded-instance-config", "source-instance-template", "tags", "target-shape", "threads-per-core", "total-egress-bandwidth-tier", "values", "visible-core-count"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -15845,7 +16203,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = 
call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15953,10 +16311,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start" => { - call = call.start(value.unwrap_or("")); + call = call.start( value.map(|v| arg_from_str(v, err, "start", "int64")).unwrap_or(-0)); }, "port" => { - call = call.port(arg_from_str(value.unwrap_or("-0"), err, "port", "integer")); + call = call.port( value.map(|v| arg_from_str(v, err, "port", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16083,6 +16441,7 @@ where "advanced-machine-features.enable-nested-virtualization" => Some(("advancedMachineFeatures.enableNestedVirtualization", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "advanced-machine-features.enable-uefi-networking" => Some(("advancedMachineFeatures.enableUefiNetworking", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "advanced-machine-features.threads-per-core" => Some(("advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "advanced-machine-features.visible-core-count" => Some(("advancedMachineFeatures.visibleCoreCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "can-ip-forward" => Some(("canIpForward", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "confidential-instance-config.enable-confidential-compute" => Some(("confidentialInstanceConfig.enableConfidentialCompute", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cpu-platform" => Some(("cpuPlatform", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -16093,6 +16452,7 @@ where "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "hostname" => Some(("hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "key-revocation-action-type" => Some(("keyRevocationActionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -16111,6 +16471,7 @@ where "reservation-affinity.key" => Some(("reservationAffinity.key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "reservation-affinity.values" => Some(("reservationAffinity.values", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "resource-policies" => Some(("resourcePolicies", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "resource-status.physical-host" => Some(("resourceStatus.physicalHost", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "scheduling.automatic-restart" => Some(("scheduling.automaticRestart", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "scheduling.instance-termination-action" => Some(("scheduling.instanceTerminationAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -16137,7 +16498,7 @@ where "tags.items" => Some(("tags.items", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", 
"can-ip-forward", "confidential-instance-config", "consume-reservation-type", "cpu-platform", "creation-timestamp", "deletion-protection", "description", "display-device", "enable-confidential-compute", "enable-display", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "hostname", "id", "instance-termination-action", "items", "key", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "last-start-timestamp", "last-stop-timestamp", "last-suspended-timestamp", "location-hint", "machine-type", "metadata", "min-cpu-platform", "min-node-cpus", "name", "network-performance-config", "on-host-maintenance", "params", "preemptible", "private-ipv6-google-access", "provisioning-model", "raw-key", "reservation-affinity", "resource-manager-tags", "resource-policies", "rsa-encrypted-key", "satisfies-pzs", "scheduling", "self-link", "sha256", "shielded-instance-config", "shielded-instance-integrity-policy", "source-machine-image", "source-machine-image-encryption-key", "start-restricted", "status", "status-message", "tags", "threads-per-core", "total-egress-bandwidth-tier", "update-auto-learn-policy", "values", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "cpu-platform", "creation-timestamp", "deletion-protection", "description", "display-device", "enable-confidential-compute", "enable-display", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "hostname", "id", "instance-termination-action", "items", "key", "key-revocation-action-type", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "last-start-timestamp", "last-stop-timestamp", "last-suspended-timestamp", "location-hint", "machine-type", 
"metadata", "min-cpu-platform", "min-node-cpus", "name", "network-performance-config", "on-host-maintenance", "params", "physical-host", "preemptible", "private-ipv6-google-access", "provisioning-model", "raw-key", "reservation-affinity", "resource-manager-tags", "resource-policies", "resource-status", "rsa-encrypted-key", "satisfies-pzs", "scheduling", "self-link", "sha256", "shielded-instance-config", "shielded-instance-integrity-policy", "source-machine-image", "source-machine-image-encryption-key", "start-restricted", "status", "status-message", "tags", "threads-per-core", "total-egress-bandwidth-tier", "update-auto-learn-policy", "values", "visible-core-count", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -16214,7 +16575,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -16223,7 +16584,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -16282,7 +16643,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => 
{ call = call.page_token(value.unwrap_or("")); @@ -16291,7 +16652,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -16598,7 +16959,7 @@ where call = call.request_id(value.unwrap_or("")); }, "deletion-protection" => { - call = call.deletion_protection(arg_from_str(value.unwrap_or("false"), err, "deletion-protection", "boolean")); + call = call.deletion_protection( value.map(|v| arg_from_str(v, err, "deletion-protection", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -17806,6 +18167,9 @@ where "request-id" => { call = call.request_id(value.unwrap_or("")); }, + "discard-local-ssd" => { + call = call.discard_local_ssd( value.map(|v| arg_from_str(v, err, "discard-local-ssd", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -17819,7 +18183,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["request-id"].iter().map(|v|*v)); + v.extend(["discard-local-ssd", "request-id"].iter().map(|v|*v)); v } )); } } @@ -17862,6 +18226,9 @@ where "request-id" => { call = call.request_id(value.unwrap_or("")); }, + "discard-local-ssd" => { + call = call.discard_local_ssd( value.map(|v| arg_from_str(v, err, "discard-local-ssd", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -17875,7 +18242,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["request-id"].iter().map(|v|*v)); + v.extend(["discard-local-ssd", "request-id"].iter().map(|v|*v)); v } )); } } @@ -18020,6 +18387,7 @@ where 
"advanced-machine-features.enable-nested-virtualization" => Some(("advancedMachineFeatures.enableNestedVirtualization", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "advanced-machine-features.enable-uefi-networking" => Some(("advancedMachineFeatures.enableUefiNetworking", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "advanced-machine-features.threads-per-core" => Some(("advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "advanced-machine-features.visible-core-count" => Some(("advancedMachineFeatures.visibleCoreCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "can-ip-forward" => Some(("canIpForward", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "confidential-instance-config.enable-confidential-compute" => Some(("confidentialInstanceConfig.enableConfidentialCompute", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cpu-platform" => Some(("cpuPlatform", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -18030,6 +18398,7 @@ where "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "hostname" => Some(("hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "key-revocation-action-type" => Some(("keyRevocationActionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -18048,6 +18417,7 @@ where "reservation-affinity.key" => Some(("reservationAffinity.key", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "reservation-affinity.values" => Some(("reservationAffinity.values", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "resource-policies" => Some(("resourcePolicies", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "resource-status.physical-host" => Some(("resourceStatus.physicalHost", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "scheduling.automatic-restart" => Some(("scheduling.automaticRestart", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "scheduling.instance-termination-action" => Some(("scheduling.instanceTerminationAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -18074,7 +18444,7 @@ where "tags.items" => Some(("tags.items", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "cpu-platform", "creation-timestamp", "deletion-protection", "description", "display-device", "enable-confidential-compute", "enable-display", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "hostname", "id", "instance-termination-action", "items", "key", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "last-start-timestamp", "last-stop-timestamp", "last-suspended-timestamp", "location-hint", "machine-type", "metadata", "min-cpu-platform", "min-node-cpus", "name", "network-performance-config", "on-host-maintenance", "params", "preemptible", "private-ipv6-google-access", 
"provisioning-model", "raw-key", "reservation-affinity", "resource-manager-tags", "resource-policies", "rsa-encrypted-key", "satisfies-pzs", "scheduling", "self-link", "sha256", "shielded-instance-config", "shielded-instance-integrity-policy", "source-machine-image", "source-machine-image-encryption-key", "start-restricted", "status", "status-message", "tags", "threads-per-core", "total-egress-bandwidth-tier", "update-auto-learn-policy", "values", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "cpu-platform", "creation-timestamp", "deletion-protection", "description", "display-device", "enable-confidential-compute", "enable-display", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "hostname", "id", "instance-termination-action", "items", "key", "key-revocation-action-type", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "last-start-timestamp", "last-stop-timestamp", "last-suspended-timestamp", "location-hint", "machine-type", "metadata", "min-cpu-platform", "min-node-cpus", "name", "network-performance-config", "on-host-maintenance", "params", "physical-host", "preemptible", "private-ipv6-google-access", "provisioning-model", "raw-key", "reservation-affinity", "resource-manager-tags", "resource-policies", "resource-status", "rsa-encrypted-key", "satisfies-pzs", "scheduling", "self-link", "sha256", "shielded-instance-config", "shielded-instance-integrity-policy", "source-machine-image", "source-machine-image-encryption-key", "start-restricted", "status", "status-message", "tags", "threads-per-core", "total-egress-bandwidth-tier", "update-auto-learn-policy", "values", "visible-core-count", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, 
value.map(|v| v.to_string())))); None } @@ -18360,13 +18730,14 @@ where "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network-attachment" => Some(("networkAttachment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-ip" => Some(("networkIP", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "nic-type" => Some(("nicType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "queue-count" => Some(("queueCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "stack-type" => Some(("stackType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subnetwork" => Some(("subnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["fingerprint", "internal-ipv6-prefix-length", "ipv6-access-type", "ipv6-address", "kind", "name", "network", "network-ip", "nic-type", "queue-count", "stack-type", "subnetwork"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["fingerprint", "internal-ipv6-prefix-length", "ipv6-access-type", "ipv6-address", "kind", "name", "network", "network-attachment", "network-ip", "nic-type", "queue-count", "stack-type", "subnetwork"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -18528,7 +18899,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); 
}, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -18537,10 +18908,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -18776,7 +19147,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -18835,7 +19206,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -18844,7 +19215,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -19021,6 +19392,96 @@ where } } + async fn _interconnect_attachments_set_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, 
err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["label-fingerprint", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RegionSetLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.interconnect_attachments().set_labels(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _interconnect_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.interconnect_locations().get(opt.value_of("project").unwrap_or(""), opt.value_of("interconnect-location").unwrap_or("")); @@ -19080,7 +19541,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -19089,7 +19550,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => 
{ - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -19417,7 +19878,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -19426,7 +19887,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -19587,6 +20048,92 @@ where } } + async fn _interconnects_set_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["label-fingerprint", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GlobalSetLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.interconnects().set_labels(request, opt.value_of("project").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + 
Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _license_codes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.license_codes().get(opt.value_of("project").unwrap_or(""), opt.value_of("license-code").unwrap_or("")); @@ -19839,7 +20386,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -19994,7 +20541,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -20003,7 +20550,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -20342,7 +20889,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, 
"options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -20421,9 +20968,11 @@ where "instance-properties.advanced-machine-features.enable-nested-virtualization" => Some(("instanceProperties.advancedMachineFeatures.enableNestedVirtualization", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.advanced-machine-features.enable-uefi-networking" => Some(("instanceProperties.advancedMachineFeatures.enableUefiNetworking", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.advanced-machine-features.threads-per-core" => Some(("instanceProperties.advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "instance-properties.advanced-machine-features.visible-core-count" => Some(("instanceProperties.advancedMachineFeatures.visibleCoreCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "instance-properties.can-ip-forward" => Some(("instanceProperties.canIpForward", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.confidential-instance-config.enable-confidential-compute" => Some(("instanceProperties.confidentialInstanceConfig.enableConfidentialCompute", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.description" => Some(("instanceProperties.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "instance-properties.key-revocation-action-type" => Some(("instanceProperties.keyRevocationActionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-properties.labels" => Some(("instanceProperties.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "instance-properties.machine-type" => 
Some(("instanceProperties.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-properties.metadata.fingerprint" => Some(("instanceProperties.metadata.fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -20461,6 +21010,7 @@ where "source-instance-properties.can-ip-forward" => Some(("sourceInstanceProperties.canIpForward", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "source-instance-properties.deletion-protection" => Some(("sourceInstanceProperties.deletionProtection", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "source-instance-properties.description" => Some(("sourceInstanceProperties.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-instance-properties.key-revocation-action-type" => Some(("sourceInstanceProperties.keyRevocationActionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-instance-properties.labels" => Some(("sourceInstanceProperties.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "source-instance-properties.machine-type" => Some(("sourceInstanceProperties.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-instance-properties.metadata.fingerprint" => Some(("sourceInstanceProperties.metadata.fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -20479,7 +21029,7 @@ where "storage-locations" => Some(("storageLocations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "total-storage-bytes" => Some(("totalStorageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "creation-timestamp", "deletion-protection", "description", 
"enable-confidential-compute", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "guest-flush", "id", "instance-properties", "instance-termination-action", "items", "key", "kind", "kms-key-name", "kms-key-service-account", "labels", "location-hint", "machine-image-encryption-key", "machine-type", "metadata", "min-cpu-platform", "min-node-cpus", "name", "network-performance-config", "on-host-maintenance", "preemptible", "private-ipv6-google-access", "provisioning-model", "raw-key", "reservation-affinity", "resource-manager-tags", "resource-policies", "rsa-encrypted-key", "satisfies-pzs", "scheduling", "self-link", "sha256", "shielded-instance-config", "source-instance", "source-instance-properties", "status", "storage-locations", "tags", "threads-per-core", "total-egress-bandwidth-tier", "total-storage-bytes", "values"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "creation-timestamp", "deletion-protection", "description", "enable-confidential-compute", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "guest-flush", "id", "instance-properties", "instance-termination-action", "items", "key", "key-revocation-action-type", "kind", "kms-key-name", "kms-key-service-account", "labels", "location-hint", "machine-image-encryption-key", "machine-type", "metadata", "min-cpu-platform", "min-node-cpus", "name", "network-performance-config", "on-host-maintenance", "preemptible", "private-ipv6-google-access", "provisioning-model", "raw-key", "reservation-affinity", "resource-manager-tags", "resource-policies", "rsa-encrypted-key", "satisfies-pzs", "scheduling", "self-link", "sha256", "shielded-instance-config", "source-instance", "source-instance-properties", 
"status", "storage-locations", "tags", "threads-per-core", "total-egress-bandwidth-tier", "total-storage-bytes", "values", "visible-core-count"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -20553,7 +21103,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -20562,7 +21112,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -20793,7 +21343,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -20802,10 +21352,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| 
arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -20916,7 +21466,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -20925,7 +21475,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -20977,14 +21527,14 @@ where } } - async fn _network_endpoint_groups_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _network_attachments_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { - let mut call = self.hub.network_endpoint_groups().aggregated_list(opt.value_of("project").unwrap_or("")); + let mut call = self.hub.network_attachments().aggregated_list(opt.value_of("project").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -20993,10 +21543,971 @@ where call = 
call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "include-all-scopes", "max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn 
_network_attachments_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.network_attachments().delete(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("network-attachment").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_attachments_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.network_attachments().get(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("network-attachment").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_attachments_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.network_attachments().get_iam_policy(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = 
parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_attachments_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut 
temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "connection-preference" => Some(("connectionPreference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "producer-accept-lists" => Some(("producerAcceptLists", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "producer-reject-lists" => Some(("producerRejectLists", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link-with-id" => Some(("selfLinkWithId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "subnetworks" => Some(("subnetworks", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["connection-preference", 
"creation-timestamp", "description", "fingerprint", "id", "kind", "name", "network", "producer-accept-lists", "producer-reject-lists", "region", "self-link", "self-link-with-id", "subnetworks"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NetworkAttachment = json::value::from_value(object).unwrap(); + let mut call = self.hub.network_attachments().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut 
response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_attachments_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.network_attachments().list(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) 
=> f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_attachments_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RegionSetPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.network_attachments().set_iam_policy(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match 
match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_attachments_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.network_attachments().test_iam_permissions(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_edge_security_services_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.network_edge_security_services().aggregated_list(opt.value_of("project").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "include-all-scopes" => { + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "include-all-scopes", "max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_edge_security_services_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.network_edge_security_services().delete(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("network-edge-security-service").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut 
ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_edge_security_services_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.network_edge_security_services().get(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("network-edge-security-service").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, 
+ _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_edge_security_services_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "security-policy" => Some(("securityPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link-with-id" => Some(("selfLinkWithId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "fingerprint", "id", "kind", "name", "region", "security-policy", "self-link", "self-link-with-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NetworkEdgeSecurityService = json::value::from_value(object).unwrap(); + let mut call = self.hub.network_edge_security_services().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + 
"validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _network_edge_security_services_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = 
field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "security-policy" => Some(("securityPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link-with-id" => Some(("selfLinkWithId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "fingerprint", "id", "kind", "name", "region", "security-policy", "self-link", "self-link-with-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NetworkEdgeSecurityService = 
json::value::from_value(object).unwrap(); + let mut call = self.hub.network_edge_security_services().patch(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("network-edge-security-service").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + "paths" => { + call = call.add_paths(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["paths", "request-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + 
} + } + } + } + + async fn _network_endpoint_groups_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.network_endpoint_groups().aggregated_list(opt.value_of("project").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "include-all-scopes" => { + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -21372,6 +22883,9 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-endpoint-type" => Some(("networkEndpointType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.consumer-psc-address" => Some(("pscData.consumerPscAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.psc-connection-id" => Some(("pscData.pscConnectionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.psc-connection-status" => Some(("pscData.pscConnectionStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "psc-target-service" => Some(("pscTargetService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -21379,7 +22893,7 @@ where "subnetwork" => Some(("subnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "app-engine", "cloud-function", "cloud-run", "creation-timestamp", "default-port", "description", "function", "id", "kind", "name", "network", "network-endpoint-type", "psc-target-service", "region", "self-link", "service", "size", "subnetwork", "tag", "url-mask", "version", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "app-engine", "cloud-function", "cloud-run", "consumer-psc-address", "creation-timestamp", "default-port", "description", "function", "id", "kind", "name", "network", "network-endpoint-type", "psc-connection-id", "psc-connection-status", "psc-data", "psc-target-service", "region", "self-link", "service", "size", "subnetwork", "tag", "url-mask", "version", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -21450,7 +22964,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -21459,7 +22973,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( 
value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -21551,7 +23065,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -21560,7 +23074,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -21744,7 +23258,7 @@ where call = call.request_id(value.unwrap_or("")); }, "replace-existing-association" => { - call = call.replace_existing_association(arg_from_str(value.unwrap_or("false"), err, "replace-existing-association", "boolean")); + call = call.replace_existing_association( value.map(|v| arg_from_str(v, err, "replace-existing-association", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -21825,11 +23339,12 @@ where "match.dest-ip-ranges" => Some(("match.destIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "match.src-ip-ranges" => Some(("match.srcIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rule-name" => Some(("ruleName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rule-tuple-count" => Some(("ruleTupleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "target-resources" => Some(("targetResources", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Vec })), "target-service-accounts" => Some(("targetServiceAccounts", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-name", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -21847,10 +23362,10 @@ where call = call.request_id(value.unwrap_or("")); }, "min-priority" => { - call = call.min_priority(arg_from_str(value.unwrap_or("-0"), err, "min-priority", "integer")); + call = call.min_priority( value.map(|v| arg_from_str(v, err, "min-priority", "int32")).unwrap_or(-0)); }, "max-priority" => { - call = call.max_priority(arg_from_str(value.unwrap_or("-0"), err, "max-priority", "integer")); + call = call.max_priority( value.map(|v| arg_from_str(v, err, "max-priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -22129,7 +23644,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -22185,7 +23700,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "priority" => { - call = 
call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -22342,7 +23857,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -22351,7 +23866,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -22536,11 +24051,12 @@ where "match.dest-ip-ranges" => Some(("match.destIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "match.src-ip-ranges" => Some(("match.srcIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rule-name" => Some(("ruleName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rule-tuple-count" => Some(("ruleTupleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "target-resources" => Some(("targetResources", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "target-service-accounts" => Some(("targetServiceAccounts", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", 
"match", "priority", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-name", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -22558,7 +24074,7 @@ where call = call.request_id(value.unwrap_or("")); }, "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -22676,7 +24192,7 @@ where call = call.request_id(value.unwrap_or("")); }, "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -22931,11 +24447,12 @@ where "network-peering.name" => Some(("networkPeering.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-peering.network" => Some(("networkPeering.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-peering.peer-mtu" => Some(("networkPeering.peerMtu", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "network-peering.stack-type" => Some(("networkPeering.stackType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-peering.state" => Some(("networkPeering.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-peering.state-details" => Some(("networkPeering.stateDetails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "peer-network" => Some(("peerNetwork", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-create-routes", "exchange-subnet-routes", "export-custom-routes", "export-subnet-routes-with-public-ip", "import-custom-routes", "import-subnet-routes-with-public-ip", "name", "network", "network-peering", "peer-mtu", "peer-network", "state", "state-details"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-create-routes", "exchange-subnet-routes", "export-custom-routes", "export-subnet-routes-with-public-ip", "import-custom-routes", "import-subnet-routes-with-public-ip", "name", "network", "network-peering", "peer-mtu", "peer-network", "stack-type", "state", "state-details"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -23187,6 +24704,7 @@ where "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "enable-ula-internal-ipv6" => Some(("enableUlaInternalIpv6", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "firewall-policy" => Some(("firewallPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gateway-i-pv4" => Some(("gatewayIPv4", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "internal-ipv6-range" => Some(("internalIpv6Range", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -23199,7 +24717,7 @@ where "self-link-with-id" => Some(("selfLinkWithId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subnetworks" => Some(("subnetworks", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, 
&vec!["i-pv4-range", "auto-create-subnetworks", "creation-timestamp", "description", "enable-ula-internal-ipv6", "gateway-i-pv4", "id", "internal-ipv6-range", "kind", "mtu", "name", "network-firewall-policy-enforcement-order", "routing-config", "routing-mode", "self-link", "self-link-with-id", "subnetworks"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["i-pv4-range", "auto-create-subnetworks", "creation-timestamp", "description", "enable-ula-internal-ipv6", "firewall-policy", "gateway-i-pv4", "id", "internal-ipv6-range", "kind", "mtu", "name", "network-firewall-policy-enforcement-order", "routing-config", "routing-mode", "self-link", "self-link-with-id", "subnetworks"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -23270,7 +24788,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -23279,7 +24797,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -23338,7 +24856,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, 
"region" => { call = call.region(value.unwrap_or("")); @@ -23353,7 +24871,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -23436,6 +24954,7 @@ where "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "enable-ula-internal-ipv6" => Some(("enableUlaInternalIpv6", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "firewall-policy" => Some(("firewallPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gateway-i-pv4" => Some(("gatewayIPv4", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "internal-ipv6-range" => Some(("internalIpv6Range", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -23448,7 +24967,7 @@ where "self-link-with-id" => Some(("selfLinkWithId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subnetworks" => Some(("subnetworks", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["i-pv4-range", "auto-create-subnetworks", "creation-timestamp", "description", "enable-ula-internal-ipv6", "gateway-i-pv4", "id", "internal-ipv6-range", "kind", "mtu", "name", "network-firewall-policy-enforcement-order", "routing-config", "routing-mode", "self-link", "self-link-with-id", "subnetworks"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["i-pv4-range", "auto-create-subnetworks", "creation-timestamp", "description", 
"enable-ula-internal-ipv6", "firewall-policy", "gateway-i-pv4", "id", "internal-ipv6-range", "kind", "mtu", "name", "network-firewall-policy-enforcement-order", "routing-config", "routing-mode", "self-link", "self-link-with-id", "subnetworks"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -23689,10 +25208,11 @@ where "network-peering.name" => Some(("networkPeering.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-peering.network" => Some(("networkPeering.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-peering.peer-mtu" => Some(("networkPeering.peerMtu", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "network-peering.stack-type" => Some(("networkPeering.stackType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-peering.state" => Some(("networkPeering.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-peering.state-details" => Some(("networkPeering.stateDetails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-create-routes", "exchange-subnet-routes", "export-custom-routes", "export-subnet-routes-with-public-ip", "import-custom-routes", "import-subnet-routes-with-public-ip", "name", "network", "network-peering", "peer-mtu", "state", "state-details"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-create-routes", "exchange-subnet-routes", "export-custom-routes", "export-subnet-routes-with-public-ip", "import-custom-routes", "import-subnet-routes-with-public-ip", "name", "network", "network-peering", "peer-mtu", "stack-type", "state", "state-details"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -23852,7 +25372,7 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -23861,10 +25381,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -24120,7 +25640,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -24208,11 +25728,12 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-template" => Some(("nodeTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "share-settings.share-type" => Some(("shareSettings.shareType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "size" => Some(("size", JsonTypeInfo { jtype: JsonType::Int, 
ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-policy", "creation-timestamp", "description", "fingerprint", "id", "kind", "location-hint", "maintenance-duration", "maintenance-policy", "maintenance-window", "max-nodes", "min-nodes", "mode", "name", "nanos", "node-template", "seconds", "self-link", "size", "start-time", "status", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-policy", "creation-timestamp", "description", "fingerprint", "id", "kind", "location-hint", "maintenance-duration", "maintenance-policy", "maintenance-window", "max-nodes", "min-nodes", "mode", "name", "nanos", "node-template", "seconds", "self-link", "share-settings", "share-type", "size", "start-time", "status", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -24284,7 +25805,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -24293,7 +25814,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -24352,7 +25873,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -24361,7 +25882,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -24452,11 +25973,12 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-template" => Some(("nodeTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "share-settings.share-type" => Some(("shareSettings.shareType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "size" => Some(("size", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-policy", "creation-timestamp", "description", "fingerprint", "id", "kind", "location-hint", "maintenance-duration", "maintenance-policy", "maintenance-window", "max-nodes", "min-nodes", "mode", "name", "nanos", "node-template", "seconds", "self-link", "size", "start-time", "status", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["autoscaling-policy", "creation-timestamp", "description", "fingerprint", "id", "kind", "location-hint", 
"maintenance-duration", "maintenance-policy", "maintenance-window", "max-nodes", "min-nodes", "mode", "name", "nanos", "node-template", "seconds", "self-link", "share-settings", "share-type", "size", "start-time", "status", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -24788,7 +26310,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -24797,10 +26319,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -24967,7 +26489,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -25127,7 +26649,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - 
call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -25136,7 +26658,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -25367,7 +26889,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -25376,10 +26898,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -25490,7 +27012,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = 
call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -25499,7 +27021,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -25558,7 +27080,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -25567,10 +27089,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -25842,7 +27364,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = 
call.page_token(value.unwrap_or("")); @@ -25851,7 +27373,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -26496,7 +28018,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -26505,7 +28027,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -26597,7 +28119,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -26606,7 +28128,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -27321,7 +28843,7 @@ 
where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -27330,7 +28852,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -27488,7 +29010,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -27497,10 +29019,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -27767,7 +29289,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = 
call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -27776,7 +29298,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -28153,7 +29675,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -28162,7 +29684,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -28633,6 +30155,62 @@ where } } + async fn _region_backend_services_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_backend_services().get_iam_policy(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + 
"options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _region_backend_services_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -28678,6 +30256,7 @@ where "circuit-breakers.max-requests" => Some(("circuitBreakers.maxRequests", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-requests-per-connection" => Some(("circuitBreakers.maxRequestsPerConnection", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"circuit-breakers.max-retries" => Some(("circuitBreakers.maxRetries", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-draining.draining-timeout-sec" => Some(("connectionDraining.drainingTimeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "connection-tracking-policy.connection-persistence-on-unhealthy-backends" => Some(("connectionTrackingPolicy.connectionPersistenceOnUnhealthyBackends", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-tracking-policy.enable-strong-affinity" => Some(("connectionTrackingPolicy.enableStrongAffinity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -28735,11 +30314,12 @@ where "security-settings.client-tls-policy" => Some(("securitySettings.clientTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings.subject-alt-names" => Some(("securitySettings.subjectAltNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-bindings" => Some(("serviceBindings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "session-affinity" => Some(("sessionAffinity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subsetting.policy" => Some(("subsetting.policy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "timeout-sec" => Some(("timeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "connection-draining", "connection-persistence-on-unhealthy-backends", 
"connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "compression-mode", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", 
"consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "service-bindings", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -28810,7 +30390,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, 
"return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -28819,7 +30399,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -28916,6 +30496,7 @@ where "circuit-breakers.max-requests" => Some(("circuitBreakers.maxRequests", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-requests-per-connection" => Some(("circuitBreakers.maxRequestsPerConnection", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-retries" => Some(("circuitBreakers.maxRetries", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-draining.draining-timeout-sec" => Some(("connectionDraining.drainingTimeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "connection-tracking-policy.connection-persistence-on-unhealthy-backends" => Some(("connectionTrackingPolicy.connectionPersistenceOnUnhealthyBackends", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-tracking-policy.enable-strong-affinity" => Some(("connectionTrackingPolicy.enableStrongAffinity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -28973,11 +30554,12 @@ where "security-settings.client-tls-policy" => Some(("securitySettings.clientTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings.subject-alt-names" => Some(("securitySettings.subjectAltNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "self-link" => Some(("selfLink", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-bindings" => Some(("serviceBindings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "session-affinity" => Some(("sessionAffinity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subsetting.policy" => Some(("subsetting.policy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "timeout-sec" => Some(("timeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", 
"protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "compression-mode", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", 
"request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "service-bindings", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -29041,6 +30623,93 @@ where } } + async fn _region_backend_services_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RegionSetPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.region_backend_services().set_iam_policy(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match 
match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _region_backend_services_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -29086,6 +30755,7 @@ where "circuit-breakers.max-requests" => Some(("circuitBreakers.maxRequests", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-requests-per-connection" => Some(("circuitBreakers.maxRequestsPerConnection", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "circuit-breakers.max-retries" => Some(("circuitBreakers.maxRetries", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "compression-mode" => Some(("compressionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-draining.draining-timeout-sec" => Some(("connectionDraining.drainingTimeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "connection-tracking-policy.connection-persistence-on-unhealthy-backends" => Some(("connectionTrackingPolicy.connectionPersistenceOnUnhealthyBackends", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-tracking-policy.enable-strong-affinity" => Some(("connectionTrackingPolicy.enableStrongAffinity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -29143,11 +30813,12 @@ where "security-settings.client-tls-policy" => Some(("securitySettings.clientTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings.subject-alt-names" => Some(("securitySettings.subjectAltNames", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Vec })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-bindings" => Some(("serviceBindings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "session-affinity" => Some(("sessionAffinity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subsetting.policy" => Some(("subsetting.policy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "timeout-sec" => Some(("timeoutSec", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", 
"oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", "port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["affinity-cookie-ttl-sec", "base-ejection-time", "cache-key-policy", "cache-mode", "cdn-policy", "circuit-breakers", "client-tls-policy", "client-ttl", "compression-mode", "connection-draining", "connection-persistence-on-unhealthy-backends", "connection-tracking-policy", "consecutive-errors", "consecutive-gateway-failure", "consistent-hash", "creation-timestamp", "custom-request-headers", "custom-response-headers", "default-ttl", "description", "disable-connection-drain-on-failover", "draining-timeout-sec", "drop-traffic-if-unhealthy", "edge-security-policy", "enable", "enable-cdn", "enable-strong-affinity", "enabled", "enforcing-consecutive-errors", "enforcing-consecutive-gateway-failure", "enforcing-success-rate", "failover-policy", "failover-ratio", "fingerprint", "health-checks", "http-cookie", "http-header-name", "iap", "id", "idle-timeout-sec", "include-host", "include-http-headers", "include-named-cookies", "include-protocol", "include-query-string", "interval", "kind", "load-balancing-scheme", "locality-lb-policy", "log-config", "max-connections", "max-ejection-percent", "max-pending-requests", "max-requests", "max-requests-per-connection", "max-retries", "max-stream-duration", "max-ttl", "minimum-ring-size", "name", "nanos", "negative-caching", "network", "oauth2-client-id", "oauth2-client-secret", "oauth2-client-secret-sha256", "outlier-detection", "path", "policy", "port", 
"port-name", "protocol", "query-string-blacklist", "query-string-whitelist", "region", "request-coalescing", "sample-rate", "seconds", "security-policy", "security-settings", "self-link", "serve-while-stale", "service-bindings", "session-affinity", "signed-url-cache-max-age-sec", "signed-url-key-names", "subject-alt-names", "subsetting", "success-rate-minimum-hosts", "success-rate-request-volume", "success-rate-stdev-factor", "timeout-sec", "tracking-mode", "ttl"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -29218,7 +30889,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -29227,10 +30898,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -29367,16 +31038,18 @@ where "license-resource.amount" => Some(("licenseResource.amount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "license-resource.cores-per-license" => Some(("licenseResource.coresPerLicense", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "license-resource.license" => 
Some(("licenseResource.license", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "merge-source-commitments" => Some(("mergeSourceCommitments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "plan" => Some(("plan", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "split-source-commitment" => Some(("splitSourceCommitment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "start-timestamp" => Some(("startTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["amount", "auto-renew", "category", "cores-per-license", "creation-timestamp", "description", "end-timestamp", "id", "kind", "license", "license-resource", "name", "plan", "region", "self-link", "start-timestamp", "status", "status-message", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["amount", "auto-renew", "category", "cores-per-license", "creation-timestamp", "description", "end-timestamp", "id", "kind", "license", "license-resource", "merge-source-commitments", "name", "plan", "region", "self-link", "split-source-commitment", "start-timestamp", "status", "status-message", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -29447,7 +31120,7 @@ 
where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -29456,7 +31129,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -29541,16 +31214,18 @@ where "license-resource.amount" => Some(("licenseResource.amount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "license-resource.cores-per-license" => Some(("licenseResource.coresPerLicense", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "license-resource.license" => Some(("licenseResource.license", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "merge-source-commitments" => Some(("mergeSourceCommitments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "plan" => Some(("plan", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "split-source-commitment" => Some(("splitSourceCommitment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "start-timestamp" => Some(("startTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => 
Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["amount", "auto-renew", "category", "cores-per-license", "creation-timestamp", "description", "end-timestamp", "id", "kind", "license", "license-resource", "name", "plan", "region", "self-link", "start-timestamp", "status", "status-message", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["amount", "auto-renew", "category", "cores-per-license", "creation-timestamp", "description", "end-timestamp", "id", "kind", "license", "license-resource", "merge-source-commitments", "name", "plan", "region", "self-link", "split-source-commitment", "start-timestamp", "status", "status-message", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -29565,7 +31240,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -29679,7 +31354,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -29688,7 +31363,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call 
= call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -29852,8 +31527,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "architecture" => Some(("architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auto-created" => Some(("autoCreated", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "chain-name" => Some(("chainName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creation-size-bytes" => Some(("creationSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-size-gb" => Some(("diskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -29873,6 +31550,7 @@ where "snapshot-encryption-key.raw-key" => Some(("snapshotEncryptionKey.rawKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snapshot-encryption-key.rsa-encrypted-key" => Some(("snapshotEncryptionKey.rsaEncryptedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snapshot-encryption-key.sha256" => Some(("snapshotEncryptionKey.sha256", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snapshot-type" => Some(("snapshotType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk" => Some(("sourceDisk", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-encryption-key.kms-key-name" => Some(("sourceDiskEncryptionKey.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "source-disk-encryption-key.kms-key-service-account" => Some(("sourceDiskEncryptionKey.kmsKeyServiceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -29880,12 +31558,14 @@ where "source-disk-encryption-key.rsa-encrypted-key" => Some(("sourceDiskEncryptionKey.rsaEncryptedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-encryption-key.sha256" => Some(("sourceDiskEncryptionKey.sha256", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-id" => Some(("sourceDiskId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-snapshot-schedule-policy" => Some(("sourceSnapshotSchedulePolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-snapshot-schedule-policy-id" => Some(("sourceSnapshotSchedulePolicyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-bytes" => Some(("storageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-bytes-status" => Some(("storageBytesStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-locations" => Some(("storageLocations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-created", "chain-name", "creation-timestamp", "description", "disk-size-gb", "download-bytes", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "location-hint", "name", "raw-key", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha256", "snapshot-encryption-key", "source-disk", "source-disk-encryption-key", "source-disk-id", "status", "storage-bytes", "storage-bytes-status", "storage-locations"]); + let suggestion = FieldCursor::did_you_mean(key, 
&vec!["architecture", "auto-created", "chain-name", "creation-size-bytes", "creation-timestamp", "description", "disk-size-gb", "download-bytes", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "location-hint", "name", "raw-key", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha256", "snapshot-encryption-key", "snapshot-type", "source-disk", "source-disk-encryption-key", "source-disk-id", "source-snapshot-schedule-policy", "source-snapshot-schedule-policy-id", "status", "storage-bytes", "storage-bytes-status", "storage-locations"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -30064,7 +31744,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -30136,6 +31816,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "architecture" => Some(("architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-encryption-key.kms-key-name" => Some(("diskEncryptionKey.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -30154,6 +31835,7 @@ where "location-hint" => Some(("locationHint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "options" => Some(("options", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "params.resource-manager-tags" => Some(("params.resourceManagerTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "physical-block-size-bytes" => Some(("physicalBlockSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "provisioned-iops" => Some(("provisionedIops", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -30184,7 +31866,7 @@ where "users" => Some(("users", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "disk-encryption-key", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "last-attach-timestamp", "last-detach-timestamp", "license-codes", "licenses", "location-hint", "name", "options", "physical-block-size-bytes", "provisioned-iops", "raw-key", "region", "replica-zones", "resource-policies", "rsa-encrypted-key", "satisfies-pzs", 
"self-link", "sha256", "size-gb", "source-disk", "source-disk-id", "source-image", "source-image-encryption-key", "source-image-id", "source-snapshot", "source-snapshot-encryption-key", "source-snapshot-id", "source-storage-object", "status", "type", "users", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["architecture", "creation-timestamp", "description", "disk-encryption-key", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "last-attach-timestamp", "last-detach-timestamp", "license-codes", "licenses", "location-hint", "name", "options", "params", "physical-block-size-bytes", "provisioned-iops", "raw-key", "region", "replica-zones", "resource-manager-tags", "resource-policies", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha256", "size-gb", "source-disk", "source-disk-id", "source-image", "source-image-encryption-key", "source-image-id", "source-snapshot", "source-snapshot-encryption-key", "source-snapshot-id", "source-storage-object", "status", "type", "users", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -30258,7 +31940,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -30267,7 +31949,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -30974,7 +32656,7 
@@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -30983,7 +32665,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -31388,7 +33070,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -31397,7 +33079,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -32318,6 +34000,7 @@ where "instance-group" => Some(("instanceGroup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-template" => Some(("instanceTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "list-managed-instances-results" => Some(("listManagedInstancesResults", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -32341,7 +34024,7 @@ where "update-policy.type" => Some(("updatePolicy.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoning", "all-effective", "autoscaler", "base-instance-name", "calculated", "creating", "creating-without-retries", "creation-timestamp", "current-actions", "deleting", "description", "distribution-policy", "fingerprint", "fixed", "has-stateful-config", "id", "instance-group", "instance-redistribution-type", "instance-template", "is-reached", "is-stable", "kind", "max-surge", "max-unavailable", "minimal-action", "most-disruptive-allowed-action", "name", "none", "per-instance-configs", "percent", "recreating", "refreshing", "region", "replacement-method", "restarting", "resuming", "self-link", "starting", "stateful", "status", "stopping", "suspending", "target-pools", "target-shape", "target-size", "type", "update-policy", "verifying", "version-target", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoning", "all-effective", "autoscaler", "base-instance-name", "calculated", "creating", "creating-without-retries", "creation-timestamp", "current-actions", "deleting", "description", "distribution-policy", "fingerprint", "fixed", "has-stateful-config", "id", "instance-group", "instance-redistribution-type", "instance-template", "is-reached", "is-stable", "kind", "list-managed-instances-results", "max-surge", "max-unavailable", "minimal-action", "most-disruptive-allowed-action", "name", 
"none", "per-instance-configs", "percent", "recreating", "refreshing", "region", "replacement-method", "restarting", "resuming", "self-link", "starting", "stateful", "status", "stopping", "suspending", "target-pools", "target-shape", "target-size", "type", "update-policy", "verifying", "version-target", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -32412,7 +34095,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -32421,7 +34104,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -32480,7 +34163,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -32489,7 +34172,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -32548,7 +34231,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -32557,7 +34240,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -32616,7 +34299,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -32625,7 +34308,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -32722,6 +34405,7 @@ where "instance-group" => Some(("instanceGroup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-template" => Some(("instanceTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"list-managed-instances-results" => Some(("listManagedInstancesResults", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -32745,7 +34429,7 @@ where "update-policy.type" => Some(("updatePolicy.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoning", "all-effective", "autoscaler", "base-instance-name", "calculated", "creating", "creating-without-retries", "creation-timestamp", "current-actions", "deleting", "description", "distribution-policy", "fingerprint", "fixed", "has-stateful-config", "id", "instance-group", "instance-redistribution-type", "instance-template", "is-reached", "is-stable", "kind", "max-surge", "max-unavailable", "minimal-action", "most-disruptive-allowed-action", "name", "none", "per-instance-configs", "percent", "recreating", "refreshing", "region", "replacement-method", "restarting", "resuming", "self-link", "starting", "stateful", "status", "stopping", "suspending", "target-pools", "target-shape", "target-size", "type", "update-policy", "verifying", "version-target", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["abandoning", "all-effective", "autoscaler", "base-instance-name", "calculated", "creating", "creating-without-retries", "creation-timestamp", "current-actions", "deleting", "description", "distribution-policy", "fingerprint", "fixed", "has-stateful-config", "id", "instance-group", "instance-redistribution-type", "instance-template", "is-reached", "is-stable", "kind", "list-managed-instances-results", "max-surge", 
"max-unavailable", "minimal-action", "most-disruptive-allowed-action", "name", "none", "per-instance-configs", "percent", "recreating", "refreshing", "region", "replacement-method", "restarting", "resuming", "self-link", "starting", "stateful", "status", "stopping", "suspending", "target-pools", "target-shape", "target-size", "type", "update-policy", "verifying", "version-target", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -33369,7 +35053,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -33378,7 +35062,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -33471,7 +35155,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -33480,7 +35164,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| 
arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -33648,9 +35332,11 @@ where "instance-properties.advanced-machine-features.enable-nested-virtualization" => Some(("instanceProperties.advancedMachineFeatures.enableNestedVirtualization", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.advanced-machine-features.enable-uefi-networking" => Some(("instanceProperties.advancedMachineFeatures.enableUefiNetworking", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.advanced-machine-features.threads-per-core" => Some(("instanceProperties.advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "instance-properties.advanced-machine-features.visible-core-count" => Some(("instanceProperties.advancedMachineFeatures.visibleCoreCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "instance-properties.can-ip-forward" => Some(("instanceProperties.canIpForward", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.confidential-instance-config.enable-confidential-compute" => Some(("instanceProperties.confidentialInstanceConfig.enableConfidentialCompute", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.description" => Some(("instanceProperties.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "instance-properties.key-revocation-action-type" => Some(("instanceProperties.keyRevocationActionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-properties.labels" => Some(("instanceProperties.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "instance-properties.machine-type" => Some(("instanceProperties.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"instance-properties.metadata.fingerprint" => Some(("instanceProperties.metadata.fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -33675,11 +35361,12 @@ where "instance-properties.shielded-instance-config.enable-vtpm" => Some(("instanceProperties.shieldedInstanceConfig.enableVtpm", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "instance-properties.tags.fingerprint" => Some(("instanceProperties.tags.fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance-properties.tags.items" => Some(("instanceProperties.tags.items", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "location-policy.target-shape" => Some(("locationPolicy.targetShape", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "min-count" => Some(("minCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "name-pattern" => Some(("namePattern", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-instance-template" => Some(("sourceInstanceTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "count", "description", "enable-confidential-compute", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "instance-properties", "instance-termination-action", "items", "key", "kind", "labels", "location-hint", "machine-type", "metadata", "min-count", "min-cpu-platform", "min-node-cpus", "name-pattern", "network-performance-config", "on-host-maintenance", "preemptible", "private-ipv6-google-access", "provisioning-model", "reservation-affinity", "resource-manager-tags", "resource-policies", "scheduling", "shielded-instance-config", 
"source-instance-template", "tags", "threads-per-core", "total-egress-bandwidth-tier", "values"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "automatic-restart", "can-ip-forward", "confidential-instance-config", "consume-reservation-type", "count", "description", "enable-confidential-compute", "enable-integrity-monitoring", "enable-nested-virtualization", "enable-secure-boot", "enable-uefi-networking", "enable-vtpm", "fingerprint", "instance-properties", "instance-termination-action", "items", "key", "key-revocation-action-type", "kind", "labels", "location-hint", "location-policy", "machine-type", "metadata", "min-count", "min-cpu-platform", "min-node-cpus", "name-pattern", "network-performance-config", "on-host-maintenance", "preemptible", "private-ipv6-google-access", "provisioning-model", "reservation-affinity", "resource-manager-tags", "resource-policies", "scheduling", "shielded-instance-config", "source-instance-template", "tags", "target-shape", "threads-per-core", "total-egress-bandwidth-tier", "values", "visible-core-count"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -33891,6 +35578,9 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network-endpoint-type" => Some(("networkEndpointType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.consumer-psc-address" => Some(("pscData.consumerPscAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.psc-connection-id" => Some(("pscData.pscConnectionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "psc-data.psc-connection-status" => Some(("pscData.pscConnectionStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"psc-target-service" => Some(("pscTargetService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -33898,7 +35588,7 @@ where "subnetwork" => Some(("subnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "app-engine", "cloud-function", "cloud-run", "creation-timestamp", "default-port", "description", "function", "id", "kind", "name", "network", "network-endpoint-type", "psc-target-service", "region", "self-link", "service", "size", "subnetwork", "tag", "url-mask", "version", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "app-engine", "cloud-function", "cloud-run", "consumer-psc-address", "creation-timestamp", "default-port", "description", "function", "id", "kind", "name", "network", "network-endpoint-type", "psc-connection-id", "psc-connection-status", "psc-data", "psc-target-service", "region", "self-link", "service", "size", "subnetwork", "tag", "url-mask", "version", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -33969,7 +35659,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -33978,7 +35668,7 @@ where call = call.order_by(value.unwrap_or("")); }, 
"max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -34077,7 +35767,7 @@ where call = call.request_id(value.unwrap_or("")); }, "replace-existing-association" => { - call = call.replace_existing_association(arg_from_str(value.unwrap_or("false"), err, "replace-existing-association", "boolean")); + call = call.replace_existing_association( value.map(|v| arg_from_str(v, err, "replace-existing-association", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -34158,11 +35848,12 @@ where "match.dest-ip-ranges" => Some(("match.destIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "match.src-ip-ranges" => Some(("match.srcIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rule-name" => Some(("ruleName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rule-tuple-count" => Some(("ruleTupleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "target-resources" => Some(("targetResources", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "target-service-accounts" => Some(("targetServiceAccounts", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-name", "rule-tuple-count", 
"src-ip-ranges", "target-resources", "target-service-accounts"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -34180,10 +35871,10 @@ where call = call.request_id(value.unwrap_or("")); }, "min-priority" => { - call = call.min_priority(arg_from_str(value.unwrap_or("-0"), err, "min-priority", "integer")); + call = call.min_priority( value.map(|v| arg_from_str(v, err, "min-priority", "int32")).unwrap_or(-0)); }, "max-priority" => { - call = call.max_priority(arg_from_str(value.unwrap_or("-0"), err, "max-priority", "integer")); + call = call.max_priority( value.map(|v| arg_from_str(v, err, "max-priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -34514,7 +36205,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -34570,7 +36261,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -34727,7 +36418,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -34736,7 +36427,7 @@ 
where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -34921,11 +36612,12 @@ where "match.dest-ip-ranges" => Some(("match.destIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "match.src-ip-ranges" => Some(("match.srcIpRanges", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rule-name" => Some(("ruleName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rule-tuple-count" => Some(("ruleTupleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "target-resources" => Some(("targetResources", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "target-service-accounts" => Some(("targetServiceAccounts", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["action", "description", "dest-ip-ranges", "direction", "disabled", "enable-logging", "kind", "match", "priority", "rule-name", "rule-tuple-count", "src-ip-ranges", "target-resources", "target-service-accounts"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -34943,7 +36635,7 @@ where call = call.request_id(value.unwrap_or("")); }, "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", 
"integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -35061,7 +36753,7 @@ where call = call.request_id(value.unwrap_or("")); }, "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -35498,7 +37190,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -35507,7 +37199,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -35662,7 +37354,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -35671,7 +37363,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { 
call = call.filter(value.unwrap_or("")); @@ -35775,6 +37467,393 @@ where } } + async fn _region_security_policies_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_security_policies().delete(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("security-policy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_security_policies_get(&self, opt: &ArgMatches<'n>, 
dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_security_policies().get(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("security-policy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_security_policies_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "adaptive-protection-config.layer7-ddos-defense-config.enable" => Some(("adaptiveProtectionConfig.layer7DdosDefenseConfig.enable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "adaptive-protection-config.layer7-ddos-defense-config.rule-visibility" => Some(("adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "advanced-options-config.json-custom-config.content-types" => Some(("advancedOptionsConfig.jsonCustomConfig.contentTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "advanced-options-config.json-parsing" => Some(("advancedOptionsConfig.jsonParsing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "advanced-options-config.log-level" => Some(("advancedOptionsConfig.logLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ddos-protection-config.ddos-protection" => Some(("ddosProtectionConfig.ddosProtection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "recaptcha-options-config.redirect-site-key" => Some(("recaptchaOptionsConfig.redirectSiteKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["adaptive-protection-config", "advanced-options-config", "content-types", "creation-timestamp", "ddos-protection", "ddos-protection-config", "description", "enable", "fingerprint", "id", "json-custom-config", "json-parsing", "kind", "layer7-ddos-defense-config", "log-level", "name", "recaptcha-options-config", "redirect-site-key", "region", "rule-visibility", "self-link", "type"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SecurityPolicy = json::value::from_value(object).unwrap(); + let mut call = self.hub.region_security_policies().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + 
}, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_security_policies_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_security_policies().list(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => 
{ + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_security_policies_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "adaptive-protection-config.layer7-ddos-defense-config.enable" => Some(("adaptiveProtectionConfig.layer7DdosDefenseConfig.enable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "adaptive-protection-config.layer7-ddos-defense-config.rule-visibility" => Some(("adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "advanced-options-config.json-custom-config.content-types" => Some(("advancedOptionsConfig.jsonCustomConfig.contentTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "advanced-options-config.json-parsing" => Some(("advancedOptionsConfig.jsonParsing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "advanced-options-config.log-level" => Some(("advancedOptionsConfig.logLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ddos-protection-config.ddos-protection" => Some(("ddosProtectionConfig.ddosProtection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "recaptcha-options-config.redirect-site-key" => Some(("recaptchaOptionsConfig.redirectSiteKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["adaptive-protection-config", "advanced-options-config", "content-types", "creation-timestamp", "ddos-protection", "ddos-protection-config", "description", "enable", "fingerprint", "id", "json-custom-config", "json-parsing", "kind", "layer7-ddos-defense-config", "log-level", "name", "recaptcha-options-config", "redirect-site-key", "region", "rule-visibility", "self-link", "type"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SecurityPolicy = json::value::from_value(object).unwrap(); + let mut call = self.hub.region_security_policies().patch(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("security-policy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut 
found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _region_ssl_certificates_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.region_ssl_certificates().delete(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("ssl-certificate").unwrap_or("")); @@ -35995,7 +38074,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = 
call.page_token(value.unwrap_or("")); @@ -36004,7 +38083,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -36056,6 +38135,450 @@ where } } + async fn _region_ssl_policies_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_ssl_policies().delete(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("ssl-policy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut 
response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_ssl_policies_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_ssl_policies().get(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("ssl-policy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn 
_region_ssl_policies_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "custom-features" => Some(("customFeatures", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enabled-features" => Some(("enabledFeatures", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "min-tls-version" => Some(("minTlsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "profile" => Some(("profile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => 
Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "custom-features", "description", "enabled-features", "fingerprint", "id", "kind", "min-tls-version", "name", "profile", "region", "self-link"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SslPolicy = json::value::from_value(object).unwrap(); + let mut call = self.hub.region_ssl_policies().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_ssl_policies_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_ssl_policies().list(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + 
call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_ssl_policies_list_available_features(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_ssl_policies().list_available_features(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", 
"max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_ssl_policies_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "custom-features" => Some(("customFeatures", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enabled-features" => Some(("enabledFeatures", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "min-tls-version" => Some(("minTlsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "profile" => Some(("profile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "custom-features", "description", "enabled-features", "fingerprint", "id", "kind", "min-tls-version", "name", "profile", "region", "self-link"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SslPolicy = json::value::from_value(object).unwrap(); + let mut call = self.hub.region_ssl_policies().patch(request, 
opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("ssl-policy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _region_target_http_proxies_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.region_target_http_proxies().delete(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("target-http-proxy").unwrap_or("")); @@ -36269,7 +38792,7 @@ where let 
(key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -36278,7 +38801,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -36551,6 +39074,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "authorization-policy" => Some(("authorizationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "certificate-map" => Some(("certificateMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -36566,7 +39090,7 @@ where "ssl-policy" => Some(("sslPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url-map" => Some(("urlMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["authorization-policy", "creation-timestamp", "description", "fingerprint", "id", "kind", "name", "proxy-bind", "quic-override", "region", "self-link", "server-tls-policy", "ssl-certificates", "ssl-policy", "url-map"]); + let suggestion = 
FieldCursor::did_you_mean(key, &vec!["authorization-policy", "certificate-map", "creation-timestamp", "description", "fingerprint", "id", "kind", "name", "proxy-bind", "quic-override", "region", "self-link", "server-tls-policy", "ssl-certificates", "ssl-policy", "url-map"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -36637,7 +39161,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -36646,7 +39170,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -36698,6 +39222,110 @@ where } } + async fn _region_target_https_proxies_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static 
str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "authorization-policy" => Some(("authorizationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "certificate-map" => Some(("certificateMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "proxy-bind" => Some(("proxyBind", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "quic-override" => Some(("quicOverride", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "server-tls-policy" => Some(("serverTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ssl-certificates" => Some(("sslCertificates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "ssl-policy" => Some(("sslPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "url-map" => Some(("urlMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["authorization-policy", "certificate-map", "creation-timestamp", "description", "fingerprint", "id", "kind", "name", "proxy-bind", 
"quic-override", "region", "self-link", "server-tls-policy", "ssl-certificates", "ssl-policy", "url-map"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TargetHttpsProxy = json::value::from_value(object).unwrap(); + let mut call = self.hub.region_target_https_proxies().patch(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("target-https-proxy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let 
mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _region_target_https_proxies_set_ssl_certificates(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -36876,6 +39504,280 @@ where } } + async fn _region_target_tcp_proxies_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_target_tcp_proxies().delete(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("target-tcp-proxy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + 
Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_target_tcp_proxies_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_target_tcp_proxies().get(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("target-tcp-proxy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + 
async fn _region_target_tcp_proxies_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "proxy-bind" => Some(("proxyBind", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "proxy-header" => Some(("proxyHeader", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service" => Some(("service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "id", "kind", "name", "proxy-bind", 
"proxy-header", "region", "self-link", "service"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TargetTcpProxy = json::value::from_value(object).unwrap(); + let mut call = self.hub.region_target_tcp_proxies().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut 
value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _region_target_tcp_proxies_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.region_target_tcp_proxies().list(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match 
protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _region_url_maps_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.region_url_maps().delete(opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("url-map").unwrap_or("")); @@ -37120,7 +40022,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -37129,7 +40031,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -37623,7 +40525,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -37632,7 +40534,7 @@ where call = 
call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -37691,7 +40593,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -37700,10 +40602,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -37870,7 +40772,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -37948,19 +40850,22 @@ where "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-status.specific-sku-allocation.source-instance-template-id" => Some(("resourceStatus.specificSkuAllocation.sourceInstanceTemplateId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "share-settings.share-type" => Some(("shareSettings.shareType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "specific-reservation.assured-count" => Some(("specificReservation.assuredCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "specific-reservation.count" => Some(("specificReservation.count", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "specific-reservation.in-use-count" => Some(("specificReservation.inUseCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "specific-reservation.instance-properties.location-hint" => Some(("specificReservation.instanceProperties.locationHint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "specific-reservation.instance-properties.machine-type" => Some(("specificReservation.instanceProperties.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "specific-reservation.instance-properties.min-cpu-platform" => Some(("specificReservation.instanceProperties.minCpuPlatform", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "specific-reservation.source-instance-template" => Some(("specificReservation.sourceInstanceTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "specific-reservation-required" => Some(("specificReservationRequired", JsonTypeInfo { jtype: JsonType::Boolean, ctype: 
ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["commitment", "count", "creation-timestamp", "description", "id", "in-use-count", "instance-properties", "kind", "location-hint", "machine-type", "min-cpu-platform", "name", "satisfies-pzs", "self-link", "share-settings", "share-type", "specific-reservation", "specific-reservation-required", "status", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["assured-count", "commitment", "count", "creation-timestamp", "description", "id", "in-use-count", "instance-properties", "kind", "location-hint", "machine-type", "min-cpu-platform", "name", "resource-status", "satisfies-pzs", "self-link", "share-settings", "share-type", "source-instance-template", "source-instance-template-id", "specific-reservation", "specific-reservation-required", "specific-sku-allocation", "status", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -38031,7 +40936,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -38040,7 +40945,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); 
@@ -38382,19 +41287,22 @@ where "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-status.specific-sku-allocation.source-instance-template-id" => Some(("resourceStatus.specificSkuAllocation.sourceInstanceTemplateId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "share-settings.share-type" => Some(("shareSettings.shareType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "specific-reservation.assured-count" => Some(("specificReservation.assuredCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "specific-reservation.count" => Some(("specificReservation.count", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "specific-reservation.in-use-count" => Some(("specificReservation.inUseCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "specific-reservation.instance-properties.location-hint" => Some(("specificReservation.instanceProperties.locationHint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "specific-reservation.instance-properties.machine-type" => Some(("specificReservation.instanceProperties.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "specific-reservation.instance-properties.min-cpu-platform" => Some(("specificReservation.instanceProperties.minCpuPlatform", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "specific-reservation.source-instance-template" => Some(("specificReservation.sourceInstanceTemplate", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "specific-reservation-required" => Some(("specificReservationRequired", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["commitment", "count", "creation-timestamp", "description", "id", "in-use-count", "instance-properties", "kind", "location-hint", "machine-type", "min-cpu-platform", "name", "satisfies-pzs", "self-link", "share-settings", "share-type", "specific-reservation", "specific-reservation-required", "status", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["assured-count", "commitment", "count", "creation-timestamp", "description", "id", "in-use-count", "instance-properties", "kind", "location-hint", "machine-type", "min-cpu-platform", "name", "resource-status", "satisfies-pzs", "self-link", "share-settings", "share-type", "source-instance-template", "source-instance-template-id", "specific-reservation", "specific-reservation-required", "specific-sku-allocation", "status", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -38409,7 +41317,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -38471,7 +41379,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = 
call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -38480,10 +41388,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -38650,7 +41558,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -38824,7 +41732,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -38833,7 +41741,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" 
=> { call = call.filter(value.unwrap_or("")); @@ -39064,7 +41972,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -39073,10 +41981,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -39243,7 +42151,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -39252,7 +42160,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -39464,7 +42372,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -39473,7 +42381,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -39970,11 +42878,12 @@ where "next-hop-peering" => Some(("nextHopPeering", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "next-hop-vpn-tunnel" => Some(("nextHopVpnTunnel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "route-status" => Some(("routeStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "route-type" => Some(("routeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "tags" => Some(("tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "dest-range", "id", "kind", "name", "network", "next-hop-gateway", "next-hop-ilb", "next-hop-instance", "next-hop-ip", "next-hop-network", "next-hop-peering", "next-hop-vpn-tunnel", "priority", "route-type", "self-link", "tags"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "dest-range", "id", "kind", "name", "network", "next-hop-gateway", "next-hop-ilb", "next-hop-instance", 
"next-hop-ip", "next-hop-network", "next-hop-peering", "next-hop-vpn-tunnel", "priority", "route-status", "route-type", "self-link", "tags"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -40045,7 +42954,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -40054,7 +42963,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -40168,6 +43077,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -40181,6 +43093,78 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); + v.extend(["validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => 
return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _security_policies_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.security_policies().aggregated_list(opt.value_of("project").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "include-all-scopes" => { + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "include-all-scopes", "max-results", 
"order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); v } )); } } @@ -40329,7 +43313,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -40403,19 +43387,22 @@ where match &temp_cursor.to_string()[..] { "adaptive-protection-config.layer7-ddos-defense-config.enable" => Some(("adaptiveProtectionConfig.layer7DdosDefenseConfig.enable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "adaptive-protection-config.layer7-ddos-defense-config.rule-visibility" => Some(("adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "advanced-options-config.json-custom-config.content-types" => Some(("advancedOptionsConfig.jsonCustomConfig.contentTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "advanced-options-config.json-parsing" => Some(("advancedOptionsConfig.jsonParsing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "advanced-options-config.log-level" => Some(("advancedOptionsConfig.logLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ddos-protection-config.ddos-protection" => Some(("ddosProtectionConfig.ddosProtection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => 
Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "recaptcha-options-config.redirect-site-key" => Some(("recaptchaOptionsConfig.redirectSiteKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["adaptive-protection-config", "advanced-options-config", "creation-timestamp", "description", "enable", "fingerprint", "id", "json-parsing", "kind", "layer7-ddos-defense-config", "log-level", "name", "recaptcha-options-config", "redirect-site-key", "rule-visibility", "self-link", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["adaptive-protection-config", "advanced-options-config", "content-types", "creation-timestamp", "ddos-protection", "ddos-protection-config", "description", "enable", "fingerprint", "id", "json-custom-config", "json-parsing", "kind", "layer7-ddos-defense-config", "log-level", "name", "recaptcha-options-config", "redirect-site-key", "region", "rule-visibility", "self-link", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -40429,6 +43416,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, @@ -40445,7 +43435,7 @@ where 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["request-id"].iter().map(|v|*v)); + v.extend(["request-id", "validate-only"].iter().map(|v|*v)); v } )); } } @@ -40486,7 +43476,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -40495,7 +43485,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -40554,7 +43544,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -40563,7 +43553,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -40640,19 +43630,22 @@ where match &temp_cursor.to_string()[..] 
{ "adaptive-protection-config.layer7-ddos-defense-config.enable" => Some(("adaptiveProtectionConfig.layer7DdosDefenseConfig.enable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "adaptive-protection-config.layer7-ddos-defense-config.rule-visibility" => Some(("adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "advanced-options-config.json-custom-config.content-types" => Some(("advancedOptionsConfig.jsonCustomConfig.contentTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "advanced-options-config.json-parsing" => Some(("advancedOptionsConfig.jsonParsing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "advanced-options-config.log-level" => Some(("advancedOptionsConfig.logLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ddos-protection-config.ddos-protection" => Some(("ddosProtectionConfig.ddosProtection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "recaptcha-options-config.redirect-site-key" => Some(("recaptchaOptionsConfig.redirectSiteKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["adaptive-protection-config", "advanced-options-config", "creation-timestamp", "description", "enable", "fingerprint", "id", "json-parsing", "kind", "layer7-ddos-defense-config", "log-level", "name", "recaptcha-options-config", "redirect-site-key", "rule-visibility", "self-link", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["adaptive-protection-config", "advanced-options-config", "content-types", "creation-timestamp", "ddos-protection", "ddos-protection-config", "description", "enable", "fingerprint", "id", "json-custom-config", "json-parsing", "kind", "layer7-ddos-defense-config", "log-level", "name", "recaptcha-options-config", "redirect-site-key", "region", "rule-visibility", "self-link", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -40778,8 +43771,11 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, "priority" => { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -40794,7 +43790,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["priority"].iter().map(|v|*v)); + v.extend(["priority", "validate-only"].iter().map(|v|*v)); v } )); } } @@ -40835,7 +43831,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "priority" 
=> { - call = call.priority(arg_from_str(value.unwrap_or("-0"), err, "priority", "integer")); + call = call.priority( value.map(|v| arg_from_str(v, err, "priority", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -40884,6 +43880,92 @@ where } } + async fn _security_policies_set_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["label-fingerprint", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GlobalSetLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.security_policies().set_labels(request, opt.value_of("project").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + 
Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _service_attachments_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.service_attachments().aggregated_list(opt.value_of("project").unwrap_or("")); @@ -40891,7 +43973,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -40900,10 +43982,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -41070,7 +44152,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ 
=> { let mut found = false; @@ -41231,7 +44313,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -41240,7 +44322,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -41684,7 +44766,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -41756,8 +44838,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "architecture" => Some(("architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auto-created" => Some(("autoCreated", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "chain-name" => Some(("chainName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creation-size-bytes" => Some(("creationSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disk-size-gb" => Some(("diskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -41777,6 +44861,7 @@ where "snapshot-encryption-key.raw-key" => Some(("snapshotEncryptionKey.rawKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snapshot-encryption-key.rsa-encrypted-key" => Some(("snapshotEncryptionKey.rsaEncryptedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snapshot-encryption-key.sha256" => Some(("snapshotEncryptionKey.sha256", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snapshot-type" => Some(("snapshotType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk" => Some(("sourceDisk", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-encryption-key.kms-key-name" => Some(("sourceDiskEncryptionKey.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-encryption-key.kms-key-service-account" => Some(("sourceDiskEncryptionKey.kmsKeyServiceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -41784,12 +44869,14 @@ where "source-disk-encryption-key.rsa-encrypted-key" => Some(("sourceDiskEncryptionKey.rsaEncryptedKey", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "source-disk-encryption-key.sha256" => Some(("sourceDiskEncryptionKey.sha256", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-disk-id" => Some(("sourceDiskId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-snapshot-schedule-policy" => Some(("sourceSnapshotSchedulePolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-snapshot-schedule-policy-id" => Some(("sourceSnapshotSchedulePolicyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-bytes" => Some(("storageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-bytes-status" => Some(("storageBytesStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-locations" => Some(("storageLocations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-created", "chain-name", "creation-timestamp", "description", "disk-size-gb", "download-bytes", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "location-hint", "name", "raw-key", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha256", "snapshot-encryption-key", "source-disk", "source-disk-encryption-key", "source-disk-id", "status", "storage-bytes", "storage-bytes-status", "storage-locations"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["architecture", "auto-created", "chain-name", "creation-size-bytes", "creation-timestamp", "description", "disk-size-gb", "download-bytes", "id", "kind", "kms-key-name", "kms-key-service-account", "label-fingerprint", "labels", "license-codes", "licenses", "location-hint", "name", "raw-key", "rsa-encrypted-key", "satisfies-pzs", "self-link", "sha256", 
"snapshot-encryption-key", "snapshot-type", "source-disk", "source-disk-encryption-key", "source-disk-id", "source-snapshot-schedule-policy", "source-snapshot-schedule-policy-id", "status", "storage-bytes", "storage-bytes-status", "storage-locations"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -41860,7 +44947,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -41869,7 +44956,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -42186,7 +45273,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -42195,10 +45282,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = 
call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -42470,7 +45557,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -42479,7 +45566,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -42531,6 +45618,77 @@ where } } + async fn _ssl_policies_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.ssl_policies().aggregated_list(opt.value_of("project").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "include-all-scopes" => { + call = 
call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "include-all-scopes", "max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _ssl_policies_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.ssl_policies().delete(opt.value_of("project").unwrap_or(""), opt.value_of("ssl-policy").unwrap_or("")); @@ -42672,9 +45830,10 @@ where "min-tls-version" => Some(("minTlsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "profile" => Some(("profile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "custom-features", "description", "enabled-features", "fingerprint", "id", "kind", "min-tls-version", "name", "profile", "self-link"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "custom-features", "description", "enabled-features", "fingerprint", "id", "kind", "min-tls-version", "name", "profile", "region", "self-link"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -42745,7 +45904,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -42754,7 +45913,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -42813,7 +45972,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = 
call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -42822,7 +45981,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -42907,9 +46066,10 @@ where "min-tls-version" => Some(("minTlsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "profile" => Some(("profile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "custom-features", "description", "enabled-features", "fingerprint", "id", "kind", "min-tls-version", "name", "profile", "self-link"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "custom-features", "description", "enabled-features", "fingerprint", "id", "kind", "min-tls-version", "name", "profile", "region", "self-link"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -42980,7 +46140,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", 
"boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -42989,10 +46149,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -43248,7 +46408,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -43420,7 +46580,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -43429,7 +46589,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -43488,7 +46648,7 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -43497,7 +46657,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -43619,7 +46779,7 @@ where call = call.request_id(value.unwrap_or("")); }, "drain-timeout-seconds" => { - call = call.drain_timeout_seconds(arg_from_str(value.unwrap_or("-0"), err, "drain-timeout-seconds", "integer")); + call = call.drain_timeout_seconds( value.map(|v| arg_from_str(v, err, "drain-timeout-seconds", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -44142,7 +47302,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -44151,7 +47311,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -44308,7 +47468,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -44317,10 +47477,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -44585,7 +47745,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -44594,7 +47754,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -44840,7 +48000,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, 
"return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -44849,10 +48009,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -45036,6 +48196,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "authorization-policy" => Some(("authorizationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "certificate-map" => Some(("certificateMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -45051,7 +48212,7 @@ where "ssl-policy" => Some(("sslPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url-map" => Some(("urlMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["authorization-policy", "creation-timestamp", "description", "fingerprint", "id", "kind", "name", "proxy-bind", "quic-override", "region", "self-link", 
"server-tls-policy", "ssl-certificates", "ssl-policy", "url-map"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["authorization-policy", "certificate-map", "creation-timestamp", "description", "fingerprint", "id", "kind", "name", "proxy-bind", "quic-override", "region", "self-link", "server-tls-policy", "ssl-certificates", "ssl-policy", "url-map"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -45122,7 +48283,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -45131,7 +48292,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -45207,6 +48368,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "authorization-policy" => Some(("authorizationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "certificate-map" => Some(("certificateMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -45222,7 +48384,7 @@ where "ssl-policy" => Some(("sslPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "url-map" => Some(("urlMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["authorization-policy", "creation-timestamp", "description", "fingerprint", "id", "kind", "name", "proxy-bind", "quic-override", "region", "self-link", "server-tls-policy", "ssl-certificates", "ssl-policy", "url-map"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["authorization-policy", "certificate-map", "creation-timestamp", "description", "fingerprint", "id", "kind", "name", "proxy-bind", "quic-override", "region", "self-link", "server-tls-policy", "ssl-certificates", "ssl-policy", "url-map"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -45286,6 +48448,95 @@ where } } + async fn _target_https_proxies_set_certificate_map(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, 
false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "certificate-map" => Some(("certificateMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["certificate-map"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TargetHttpsProxiesSetCertificateMapRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.target_https_proxies().set_certificate_map(request, opt.value_of("project").unwrap_or(""), opt.value_of("target-https-proxy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _target_https_proxies_set_quic_override(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -45649,7 +48900,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -45658,10 +48909,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -45926,7 +49177,7 @@ where let (key, value) = 
parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -45935,7 +49186,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -46170,7 +49421,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -46179,10 +49430,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -46534,7 +49785,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = 
call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -46543,7 +49794,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -46814,7 +50065,7 @@ where call = call.request_id(value.unwrap_or("")); }, "failover-ratio" => { - call = call.failover_ratio(arg_from_str(value.unwrap_or("0.0"), err, "failover-ratio", "number")); + call = call.failover_ratio( value.map(|v| arg_from_str(v, err, "failover-ratio", "float")).unwrap_or(0.0)); }, _ => { let mut found = false; @@ -46994,6 +50245,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "certificate-map" => Some(("certificateMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creation-timestamp" => Some(("creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -47005,7 +50257,7 @@ where "ssl-certificates" => Some(("sslCertificates", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "ssl-policy" => Some(("sslPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "id", "kind", "name", "proxy-header", "self-link", "service", "ssl-certificates", "ssl-policy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["certificate-map", "creation-timestamp", "description", "id", "kind", "name", "proxy-header", "self-link", "service", "ssl-certificates", "ssl-policy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -47076,7 +50328,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -47085,7 +50337,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -47226,6 +50478,95 @@ where } } + async fn _target_ssl_proxies_set_certificate_map(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "certificate-map" => Some(("certificateMap", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["certificate-map"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TargetSslProxiesSetCertificateMapRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.target_ssl_proxies().set_certificate_map(request, opt.value_of("project").unwrap_or(""), opt.value_of("target-ssl-proxy").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { 
+ found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _target_ssl_proxies_set_proxy_header(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -47493,6 +50834,77 @@ where } } + async fn _target_tcp_proxies_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.target_tcp_proxies().aggregated_list(opt.value_of("project").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-partial-success" => { + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = 
call.page_token(value.unwrap_or("")); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); + }, + "include-all-scopes" => { + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "include-all-scopes", "max-results", "order-by", "page-token", "return-partial-success"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _target_tcp_proxies_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = 
self.hub.target_tcp_proxies().delete(opt.value_of("project").unwrap_or(""), opt.value_of("target-tcp-proxy").unwrap_or("")); @@ -47631,10 +51043,11 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proxy-bind" => Some(("proxyBind", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "proxy-header" => Some(("proxyHeader", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service" => Some(("service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "id", "kind", "name", "proxy-bind", "proxy-header", "self-link", "service"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-timestamp", "description", "id", "kind", "name", "proxy-bind", "proxy-header", "region", "self-link", "service"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -47705,7 +51118,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -47714,7 +51127,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => 
{ call = call.filter(value.unwrap_or("")); @@ -47951,7 +51364,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -47960,10 +51373,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -48229,7 +51642,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -48238,7 +51651,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -48290,6 +51703,96 @@ where } } + async fn _target_vpn_gateways_set_labels(&self, opt: &ArgMatches<'n>, 
dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["label-fingerprint", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RegionSetLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.target_vpn_gateways().set_labels(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _url_maps_aggregated_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.url_maps().aggregated_list(opt.value_of("project").unwrap_or("")); @@ -48297,7 +51800,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -48306,10 +51809,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -48695,7 +52198,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -48704,7 +52207,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -49147,7 +52650,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -49156,10 +52659,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, 
"max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, "include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -49477,7 +52980,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -49486,7 +52989,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -49720,7 +53223,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -49729,10 +53232,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-all-scopes" => { - call = call.include_all_scopes(arg_from_str(value.unwrap_or("false"), err, 
"include-all-scopes", "boolean")); + call = call.include_all_scopes( value.map(|v| arg_from_str(v, err, "include-all-scopes", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -50009,7 +53512,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -50018,7 +53521,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -50070,6 +53573,96 @@ where } } + async fn _vpn_tunnels_set_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "label-fingerprint" => Some(("labelFingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["label-fingerprint", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RegionSetLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.vpn_tunnels().set_labels(request, opt.value_of("project").unwrap_or(""), opt.value_of("region").unwrap_or(""), opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + 
match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _zone_operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.zone_operations().delete(opt.value_of("project").unwrap_or(""), opt.value_of("zone").unwrap_or(""), opt.value_of("operation").unwrap_or("")); @@ -50173,7 +53766,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -50182,7 +53775,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -50345,7 +53938,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-partial-success" => { - call = call.return_partial_success(arg_from_str(value.unwrap_or("false"), err, "return-partial-success", "boolean")); + call = call.return_partial_success( value.map(|v| arg_from_str(v, err, "return-partial-success", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -50354,7 +53947,7 @@ where call = 
call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -50445,6 +54038,9 @@ where ("list", Some(opt)) => { call_result = self._addresses_list(opt, dry_run, &mut err).await; }, + ("set-labels", Some(opt)) => { + call_result = self._addresses_set_labels(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("addresses".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -50535,6 +54131,9 @@ where ("get-health", Some(opt)) => { call_result = self._backend_services_get_health(opt, dry_run, &mut err).await; }, + ("get-iam-policy", Some(opt)) => { + call_result = self._backend_services_get_iam_policy(opt, dry_run, &mut err).await; + }, ("insert", Some(opt)) => { call_result = self._backend_services_insert(opt, dry_run, &mut err).await; }, @@ -50547,6 +54146,9 @@ where ("set-edge-security-policy", Some(opt)) => { call_result = self._backend_services_set_edge_security_policy(opt, dry_run, &mut err).await; }, + ("set-iam-policy", Some(opt)) => { + call_result = self._backend_services_set_iam_policy(opt, dry_run, &mut err).await; + }, ("set-security-policy", Some(opt)) => { call_result = self._backend_services_set_security_policy(opt, dry_run, &mut err).await; }, @@ -50783,6 +54385,9 @@ where ("list", Some(opt)) => { call_result = self._global_addresses_list(opt, dry_run, &mut err).await; }, + ("set-labels", Some(opt)) => { + call_result = self._global_addresses_set_labels(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("global-addresses".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -51338,6 +54943,9 @@ where ("patch", Some(opt)) => { call_result = self._interconnect_attachments_patch(opt, dry_run, &mut 
err).await; }, + ("set-labels", Some(opt)) => { + call_result = self._interconnect_attachments_set_labels(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("interconnect-attachments".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -51378,6 +54986,9 @@ where ("patch", Some(opt)) => { call_result = self._interconnects_patch(opt, dry_run, &mut err).await; }, + ("set-labels", Some(opt)) => { + call_result = self._interconnects_set_labels(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("interconnects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -51473,6 +55084,61 @@ where } } }, + ("network-attachments", Some(opt)) => { + match opt.subcommand() { + ("aggregated-list", Some(opt)) => { + call_result = self._network_attachments_aggregated_list(opt, dry_run, &mut err).await; + }, + ("delete", Some(opt)) => { + call_result = self._network_attachments_delete(opt, dry_run, &mut err).await; + }, + ("get", Some(opt)) => { + call_result = self._network_attachments_get(opt, dry_run, &mut err).await; + }, + ("get-iam-policy", Some(opt)) => { + call_result = self._network_attachments_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("insert", Some(opt)) => { + call_result = self._network_attachments_insert(opt, dry_run, &mut err).await; + }, + ("list", Some(opt)) => { + call_result = self._network_attachments_list(opt, dry_run, &mut err).await; + }, + ("set-iam-policy", Some(opt)) => { + call_result = self._network_attachments_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("test-iam-permissions", Some(opt)) => { + call_result = self._network_attachments_test_iam_permissions(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("network-attachments".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, + ("network-edge-security-services", Some(opt)) => { + match opt.subcommand() { + 
("aggregated-list", Some(opt)) => { + call_result = self._network_edge_security_services_aggregated_list(opt, dry_run, &mut err).await; + }, + ("delete", Some(opt)) => { + call_result = self._network_edge_security_services_delete(opt, dry_run, &mut err).await; + }, + ("get", Some(opt)) => { + call_result = self._network_edge_security_services_get(opt, dry_run, &mut err).await; + }, + ("insert", Some(opt)) => { + call_result = self._network_edge_security_services_insert(opt, dry_run, &mut err).await; + }, + ("patch", Some(opt)) => { + call_result = self._network_edge_security_services_patch(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("network-edge-security-services".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("network-endpoint-groups", Some(opt)) => { match opt.subcommand() { ("aggregated-list", Some(opt)) => { @@ -51863,6 +55529,9 @@ where ("get-health", Some(opt)) => { call_result = self._region_backend_services_get_health(opt, dry_run, &mut err).await; }, + ("get-iam-policy", Some(opt)) => { + call_result = self._region_backend_services_get_iam_policy(opt, dry_run, &mut err).await; + }, ("insert", Some(opt)) => { call_result = self._region_backend_services_insert(opt, dry_run, &mut err).await; }, @@ -51872,6 +55541,9 @@ where ("patch", Some(opt)) => { call_result = self._region_backend_services_patch(opt, dry_run, &mut err).await; }, + ("set-iam-policy", Some(opt)) => { + call_result = self._region_backend_services_set_iam_policy(opt, dry_run, &mut err).await; + }, ("update", Some(opt)) => { call_result = self._region_backend_services_update(opt, dry_run, &mut err).await; }, @@ -52226,6 +55898,29 @@ where } } }, + ("region-security-policies", Some(opt)) => { + match opt.subcommand() { + ("delete", Some(opt)) => { + call_result = self._region_security_policies_delete(opt, dry_run, &mut err).await; + }, + ("get", Some(opt)) => { + call_result = 
self._region_security_policies_get(opt, dry_run, &mut err).await; + }, + ("insert", Some(opt)) => { + call_result = self._region_security_policies_insert(opt, dry_run, &mut err).await; + }, + ("list", Some(opt)) => { + call_result = self._region_security_policies_list(opt, dry_run, &mut err).await; + }, + ("patch", Some(opt)) => { + call_result = self._region_security_policies_patch(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("region-security-policies".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("region-ssl-certificates", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { @@ -52246,6 +55941,32 @@ where } } }, + ("region-ssl-policies", Some(opt)) => { + match opt.subcommand() { + ("delete", Some(opt)) => { + call_result = self._region_ssl_policies_delete(opt, dry_run, &mut err).await; + }, + ("get", Some(opt)) => { + call_result = self._region_ssl_policies_get(opt, dry_run, &mut err).await; + }, + ("insert", Some(opt)) => { + call_result = self._region_ssl_policies_insert(opt, dry_run, &mut err).await; + }, + ("list", Some(opt)) => { + call_result = self._region_ssl_policies_list(opt, dry_run, &mut err).await; + }, + ("list-available-features", Some(opt)) => { + call_result = self._region_ssl_policies_list_available_features(opt, dry_run, &mut err).await; + }, + ("patch", Some(opt)) => { + call_result = self._region_ssl_policies_patch(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("region-ssl-policies".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("region-target-http-proxies", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { @@ -52283,6 +56004,9 @@ where ("list", Some(opt)) => { call_result = self._region_target_https_proxies_list(opt, dry_run, &mut err).await; }, + ("patch", Some(opt)) => { + call_result = self._region_target_https_proxies_patch(opt, dry_run, &mut 
err).await; + }, ("set-ssl-certificates", Some(opt)) => { call_result = self._region_target_https_proxies_set_ssl_certificates(opt, dry_run, &mut err).await; }, @@ -52295,6 +56019,26 @@ where } } }, + ("region-target-tcp-proxies", Some(opt)) => { + match opt.subcommand() { + ("delete", Some(opt)) => { + call_result = self._region_target_tcp_proxies_delete(opt, dry_run, &mut err).await; + }, + ("get", Some(opt)) => { + call_result = self._region_target_tcp_proxies_get(opt, dry_run, &mut err).await; + }, + ("insert", Some(opt)) => { + call_result = self._region_target_tcp_proxies_insert(opt, dry_run, &mut err).await; + }, + ("list", Some(opt)) => { + call_result = self._region_target_tcp_proxies_list(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("region-target-tcp-proxies".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("region-url-maps", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { @@ -52471,6 +56215,9 @@ where ("add-rule", Some(opt)) => { call_result = self._security_policies_add_rule(opt, dry_run, &mut err).await; }, + ("aggregated-list", Some(opt)) => { + call_result = self._security_policies_aggregated_list(opt, dry_run, &mut err).await; + }, ("delete", Some(opt)) => { call_result = self._security_policies_delete(opt, dry_run, &mut err).await; }, @@ -52498,6 +56245,9 @@ where ("remove-rule", Some(opt)) => { call_result = self._security_policies_remove_rule(opt, dry_run, &mut err).await; }, + ("set-labels", Some(opt)) => { + call_result = self._security_policies_set_labels(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("security-policies".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -52596,6 +56346,9 @@ where }, ("ssl-policies", Some(opt)) => { match opt.subcommand() { + ("aggregated-list", Some(opt)) => { + call_result = self._ssl_policies_aggregated_list(opt, dry_run, &mut err).await; + }, 
("delete", Some(opt)) => { call_result = self._ssl_policies_delete(opt, dry_run, &mut err).await; }, @@ -52736,6 +56489,9 @@ where ("patch", Some(opt)) => { call_result = self._target_https_proxies_patch(opt, dry_run, &mut err).await; }, + ("set-certificate-map", Some(opt)) => { + call_result = self._target_https_proxies_set_certificate_map(opt, dry_run, &mut err).await; + }, ("set-quic-override", Some(opt)) => { call_result = self._target_https_proxies_set_quic_override(opt, dry_run, &mut err).await; }, @@ -52835,6 +56591,9 @@ where ("set-backend-service", Some(opt)) => { call_result = self._target_ssl_proxies_set_backend_service(opt, dry_run, &mut err).await; }, + ("set-certificate-map", Some(opt)) => { + call_result = self._target_ssl_proxies_set_certificate_map(opt, dry_run, &mut err).await; + }, ("set-proxy-header", Some(opt)) => { call_result = self._target_ssl_proxies_set_proxy_header(opt, dry_run, &mut err).await; }, @@ -52852,6 +56611,9 @@ where }, ("target-tcp-proxies", Some(opt)) => { match opt.subcommand() { + ("aggregated-list", Some(opt)) => { + call_result = self._target_tcp_proxies_aggregated_list(opt, dry_run, &mut err).await; + }, ("delete", Some(opt)) => { call_result = self._target_tcp_proxies_delete(opt, dry_run, &mut err).await; }, @@ -52893,6 +56655,9 @@ where ("list", Some(opt)) => { call_result = self._target_vpn_gateways_list(opt, dry_run, &mut err).await; }, + ("set-labels", Some(opt)) => { + call_result = self._target_vpn_gateways_set_labels(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("target-vpn-gateways".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -52983,6 +56748,9 @@ where ("list", Some(opt)) => { call_result = self._vpn_tunnels_list(opt, dry_run, &mut err).await; }, + ("set-labels", Some(opt)) => { + call_result = self._vpn_tunnels_set_labels(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("vpn-tunnels".to_string())); 
writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -53184,7 +56952,7 @@ async fn main() { ]), ]), - ("addresses", "methods: 'aggregated-list', 'delete', 'get', 'insert' and 'list'", vec![ + ("addresses", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list' and 'set-labels'", vec![ ("aggregated-list", Some(r##"Retrieves an aggregated list of addresses."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/addresses_aggregated-list", @@ -53331,6 +57099,46 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-labels", + Some(r##"Sets the labels on an Address. To learn more about labels, read the Labeling Resources documentation."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/addresses_set-labels", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The region for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -53841,7 +57649,7 @@ async fn main() { ]), ]), - ("backend-services", "methods: 'add-signed-url-key', 'aggregated-list', 'delete', 'delete-signed-url-key', 'get', 'get-health', 'insert', 'list', 'patch', 'set-edge-security-policy', 'set-security-policy' and 'update'", vec![ + ("backend-services", "methods: 
'add-signed-url-key', 'aggregated-list', 'delete', 'delete-signed-url-key', 'get', 'get-health', 'get-iam-policy', 'insert', 'list', 'patch', 'set-edge-security-policy', 'set-iam-policy', 'set-security-policy' and 'update'", vec![ ("add-signed-url-key", Some(r##"Adds a key for validating requests with signed URLs for this backend service."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/backend-services_add-signed-url-key", @@ -54016,6 +57824,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-iam-policy", + Some(r##"Gets the access control policy for a resource. May be empty if no such policy or resource exists."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/backend-services_get-iam-policy", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -54134,6 +57970,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. 
Replaces any existing policy."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/backend-services_set-iam-policy", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -55818,7 +59688,7 @@ async fn main() { ]), ]), - ("global-addresses", "methods: 'delete', 'get', 'insert' and 'list'", vec![ + ("global-addresses", "methods: 'delete', 'get', 'insert', 'list' and 'set-labels'", vec![ ("delete", Some(r##"Deletes the specified address resource."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/global-addresses_delete", @@ -55919,6 +59789,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-labels", + Some(r##"Sets the labels on a GlobalAddress. 
To learn more about labels, read the Labeling Resources documentation."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/global-addresses_set-labels", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -57697,7 +61601,7 @@ async fn main() { Some(false)), ]), ("create-instances", - Some(r##"Creates instances with per-instance configs in this managed instance group. Instances are created using the current instance template. The create instances operation is marked DONE if the createInstances request is successful. The underlying actions take additional time. You must separately verify the status of the creating or actions with the listmanagedinstances method."##), + Some(r##"Creates instances with per-instance configurations in this managed instance group. Instances are created using the current instance template. The create instances operation is marked DONE if the createInstances request is successful. The underlying actions take additional time. 
You must separately verify the status of the creating or actions with the listmanagedinstances method."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instance-group-managers_create-instances", vec![ (Some(r##"project"##), @@ -57811,7 +61715,7 @@ async fn main() { Some(false)), ]), ("delete-per-instance-configs", - Some(r##"Deletes selected per-instance configs for the managed instance group."##), + Some(r##"Deletes selected per-instance configurations for the managed instance group."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instance-group-managers_delete-per-instance-configs", vec![ (Some(r##"project"##), @@ -57981,7 +61885,7 @@ async fn main() { Some(false)), ]), ("list-managed-instances", - Some(r##"Lists all of the instances in the managed instance group. Each instance in the list has a currentAction, which indicates the action that the managed instance group is performing on the instance. For example, if the group is still creating an instance, the currentAction is CREATING. If a previous action failed, the list displays the errors for that failed action. The orderBy query parameter is not supported."##), + Some(r##"Lists all of the instances in the managed instance group. Each instance in the list has a currentAction, which indicates the action that the managed instance group is performing on the instance. For example, if the group is still creating an instance, the currentAction is CREATING. If a previous action failed, the list displays the errors for that failed action. The orderBy query parameter is not supported. 
The `pageToken` query parameter is supported only in the alpha and beta API and only if the group's `listManagedInstancesResults` field is set to `PAGINATED`."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instance-group-managers_list-managed-instances", vec![ (Some(r##"project"##), @@ -58015,7 +61919,7 @@ async fn main() { Some(false)), ]), ("list-per-instance-configs", - Some(r##"Lists all of the per-instance configs defined for the managed instance group. The orderBy query parameter is not supported."##), + Some(r##"Lists all of the per-instance configurations defined for the managed instance group. The orderBy query parameter is not supported."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instance-group-managers_list-per-instance-configs", vec![ (Some(r##"project"##), @@ -58089,7 +61993,7 @@ async fn main() { Some(false)), ]), ("patch-per-instance-configs", - Some(r##"Inserts or patches per-instance configs for the managed instance group. perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch."##), + Some(r##"Inserts or patches per-instance configurations for the managed instance group. perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instance-group-managers_patch-per-instance-configs", vec![ (Some(r##"project"##), @@ -58289,7 +62193,7 @@ async fn main() { Some(false)), ]), ("update-per-instance-configs", - Some(r##"Inserts or updates per-instance configs for the managed instance group. perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch."##), + Some(r##"Inserts or updates per-instance configurations for the managed instance group. 
perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instance-group-managers_update-per-instance-configs", vec![ (Some(r##"project"##), @@ -58524,7 +62428,7 @@ async fn main() { Some(false)), ]), ("list-instances", - Some(r##"Lists the instances in the specified instance group. The orderBy query parameter is not supported."##), + Some(r##"Lists the instances in the specified instance group. The orderBy query parameter is not supported. The filter query parameter is supported, but only for expressions that use `eq` (equal) or `ne` (not equal) operators."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instance-groups_list-instances", vec![ (Some(r##"project"##), @@ -59000,7 +62904,7 @@ async fn main() { Some(false)), ]), ("bulk-insert", - Some(r##"Creates multiple instances. Count specifies the number of instances to create."##), + Some(r##"Creates multiple instances. Count specifies the number of instances to create. For more information, see About bulk creation of VMs."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instances_bulk-insert", vec![ (Some(r##"project"##), @@ -59534,7 +63438,7 @@ async fn main() { Some(false)), ]), ("reset", - Some(r##"Performs a reset on the instance. This is a hard reset the VM does not do a graceful shutdown. For more information, see Resetting an instance."##), + Some(r##"Performs a reset on the instance. This is a hard reset. The VM does not do a graceful shutdown. For more information, see Resetting an instance."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instances_reset", vec![ (Some(r##"project"##), @@ -59950,7 +63854,7 @@ async fn main() { Some(false)), ]), ("set-scheduling", - Some(r##"Sets an instance's scheduling options. 
You can only call this method on a stopped instance, that is, a VM instance that is in a `TERMINATED` state. See Instance Life Cycle for more information on the possible instance states. For more information about setting scheduling options for a VM, see Set VM availability policies."##), + Some(r##"Sets an instance's scheduling options. You can only call this method on a stopped instance, that is, a VM instance that is in a `TERMINATED` state. See Instance Life Cycle for more information on the possible instance states. For more information about setting scheduling options for a VM, see Set VM host maintenance policy."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/instances_set-scheduling", vec![ (Some(r##"project"##), @@ -60539,7 +64443,7 @@ async fn main() { ]), ]), - ("interconnect-attachments", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list' and 'patch'", vec![ + ("interconnect-attachments", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list', 'patch' and 'set-labels'", vec![ ("aggregated-list", Some(r##"Retrieves an aggregated list of interconnect attachments."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnect-attachments_aggregated-list", @@ -60726,6 +64630,46 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-labels", + Some(r##"Sets the labels on an InterconnectAttachment. 
To learn more about labels, read the Labeling Resources documentation."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnect-attachments_set-labels", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The region for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -60787,9 +64731,9 @@ async fn main() { ]), ]), - ("interconnects", "methods: 'delete', 'get', 'get-diagnostics', 'insert', 'list' and 'patch'", vec![ + ("interconnects", "methods: 'delete', 'get', 'get-diagnostics', 'insert', 'list', 'patch' and 'set-labels'", vec![ ("delete", - Some(r##"Deletes the specified interconnect."##), + Some(r##"Deletes the specified Interconnect."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnects_delete", vec![ (Some(r##"project"##), @@ -60817,7 +64761,7 @@ async fn main() { Some(false)), ]), ("get", - Some(r##"Returns the specified interconnect. Get a list of available interconnects by making a list() request."##), + Some(r##"Returns the specified Interconnect. 
Get a list of available Interconnects by making a list() request."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnects_get", vec![ (Some(r##"project"##), @@ -60845,7 +64789,7 @@ async fn main() { Some(false)), ]), ("get-diagnostics", - Some(r##"Returns the interconnectDiagnostics for the specified interconnect."##), + Some(r##"Returns the interconnectDiagnostics for the specified Interconnect."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnects_get-diagnostics", vec![ (Some(r##"project"##), @@ -60873,7 +64817,7 @@ async fn main() { Some(false)), ]), ("insert", - Some(r##"Creates a Interconnect in the specified project using the data included in the request."##), + Some(r##"Creates an Interconnect in the specified project using the data included in the request."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnects_insert", vec![ (Some(r##"project"##), @@ -60901,7 +64845,7 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"Retrieves the list of interconnect available to the specified project."##), + Some(r##"Retrieves the list of Interconnects available to the specified project."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnects_list", vec![ (Some(r##"project"##), @@ -60923,7 +64867,7 @@ async fn main() { Some(false)), ]), ("patch", - Some(r##"Updates the specified interconnect with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules."##), + Some(r##"Updates the specified Interconnect with the data included in the request. 
This method supports PATCH semantics and uses the JSON merge patch format and processing rules."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnects_patch", vec![ (Some(r##"project"##), @@ -60950,6 +64894,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-labels", + Some(r##"Sets the labels on an Interconnect. To learn more about labels, read the Labeling Resources documentation."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/interconnects_set-labels", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -61520,6 +65498,442 @@ async fn main() { ]), ]), + ("network-attachments", "methods: 'aggregated-list', 'delete', 'get', 'get-iam-policy', 'insert', 'list', 'set-iam-policy' and 'test-iam-permissions'", vec![ + ("aggregated-list", + Some(r##"Retrieves the list of all NetworkAttachment resources, regional and global, available to the specified project."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-attachments_aggregated-list", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional 
parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("delete", + Some(r##"Deletes the specified NetworkAttachment in the given scope"##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-attachments_delete", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region of this request."##), + Some(true), + Some(false)), + + (Some(r##"network-attachment"##), + None, + Some(r##"Name of the NetworkAttachment resource to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"Returns the specified NetworkAttachment resource in the given scope."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-attachments_get", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region of this request."##), + Some(true), + Some(false)), + + (Some(r##"network-attachment"##), + None, + Some(r##"Name of the NetworkAttachment resource to return."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-iam-policy", + Some(r##"Gets the 
access control policy for a resource. May be empty if no such policy or resource exists."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-attachments_get-iam-policy", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The name of the region for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("insert", + Some(r##"Creates a NetworkAttachment in the specified project in the given scope using the parameters that are included in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-attachments_insert", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region of this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list", + Some(r##"Lists the NetworkAttachments for a project in the given scope."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-attachments_list", 
+ vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region of this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-attachments_set-iam-policy", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The name of the region for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-attachments_test-iam-permissions", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The name of the region for this 
request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + + ("network-edge-security-services", "methods: 'aggregated-list', 'delete', 'get', 'insert' and 'patch'", vec![ + ("aggregated-list", + Some(r##"Retrieves the list of all NetworkEdgeSecurityService resources available to the specified project."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-edge-security-services_aggregated-list", + vec![ + (Some(r##"project"##), + None, + Some(r##"Name of the project scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("delete", + Some(r##"Deletes the specified service."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-edge-security-services_delete", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"network-edge-security-service"##), + None, + Some(r##"Name of the network edge security service to delete."##), + Some(true), + Some(false)), + 
+ (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"Gets a specified NetworkEdgeSecurityService."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-edge-security-services_get", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"network-edge-security-service"##), + None, + Some(r##"Name of the network edge security service to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("insert", + Some(r##"Creates a new service in the specified project using the data included in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-edge-security-services_insert", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the 
file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("patch", + Some(r##"Patches the specified policy with the data included in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/network-edge-security-services_patch", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"network-edge-security-service"##), + None, + Some(r##"Name of the network edge security service to update."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("network-endpoint-groups", "methods: 'aggregated-list', 'attach-network-endpoints', 'delete', 'detach-network-endpoints', 'get', 'insert', 'list', 'list-network-endpoints' and 'test-iam-permissions'", vec![ ("aggregated-list", Some(r##"Retrieves the list of network endpoint groups and sorts them by zone."##), @@ -64581,7 +68995,7 @@ async fn main() { ]), ]), - ("region-backend-services", "methods: 'delete', 'get', 'get-health', 'insert', 'list', 'patch' and 'update'", vec![ + ("region-backend-services", "methods: 'delete', 'get', 'get-health', 'get-iam-policy', 'insert', 'list', 'patch', 'set-iam-policy' and 'update'", vec![ ("delete", Some(r##"Deletes the specified regional BackendService resource."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-backend-services_delete", @@ -64684,6 +69098,40 @@ 
async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-iam-policy", + Some(r##"Gets the access control policy for a resource. May be empty if no such policy or resource exists."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-backend-services_get-iam-policy", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The name of the region for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -64786,6 +69234,46 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. 
Replaces any existing policy."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-backend-services_set-iam-policy", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The name of the region for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -65975,7 +70463,7 @@ async fn main() { Some(false)), ]), ("create-instances", - Some(r##"Creates instances with per-instance configs in this regional managed instance group. Instances are created using the current instance template. The create instances operation is marked DONE if the createInstances request is successful. The underlying actions take additional time. You must separately verify the status of the creating or actions with the listmanagedinstances method."##), + Some(r##"Creates instances with per-instance configurations in this regional managed instance group. Instances are created using the current instance template. The create instances operation is marked DONE if the createInstances request is successful. The underlying actions take additional time. 
You must separately verify the status of the creating or actions with the listmanagedinstances method."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-instance-group-managers_create-instances", vec![ (Some(r##"project"##), @@ -66089,7 +70577,7 @@ async fn main() { Some(false)), ]), ("delete-per-instance-configs", - Some(r##"Deletes selected per-instance configs for the managed instance group."##), + Some(r##"Deletes selected per-instance configurations for the managed instance group."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-instance-group-managers_delete-per-instance-configs", vec![ (Some(r##"project"##), @@ -66259,7 +70747,7 @@ async fn main() { Some(false)), ]), ("list-managed-instances", - Some(r##"Lists the instances in the managed instance group and instances that are scheduled to be created. The list includes any current actions that the group has scheduled for its instances. The orderBy query parameter is not supported."##), + Some(r##"Lists the instances in the managed instance group and instances that are scheduled to be created. The list includes any current actions that the group has scheduled for its instances. The orderBy query parameter is not supported. The `pageToken` query parameter is supported only in the alpha and beta API and only if the group's `listManagedInstancesResults` field is set to `PAGINATED`."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-instance-group-managers_list-managed-instances", vec![ (Some(r##"project"##), @@ -66293,7 +70781,7 @@ async fn main() { Some(false)), ]), ("list-per-instance-configs", - Some(r##"Lists all of the per-instance configs defined for the managed instance group. The orderBy query parameter is not supported."##), + Some(r##"Lists all of the per-instance configurations defined for the managed instance group. 
The orderBy query parameter is not supported."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-instance-group-managers_list-per-instance-configs", vec![ (Some(r##"project"##), @@ -66367,7 +70855,7 @@ async fn main() { Some(false)), ]), ("patch-per-instance-configs", - Some(r##"Inserts or patches per-instance configs for the managed instance group. perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch."##), + Some(r##"Inserts or patches per-instance configurations for the managed instance group. perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-instance-group-managers_patch-per-instance-configs", vec![ (Some(r##"project"##), @@ -66567,7 +71055,7 @@ async fn main() { Some(false)), ]), ("update-per-instance-configs", - Some(r##"Inserts or updates per-instance configs for the managed instance group. perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch."##), + Some(r##"Inserts or updates per-instance configurations for the managed instance group. 
perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-instance-group-managers_update-per-instance-configs", vec![ (Some(r##"project"##), @@ -67794,6 +72282,179 @@ async fn main() { ]), ]), + ("region-security-policies", "methods: 'delete', 'get', 'insert', 'list' and 'patch'", vec![ + ("delete", + Some(r##"Deletes the specified policy."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-security-policies_delete", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"security-policy"##), + None, + Some(r##"Name of the security policy to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"List all of the ordered rules present in a single specified policy."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-security-policies_get", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"security-policy"##), + None, + Some(r##"Name of the security policy to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify 
the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("insert", + Some(r##"Creates a new policy in the specified project using the data included in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-security-policies_insert", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list", + Some(r##"List all the policies that have been configured for the specified project and region."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-security-policies_list", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("patch", + Some(r##"Patches the specified policy with the data included in the request. To clear fields in the rule, leave the fields empty and specify them in the updateMask. This cannot be used to be update the rules in the policy. 
Please use the per rule methods like addRule, patchRule, and removeRule instead."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-security-policies_patch", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"security-policy"##), + None, + Some(r##"Name of the security policy to update."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("region-ssl-certificates", "methods: 'delete', 'get', 'insert' and 'list'", vec![ ("delete", Some(r##"Deletes the specified SslCertificate resource in the region."##), @@ -67927,6 +72588,207 @@ async fn main() { ]), ]), + ("region-ssl-policies", "methods: 'delete', 'get', 'insert', 'list', 'list-available-features' and 'patch'", vec![ + ("delete", + Some(r##"Deletes the specified SSL policy. The SSL policy resource can be deleted only if it is not in use by any TargetHttpsProxy or TargetSslProxy resources."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-ssl-policies_delete", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"ssl-policy"##), + None, + Some(r##"Name of the SSL policy to delete. 
The name must be 1-63 characters long, and comply with RFC1035."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"Lists all of the ordered rules present in a single specified policy."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-ssl-policies_get", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"ssl-policy"##), + None, + Some(r##"Name of the SSL policy to update. The name must be 1-63 characters long, and comply with RFC1035."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("insert", + Some(r##"Creates a new policy in the specified project and region using the data included in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-ssl-policies_insert", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional 
parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list", + Some(r##"Lists all the SSL policies that have been configured for the specified project and region."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-ssl-policies_list", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list-available-features", + Some(r##"Lists all features that can be specified in the SSL policy when using custom profile."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-ssl-policies_list-available-features", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("patch", + Some(r##"Patches the specified SSL policy with the data included in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-ssl-policies_patch", + vec![ + (Some(r##"project"##), + 
None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"ssl-policy"##), + None, + Some(r##"Name of the SSL policy to update. The name must be 1-63 characters long, and comply with RFC1035."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("region-target-http-proxies", "methods: 'delete', 'get', 'insert', 'list' and 'set-url-map'", vec![ ("delete", Some(r##"Deletes the specified TargetHttpProxy resource."##), @@ -68100,7 +72962,7 @@ async fn main() { ]), ]), - ("region-target-https-proxies", "methods: 'delete', 'get', 'insert', 'list', 'set-ssl-certificates' and 'set-url-map'", vec![ + ("region-target-https-proxies", "methods: 'delete', 'get', 'insert', 'list', 'patch', 'set-ssl-certificates' and 'set-url-map'", vec![ ("delete", Some(r##"Deletes the specified TargetHttpsProxy resource."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-target-https-proxies_delete", @@ -68225,6 +73087,46 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("patch", + Some(r##"Patches the specified regional TargetHttpsProxy resource with the data included in the request. 
This method supports PATCH semantics and uses JSON merge patch format and processing rules."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-target-https-proxies_patch", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region for this request."##), + Some(true), + Some(false)), + + (Some(r##"target-https-proxy"##), + None, + Some(r##"Name of the TargetHttpsProxy resource to patch."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -68313,6 +73215,139 @@ async fn main() { ]), ]), + ("region-target-tcp-proxies", "methods: 'delete', 'get', 'insert' and 'list'", vec![ + ("delete", + Some(r##"Deletes the specified TargetTcpProxy resource."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-target-tcp-proxies_delete", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"target-tcp-proxy"##), + None, + Some(r##"Name of the TargetTcpProxy resource to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + 
Some(r##"Returns the specified TargetTcpProxy resource."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-target-tcp-proxies_get", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"target-tcp-proxy"##), + None, + Some(r##"Name of the TargetTcpProxy resource to return."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("insert", + Some(r##"Creates a TargetTcpProxy resource in the specified project and region using the data included in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/region-target-tcp-proxies_insert", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list", + Some(r##"Retrieves a list of TargetTcpProxy resources available to the specified project in a given region."##), + "Details at 
http://byron.github.io/google-apis-rs/google_compute1_cli/region-target-tcp-proxies_list", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"Name of the region scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("region-url-maps", "methods: 'delete', 'get', 'insert', 'list', 'patch', 'update' and 'validate'", vec![ ("delete", Some(r##"Deletes the specified UrlMap resource."##), @@ -69689,7 +74724,7 @@ async fn main() { ]), ]), - ("security-policies", "methods: 'add-rule', 'delete', 'get', 'get-rule', 'insert', 'list', 'list-preconfigured-expression-sets', 'patch', 'patch-rule' and 'remove-rule'", vec![ + ("security-policies", "methods: 'add-rule', 'aggregated-list', 'delete', 'get', 'get-rule', 'insert', 'list', 'list-preconfigured-expression-sets', 'patch', 'patch-rule', 'remove-rule' and 'set-labels'", vec![ ("add-rule", Some(r##"Inserts a rule into a security policy."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/security-policies_add-rule", @@ -69718,6 +74753,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("aggregated-list", + Some(r##"Retrieves the list of all SecurityPolicy resources, regional and global, available to the specified project."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/security-policies_aggregated-list", + vec![ + (Some(r##"project"##), + None, + Some(r##"Name of the project scoping this request."##), + Some(true), + 
Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -69881,7 +74938,7 @@ async fn main() { Some(false)), ]), ("patch", - Some(r##"Patches the specified policy with the data included in the request. This cannot be used to be update the rules in the policy. Please use the per rule methods like addRule, patchRule, and removeRule instead."##), + Some(r##"Patches the specified policy with the data included in the request. To clear fields in the rule, leave the fields empty and specify them in the updateMask. This cannot be used to be update the rules in the policy. Please use the per rule methods like addRule, patchRule, and removeRule instead."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/security-policies_patch", vec![ (Some(r##"project"##), @@ -69970,6 +75027,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-labels", + Some(r##"Sets the labels on a security policy. 
To learn more about labels, read the Labeling Resources documentation."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/security-policies_set-labels", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -70657,7 +75748,29 @@ async fn main() { ]), ]), - ("ssl-policies", "methods: 'delete', 'get', 'insert', 'list', 'list-available-features' and 'patch'", vec![ + ("ssl-policies", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list', 'list-available-features' and 'patch'", vec![ + ("aggregated-list", + Some(r##"Retrieves the list of all SslPolicy resources, regional and global, available to the specified project."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/ssl-policies_aggregated-list", + vec![ + (Some(r##"project"##), + None, + Some(r##"Name of the project scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("delete", Some(r##"Deletes the specified SSL policy. 
The SSL policy resource can be deleted only if it is not in use by any TargetHttpsProxy or TargetSslProxy resources."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/ssl-policies_delete", @@ -71575,7 +76688,7 @@ async fn main() { ]), ]), - ("target-https-proxies", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list', 'patch', 'set-quic-override', 'set-ssl-certificates', 'set-ssl-policy' and 'set-url-map'", vec![ + ("target-https-proxies", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list', 'patch', 'set-certificate-map', 'set-quic-override', 'set-ssl-certificates', 'set-ssl-policy' and 'set-url-map'", vec![ ("aggregated-list", Some(r##"Retrieves the list of all TargetHttpsProxy resources, regional and global, available to the specified project."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/target-https-proxies_aggregated-list", @@ -71732,6 +76845,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-certificate-map", + Some(r##"Changes the Certificate Map for TargetHttpsProxy."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/target-https-proxies_set-certificate-map", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"target-https-proxy"##), + None, + Some(r##"Name of the TargetHttpsProxy resource whose CertificateMap is to be set. 
The name must be 1-63 characters long, and comply with RFC1035."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -72426,7 +77573,7 @@ async fn main() { ]), ]), - ("target-ssl-proxies", "methods: 'delete', 'get', 'insert', 'list', 'set-backend-service', 'set-proxy-header', 'set-ssl-certificates' and 'set-ssl-policy'", vec![ + ("target-ssl-proxies", "methods: 'delete', 'get', 'insert', 'list', 'set-backend-service', 'set-certificate-map', 'set-proxy-header', 'set-ssl-certificates' and 'set-ssl-policy'", vec![ ("delete", Some(r##"Deletes the specified TargetSslProxy resource."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/target-ssl-proxies_delete", @@ -72561,6 +77708,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-certificate-map", + Some(r##"Changes the Certificate Map for TargetSslProxy."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/target-ssl-proxies_set-certificate-map", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"target-ssl-proxy"##), + None, + Some(r##"Name of the TargetSslProxy resource whose CertificateMap is to be set. 
The name must be 1-63 characters long, and comply with RFC1035."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -72671,7 +77852,29 @@ async fn main() { ]), ]), - ("target-tcp-proxies", "methods: 'delete', 'get', 'insert', 'list', 'set-backend-service' and 'set-proxy-header'", vec![ + ("target-tcp-proxies", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list', 'set-backend-service' and 'set-proxy-header'", vec![ + ("aggregated-list", + Some(r##"Retrieves the list of all TargetTcpProxy resources, regional and global, available to the specified project."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/target-tcp-proxies_aggregated-list", + vec![ + (Some(r##"project"##), + None, + Some(r##"Name of the project scoping this request."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("delete", Some(r##"Deletes the specified TargetTcpProxy resource."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/target-tcp-proxies_delete", @@ -72848,7 +78051,7 @@ async fn main() { ]), ]), - ("target-vpn-gateways", "methods: 'aggregated-list', 'delete', 'get', 'insert' and 'list'", vec![ + ("target-vpn-gateways", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list' and 'set-labels'", vec![ ("aggregated-list", Some(r##"Retrieves an aggregated 
list of target VPN gateways."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/target-vpn-gateways_aggregated-list", @@ -72995,6 +78198,46 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-labels", + Some(r##"Sets the labels on a TargetVpnGateway. To learn more about labels, read the Labeling Resources documentation."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/target-vpn-gateways_set-labels", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The region for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -73539,7 +78782,7 @@ async fn main() { ]), ]), - ("vpn-tunnels", "methods: 'aggregated-list', 'delete', 'get', 'insert' and 'list'", vec![ + ("vpn-tunnels", "methods: 'aggregated-list', 'delete', 'get', 'insert', 'list' and 'set-labels'", vec![ ("aggregated-list", Some(r##"Retrieves an aggregated list of VPN tunnels."##), "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/vpn-tunnels_aggregated-list", @@ -73686,6 +78929,46 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + 
Some(false)), + ]), + ("set-labels", + Some(r##"Sets the labels on a VpnTunnel. To learn more about labels, read the Labeling Resources documentation."##), + "Details at http://byron.github.io/google-apis-rs/google_compute1_cli/vpn-tunnels_set-labels", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID for this request."##), + Some(true), + Some(false)), + + (Some(r##"region"##), + None, + Some(r##"The region for this request."##), + Some(true), + Some(false)), + + (Some(r##"resource"##), + None, + Some(r##"Name or id of the resource for this request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -73878,7 +79161,7 @@ async fn main() { let mut app = App::new("compute1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230103") .about("Creates and runs virtual machines on Google Cloud Platform. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_compute1_cli") .arg(Arg::with_name("url") diff --git a/gen/compute1/Cargo.toml b/gen/compute1/Cargo.toml index a560fea57b..4c5f517e9b 100644 --- a/gen/compute1/Cargo.toml +++ b/gen/compute1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-compute1" -version = "5.0.2-beta-1+20230103" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with compute (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/compute1" homepage = "https://cloud.google.com/compute/" -documentation = "https://docs.rs/google-compute1/5.0.2-beta-1+20230103" +documentation = "https://docs.rs/google-compute1/5.0.2+20230103" license = "MIT" keywords = ["compute", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/compute1/README.md b/gen/compute1/README.md index 29f9ade64a..979c67208e 100644 --- a/gen/compute1/README.md +++ b/gen/compute1/README.md @@ -5,194 +5,194 @@ DO NOT EDIT ! --> The `google-compute1` library allows access to all features of the *Google compute* service. -This documentation was generated from *compute* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *compute:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *compute* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *compute:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *compute* *v1* API can be found at the [official documentation site](https://cloud.google.com/compute/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/Compute) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/Compute) ... -* [accelerator types](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AcceleratorType) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AcceleratorTypeAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AcceleratorTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AcceleratorTypeListCall) -* [addresses](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Address) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AddressAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AddressDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AddressGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AddressInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AddressListCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AddressSetLabelCall) -* [autoscalers](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Autoscaler) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AutoscalerAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AutoscalerDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AutoscalerGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AutoscalerInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AutoscalerListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AutoscalerPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::AutoscalerUpdateCall) -* [backend buckets](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucket) - * [*add signed url key*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketAddSignedUrlKeyCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketDeleteCall), [*delete signed url key*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketDeleteSignedUrlKeyCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketPatchCall), [*set edge security policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketSetEdgeSecurityPolicyCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendBucketUpdateCall) -* [backend services](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendService) - * [*add signed url key*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceAddSignedUrlKeyCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceAggregatedListCall), 
[*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceDeleteCall), [*delete signed url key*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceDeleteSignedUrlKeyCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceGetCall), [*get health*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceGetHealthCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServicePatchCall), [*set edge security policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceSetEdgeSecurityPolicyCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceSetIamPolicyCall), [*set security policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceSetSecurityPolicyCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::BackendServiceUpdateCall) -* [disk types](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskType) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskTypeAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskTypeListCall) -* [disks](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Disk) - * 
[*add resource policies*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskAddResourcePolicyCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskAggregatedListCall), [*create snapshot*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskCreateSnapshotCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskListCall), [*remove resource policies*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskRemoveResourcePolicyCall), [*resize*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskResizeCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskSetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::DiskTestIamPermissionCall) -* [external vpn gateways](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ExternalVpnGateway) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ExternalVpnGatewayDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ExternalVpnGatewayGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ExternalVpnGatewayInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ExternalVpnGatewayListCall), [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ExternalVpnGatewaySetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ExternalVpnGatewayTestIamPermissionCall) -* [firewall policies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicy) - * [*add association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyAddAssociationCall), [*add rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyAddRuleCall), [*clone rules*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyCloneRuleCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyGetCall), [*get association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyGetAssociationCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyGetIamPolicyCall), [*get rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyGetRuleCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyListCall), [*list associations*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyListAssociationCall), [*move*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyMoveCall), 
[*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyPatchCall), [*patch rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyPatchRuleCall), [*remove association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyRemoveAssociationCall), [*remove rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyRemoveRuleCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPolicyTestIamPermissionCall) -* [firewalls](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Firewall) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::FirewallUpdateCall) -* [forwarding rules](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRule) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRuleAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRuleDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRuleGetCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRuleInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRuleListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRulePatchCall), [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRuleSetLabelCall) and [*set target*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ForwardingRuleSetTargetCall) +* [accelerator types](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AcceleratorType) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AcceleratorTypeAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AcceleratorTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AcceleratorTypeListCall) +* [addresses](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Address) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AddressAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AddressDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AddressGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AddressInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AddressListCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AddressSetLabelCall) +* [autoscalers](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Autoscaler) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AutoscalerAggregatedListCall), 
[*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AutoscalerDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AutoscalerGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AutoscalerInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AutoscalerListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AutoscalerPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::AutoscalerUpdateCall) +* [backend buckets](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucket) + * [*add signed url key*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketAddSignedUrlKeyCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketDeleteCall), [*delete signed url key*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketDeleteSignedUrlKeyCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketPatchCall), [*set edge security policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketSetEdgeSecurityPolicyCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendBucketUpdateCall) +* [backend services](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendService) + * [*add signed url key*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceAddSignedUrlKeyCall), [*aggregated 
list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceDeleteCall), [*delete signed url key*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceDeleteSignedUrlKeyCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceGetCall), [*get health*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceGetHealthCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServicePatchCall), [*set edge security policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceSetEdgeSecurityPolicyCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceSetIamPolicyCall), [*set security policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceSetSecurityPolicyCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::BackendServiceUpdateCall) +* [disk types](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskType) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskTypeAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskTypeListCall) +* [disks](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Disk) + * [*add 
resource policies*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskAddResourcePolicyCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskAggregatedListCall), [*create snapshot*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskCreateSnapshotCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskListCall), [*remove resource policies*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskRemoveResourcePolicyCall), [*resize*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskResizeCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskSetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::DiskTestIamPermissionCall) +* [external vpn gateways](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ExternalVpnGateway) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ExternalVpnGatewayDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ExternalVpnGatewayGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ExternalVpnGatewayInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ExternalVpnGatewayListCall), [*set 
labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ExternalVpnGatewaySetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ExternalVpnGatewayTestIamPermissionCall) +* [firewall policies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicy) + * [*add association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyAddAssociationCall), [*add rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyAddRuleCall), [*clone rules*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyCloneRuleCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyGetCall), [*get association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyGetAssociationCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyGetIamPolicyCall), [*get rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyGetRuleCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyListCall), [*list associations*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyListAssociationCall), [*move*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyMoveCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyPatchCall), [*patch rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyPatchRuleCall), [*remove 
association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyRemoveAssociationCall), [*remove rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyRemoveRuleCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPolicyTestIamPermissionCall) +* [firewalls](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Firewall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::FirewallUpdateCall) +* [forwarding rules](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRule) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRuleAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRuleDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRuleGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRuleInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRuleListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRulePatchCall), [*set 
labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRuleSetLabelCall) and [*set target*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ForwardingRuleSetTargetCall) * global addresses - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalAddressDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalAddressGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalAddressInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalAddressListCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalAddressSetLabelCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalAddressDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalAddressGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalAddressInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalAddressListCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalAddressSetLabelCall) * global forwarding rules - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalForwardingRuleDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalForwardingRuleGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalForwardingRuleInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalForwardingRuleListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalForwardingRulePatchCall), [*set 
labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalForwardingRuleSetLabelCall) and [*set target*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalForwardingRuleSetTargetCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalForwardingRuleDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalForwardingRuleGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalForwardingRuleInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalForwardingRuleListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalForwardingRulePatchCall), [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalForwardingRuleSetLabelCall) and [*set target*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalForwardingRuleSetTargetCall) * global network endpoint groups - * [*attach network endpoints*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalNetworkEndpointGroupAttachNetworkEndpointCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalNetworkEndpointGroupDeleteCall), [*detach network endpoints*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalNetworkEndpointGroupDetachNetworkEndpointCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalNetworkEndpointGroupGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalNetworkEndpointGroupInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalNetworkEndpointGroupListCall) and [*list network 
endpoints*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalNetworkEndpointGroupListNetworkEndpointCall) + * [*attach network endpoints*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalNetworkEndpointGroupAttachNetworkEndpointCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalNetworkEndpointGroupDeleteCall), [*detach network endpoints*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalNetworkEndpointGroupDetachNetworkEndpointCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalNetworkEndpointGroupGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalNetworkEndpointGroupInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalNetworkEndpointGroupListCall) and [*list network endpoints*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalNetworkEndpointGroupListNetworkEndpointCall) * global operations - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalOperationAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalOperationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalOperationGetCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalOperationListCall) and [*wait*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalOperationWaitCall) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalOperationAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalOperationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalOperationGetCall), 
[*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalOperationListCall) and [*wait*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalOperationWaitCall) * global organization operations - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalOrganizationOperationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalOrganizationOperationGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalOrganizationOperationListCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalOrganizationOperationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalOrganizationOperationGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalOrganizationOperationListCall) * global public delegated prefixes - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalPublicDelegatedPrefixDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalPublicDelegatedPrefixGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalPublicDelegatedPrefixInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalPublicDelegatedPrefixListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::GlobalPublicDelegatedPrefixPatchCall) -* [health checks](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HealthCheck) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HealthCheckAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HealthCheckDeleteCall), 
[*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HealthCheckGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HealthCheckInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HealthCheckListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HealthCheckPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HealthCheckUpdateCall) -* [http health checks](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpHealthCheck) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpHealthCheckDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpHealthCheckGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpHealthCheckInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpHealthCheckListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpHealthCheckPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpHealthCheckUpdateCall) -* [https health checks](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpsHealthCheck) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpsHealthCheckDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpsHealthCheckGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpsHealthCheckInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpsHealthCheckListCall), 
[*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpsHealthCheckPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::HttpsHealthCheckUpdateCall) -* [image family views](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageFamilyView) - * [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageFamilyViewGetCall) -* [images](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Image) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageDeleteCall), [*deprecate*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageDeprecateCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageGetCall), [*get from family*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageGetFromFamilyCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImagePatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageSetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ImageTestIamPermissionCall) -* [instance group managers](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManager) - * [*abandon 
instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerAbandonInstanceCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerAggregatedListCall), [*apply updates to instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerApplyUpdatesToInstanceCall), [*create instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerCreateInstanceCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerDeleteCall), [*delete instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerDeleteInstanceCall), [*delete per instance configs*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerDeletePerInstanceConfigCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerListCall), [*list errors*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerListErrorCall), [*list managed instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerListManagedInstanceCall), [*list per instance configs*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerListPerInstanceConfigCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerPatchCall), [*patch per instance 
configs*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerPatchPerInstanceConfigCall), [*recreate instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerRecreateInstanceCall), [*resize*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerResizeCall), [*set instance template*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerSetInstanceTemplateCall), [*set target pools*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerSetTargetPoolCall) and [*update per instance configs*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupManagerUpdatePerInstanceConfigCall) -* [instance groups](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroup) - * [*add instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupAddInstanceCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupListCall), [*list instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupListInstanceCall), [*remove instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupRemoveInstanceCall) and [*set named 
ports*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGroupSetNamedPortCall) -* [instance templates](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTemplate) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTemplateDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTemplateGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTemplateGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTemplateInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTemplateListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTemplateSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTemplateTestIamPermissionCall) -* [instances](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Instance) - * [*add access config*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceAddAccessConfigCall), [*add resource policies*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceAddResourcePolicyCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceAggregatedListCall), [*attach disk*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceAttachDiskCall), [*bulk insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceBulkInsertCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceDeleteCall), [*delete access 
config*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceDeleteAccessConfigCall), [*detach disk*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceDetachDiskCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGetCall), [*get effective firewalls*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGetEffectiveFirewallCall), [*get guest attributes*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGetGuestAttributeCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGetIamPolicyCall), [*get screenshot*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGetScreenshotCall), [*get serial port output*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGetSerialPortOutputCall), [*get shielded instance identity*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceGetShieldedInstanceIdentityCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceListCall), [*list referrers*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceListReferrerCall), [*remove resource policies*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceRemoveResourcePolicyCall), [*reset*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceResetCall), [*resume*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceResumeCall), [*send diagnostic interrupt*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSendDiagnosticInterruptCall), [*set deletion 
protection*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetDeletionProtectionCall), [*set disk auto delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetDiskAutoDeleteCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetLabelCall), [*set machine resources*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetMachineResourceCall), [*set machine type*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetMachineTypeCall), [*set metadata*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetMetadataCall), [*set min cpu platform*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetMinCpuPlatformCall), [*set scheduling*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetSchedulingCall), [*set service account*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetServiceAccountCall), [*set shielded instance integrity policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetShieldedInstanceIntegrityPolicyCall), [*set tags*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSetTagCall), [*simulate maintenance event*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSimulateMaintenanceEventCall), [*start*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceStartCall), [*start with encryption key*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceStartWithEncryptionKeyCall), 
[*stop*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceStopCall), [*suspend*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceSuspendCall), [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceTestIamPermissionCall), [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceUpdateCall), [*update access config*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceUpdateAccessConfigCall), [*update display device*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceUpdateDisplayDeviceCall), [*update network interface*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceUpdateNetworkInterfaceCall) and [*update shielded instance config*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InstanceUpdateShieldedInstanceConfigCall) -* [interconnect attachments](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectAttachment) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectAttachmentAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectAttachmentDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectAttachmentGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectAttachmentInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectAttachmentListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectAttachmentPatchCall) and [*set 
labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectAttachmentSetLabelCall) -* [interconnect locations](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectLocation) - * [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectLocationGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectLocationListCall) -* [interconnects](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Interconnect) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectGetCall), [*get diagnostics*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectGetDiagnosticCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectPatchCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::InterconnectSetLabelCall) -* [license codes](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseCode) - * [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseCodeGetCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseCodeTestIamPermissionCall) -* [licenses](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::License) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseDeleteCall), 
[*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::LicenseTestIamPermissionCall) -* [machine images](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineImage) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineImageDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineImageGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineImageGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineImageInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineImageListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineImageSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineImageTestIamPermissionCall) -* [machine types](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineType) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineTypeAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineTypeGetCall) and 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::MachineTypeListCall) -* [network attachments](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachment) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachmentAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachmentDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachmentGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachmentGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachmentInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachmentListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachmentSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAttachmentTestIamPermissionCall) -* [network edge security services](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEdgeSecurityService) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEdgeSecurityServiceAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEdgeSecurityServiceDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEdgeSecurityServiceGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEdgeSecurityServiceInsertCall) and [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEdgeSecurityServicePatchCall) -* 
[network endpoint groups](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroup) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupAggregatedListCall), [*attach network endpoints*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupAttachNetworkEndpointCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupDeleteCall), [*detach network endpoints*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupDetachNetworkEndpointCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupListCall), [*list network endpoints*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupListNetworkEndpointCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkEndpointGroupTestIamPermissionCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalPublicDelegatedPrefixDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalPublicDelegatedPrefixGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalPublicDelegatedPrefixInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalPublicDelegatedPrefixListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::GlobalPublicDelegatedPrefixPatchCall) +* [health 
checks](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HealthCheck) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HealthCheckAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HealthCheckDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HealthCheckGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HealthCheckInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HealthCheckListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HealthCheckPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HealthCheckUpdateCall) +* [http health checks](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpHealthCheck) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpHealthCheckDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpHealthCheckGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpHealthCheckInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpHealthCheckListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpHealthCheckPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpHealthCheckUpdateCall) +* [https health checks](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpsHealthCheck) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpsHealthCheckDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpsHealthCheckGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpsHealthCheckInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpsHealthCheckListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpsHealthCheckPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::HttpsHealthCheckUpdateCall) +* [image family views](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageFamilyView) + * [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageFamilyViewGetCall) +* [images](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Image) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageDeleteCall), [*deprecate*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageDeprecateCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageGetCall), [*get from family*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageGetFromFamilyCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImagePatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageSetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ImageTestIamPermissionCall) +* [instance group managers](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManager) + * [*abandon 
instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerAbandonInstanceCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerAggregatedListCall), [*apply updates to instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerApplyUpdatesToInstanceCall), [*create instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerCreateInstanceCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerDeleteCall), [*delete instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerDeleteInstanceCall), [*delete per instance configs*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerDeletePerInstanceConfigCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerListCall), [*list errors*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerListErrorCall), [*list managed instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerListManagedInstanceCall), [*list per instance configs*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerListPerInstanceConfigCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerPatchCall), [*patch per instance configs*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerPatchPerInstanceConfigCall), [*recreate 
instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerRecreateInstanceCall), [*resize*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerResizeCall), [*set instance template*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerSetInstanceTemplateCall), [*set target pools*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerSetTargetPoolCall) and [*update per instance configs*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupManagerUpdatePerInstanceConfigCall) +* [instance groups](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroup) + * [*add instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupAddInstanceCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupListCall), [*list instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupListInstanceCall), [*remove instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupRemoveInstanceCall) and [*set named ports*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGroupSetNamedPortCall) +* [instance templates](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTemplate) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTemplateDeleteCall), 
[*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTemplateGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTemplateGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTemplateInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTemplateListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTemplateSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTemplateTestIamPermissionCall) +* [instances](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Instance) + * [*add access config*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceAddAccessConfigCall), [*add resource policies*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceAddResourcePolicyCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceAggregatedListCall), [*attach disk*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceAttachDiskCall), [*bulk insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceBulkInsertCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceDeleteCall), [*delete access config*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceDeleteAccessConfigCall), [*detach disk*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceDetachDiskCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGetCall), [*get effective firewalls*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGetEffectiveFirewallCall), [*get guest 
attributes*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGetGuestAttributeCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGetIamPolicyCall), [*get screenshot*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGetScreenshotCall), [*get serial port output*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGetSerialPortOutputCall), [*get shielded instance identity*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceGetShieldedInstanceIdentityCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceListCall), [*list referrers*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceListReferrerCall), [*remove resource policies*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceRemoveResourcePolicyCall), [*reset*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceResetCall), [*resume*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceResumeCall), [*send diagnostic interrupt*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSendDiagnosticInterruptCall), [*set deletion protection*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetDeletionProtectionCall), [*set disk auto delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetDiskAutoDeleteCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetLabelCall), [*set machine resources*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetMachineResourceCall), 
[*set machine type*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetMachineTypeCall), [*set metadata*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetMetadataCall), [*set min cpu platform*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetMinCpuPlatformCall), [*set scheduling*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetSchedulingCall), [*set service account*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetServiceAccountCall), [*set shielded instance integrity policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetShieldedInstanceIntegrityPolicyCall), [*set tags*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSetTagCall), [*simulate maintenance event*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSimulateMaintenanceEventCall), [*start*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceStartCall), [*start with encryption key*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceStartWithEncryptionKeyCall), [*stop*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceStopCall), [*suspend*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceSuspendCall), [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceTestIamPermissionCall), [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceUpdateCall), [*update access config*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceUpdateAccessConfigCall), [*update display device*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceUpdateDisplayDeviceCall), [*update network 
interface*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceUpdateNetworkInterfaceCall) and [*update shielded instance config*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InstanceUpdateShieldedInstanceConfigCall) +* [interconnect attachments](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectAttachment) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectAttachmentAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectAttachmentDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectAttachmentGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectAttachmentInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectAttachmentListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectAttachmentPatchCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectAttachmentSetLabelCall) +* [interconnect locations](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectLocation) + * [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectLocationGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectLocationListCall) +* [interconnects](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Interconnect) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectGetCall), [*get diagnostics*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectGetDiagnosticCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectPatchCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::InterconnectSetLabelCall) +* [license codes](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseCode) + * [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseCodeGetCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseCodeTestIamPermissionCall) +* [licenses](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::License) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::LicenseTestIamPermissionCall) +* [machine images](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineImage) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineImageDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineImageGetCall), [*get iam 
policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineImageGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineImageInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineImageListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineImageSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineImageTestIamPermissionCall) +* [machine types](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineType) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineTypeAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::MachineTypeListCall) +* [network attachments](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachment) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachmentAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachmentDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachmentGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachmentGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachmentInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachmentListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachmentSetIamPolicyCall) and [*test iam 
permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAttachmentTestIamPermissionCall) +* [network edge security services](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEdgeSecurityService) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEdgeSecurityServiceAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEdgeSecurityServiceDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEdgeSecurityServiceGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEdgeSecurityServiceInsertCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEdgeSecurityServicePatchCall) +* [network endpoint groups](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroup) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupAggregatedListCall), [*attach network endpoints*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupAttachNetworkEndpointCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupDeleteCall), [*detach network endpoints*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupDetachNetworkEndpointCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupListCall), [*list network endpoints*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupListNetworkEndpointCall) and [*test iam 
permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkEndpointGroupTestIamPermissionCall) * network firewall policies - * [*add association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyAddAssociationCall), [*add rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyAddRuleCall), [*clone rules*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyCloneRuleCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyGetCall), [*get association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyGetAssociationCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyGetIamPolicyCall), [*get rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyGetRuleCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyPatchCall), [*patch rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyPatchRuleCall), [*remove association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyRemoveAssociationCall), [*remove rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyRemoveRuleCall), [*set iam 
policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkFirewallPolicyTestIamPermissionCall) -* [networks](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Network) - * [*add peering*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkAddPeeringCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkGetCall), [*get effective firewalls*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkGetEffectiveFirewallCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkListCall), [*list peering routes*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkListPeeringRouteCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkPatchCall), [*remove peering*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkRemovePeeringCall), [*switch to custom mode*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkSwitchToCustomModeCall) and [*update peering*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NetworkUpdatePeeringCall) -* [node groups](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroup) - * [*add nodes*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupAddNodeCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupAggregatedListCall), 
[*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupDeleteCall), [*delete nodes*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupDeleteNodeCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupListCall), [*list nodes*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupListNodeCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupPatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupSetIamPolicyCall), [*set node template*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupSetNodeTemplateCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeGroupTestIamPermissionCall) -* [node templates](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplate) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplateAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplateDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplateGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplateGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplateInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplateListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplateSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTemplateTestIamPermissionCall) -* [node types](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeType) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTypeAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::NodeTypeListCall) -* [packet mirrorings](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PacketMirroring) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PacketMirroringAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PacketMirroringDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PacketMirroringGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PacketMirroringInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PacketMirroringListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PacketMirroringPatchCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PacketMirroringTestIamPermissionCall) -* [projects](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Project) - * [*disable xpn host*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectDisableXpnHostCall), [*disable xpn 
resource*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectDisableXpnResourceCall), [*enable xpn host*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectEnableXpnHostCall), [*enable xpn resource*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectEnableXpnResourceCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectGetCall), [*get xpn host*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectGetXpnHostCall), [*get xpn resources*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectGetXpnResourceCall), [*list xpn hosts*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectListXpnHostCall), [*move disk*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectMoveDiskCall), [*move instance*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectMoveInstanceCall), [*set common instance metadata*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectSetCommonInstanceMetadataCall), [*set default network tier*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectSetDefaultNetworkTierCall) and [*set usage export bucket*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ProjectSetUsageExportBucketCall) -* [public advertised prefixes](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicAdvertisedPrefix) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicAdvertisedPrefixDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicAdvertisedPrefixGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicAdvertisedPrefixInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicAdvertisedPrefixListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicAdvertisedPrefixPatchCall) -* [public delegated prefixes](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicDelegatedPrefix) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicDelegatedPrefixAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicDelegatedPrefixDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicDelegatedPrefixGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicDelegatedPrefixInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicDelegatedPrefixListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::PublicDelegatedPrefixPatchCall) + * [*add association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyAddAssociationCall), [*add rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyAddRuleCall), [*clone rules*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyCloneRuleCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyGetCall), [*get association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyGetAssociationCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyGetIamPolicyCall), [*get 
rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyGetRuleCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyPatchCall), [*patch rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyPatchRuleCall), [*remove association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyRemoveAssociationCall), [*remove rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyRemoveRuleCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkFirewallPolicyTestIamPermissionCall) +* [networks](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Network) + * [*add peering*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkAddPeeringCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkGetCall), [*get effective firewalls*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkGetEffectiveFirewallCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkListCall), [*list peering routes*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkListPeeringRouteCall), 
[*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkPatchCall), [*remove peering*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkRemovePeeringCall), [*switch to custom mode*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkSwitchToCustomModeCall) and [*update peering*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NetworkUpdatePeeringCall) +* [node groups](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroup) + * [*add nodes*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupAddNodeCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupDeleteCall), [*delete nodes*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupDeleteNodeCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupListCall), [*list nodes*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupListNodeCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupPatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupSetIamPolicyCall), [*set node template*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupSetNodeTemplateCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeGroupTestIamPermissionCall) +* [node 
templates](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplate) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplateAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplateDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplateGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplateGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplateInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplateListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplateSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTemplateTestIamPermissionCall) +* [node types](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeType) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTypeAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::NodeTypeListCall) +* [packet mirrorings](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PacketMirroring) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PacketMirroringAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PacketMirroringDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PacketMirroringGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PacketMirroringInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PacketMirroringListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PacketMirroringPatchCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PacketMirroringTestIamPermissionCall) +* [projects](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Project) + * [*disable xpn host*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectDisableXpnHostCall), [*disable xpn resource*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectDisableXpnResourceCall), [*enable xpn host*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectEnableXpnHostCall), [*enable xpn resource*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectEnableXpnResourceCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectGetCall), [*get xpn host*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectGetXpnHostCall), [*get xpn resources*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectGetXpnResourceCall), [*list xpn hosts*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectListXpnHostCall), [*move disk*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectMoveDiskCall), [*move instance*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectMoveInstanceCall), [*set common instance metadata*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectSetCommonInstanceMetadataCall), [*set default network tier*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectSetDefaultNetworkTierCall) and [*set usage export bucket*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ProjectSetUsageExportBucketCall) +* [public advertised 
prefixes](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicAdvertisedPrefix) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicAdvertisedPrefixDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicAdvertisedPrefixGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicAdvertisedPrefixInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicAdvertisedPrefixListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicAdvertisedPrefixPatchCall) +* [public delegated prefixes](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicDelegatedPrefix) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicDelegatedPrefixAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicDelegatedPrefixDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicDelegatedPrefixGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicDelegatedPrefixInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicDelegatedPrefixListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::PublicDelegatedPrefixPatchCall) * region autoscalers - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionAutoscalerDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionAutoscalerGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionAutoscalerInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionAutoscalerListCall), 
[*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionAutoscalerPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionAutoscalerUpdateCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionAutoscalerDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionAutoscalerGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionAutoscalerInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionAutoscalerListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionAutoscalerPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionAutoscalerUpdateCall) * region backend services - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServiceDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServiceGetCall), [*get health*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServiceGetHealthCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServiceGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServiceInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServiceListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServicePatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServiceSetIamPolicyCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionBackendServiceUpdateCall) + 
* [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServiceDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServiceGetCall), [*get health*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServiceGetHealthCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServiceGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServiceInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServiceListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServicePatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServiceSetIamPolicyCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionBackendServiceUpdateCall) * region commitments - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionCommitmentAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionCommitmentGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionCommitmentInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionCommitmentListCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionCommitmentUpdateCall) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionCommitmentAggregatedListCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionCommitmentGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionCommitmentInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionCommitmentListCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionCommitmentUpdateCall) * region disk types - * [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskTypeListCall) + * [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskTypeGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskTypeListCall) * region disks - * [*add resource policies*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskAddResourcePolicyCall), [*create snapshot*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskCreateSnapshotCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskListCall), [*remove resource policies*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskRemoveResourcePolicyCall), [*resize*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskResizeCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskSetLabelCall) and [*test iam 
permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionDiskTestIamPermissionCall) + * [*add resource policies*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskAddResourcePolicyCall), [*create snapshot*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskCreateSnapshotCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskListCall), [*remove resource policies*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskRemoveResourcePolicyCall), [*resize*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskResizeCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskSetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionDiskTestIamPermissionCall) * region health check services - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckServiceDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckServiceGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckServiceInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckServiceListCall) and 
[*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckServicePatchCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckServiceDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckServiceGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckServiceInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckServiceListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckServicePatchCall) * region health checks - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckPatchCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionHealthCheckUpdateCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckPatchCall) and 
[*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionHealthCheckUpdateCall) * region instance group managers - * [*abandon instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerAbandonInstanceCall), [*apply updates to instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerApplyUpdatesToInstanceCall), [*create instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerCreateInstanceCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerDeleteCall), [*delete instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerDeleteInstanceCall), [*delete per instance configs*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerDeletePerInstanceConfigCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerListCall), [*list errors*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerListErrorCall), [*list managed instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerListManagedInstanceCall), [*list per instance configs*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerListPerInstanceConfigCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerPatchCall), [*patch per instance 
configs*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerPatchPerInstanceConfigCall), [*recreate instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerRecreateInstanceCall), [*resize*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerResizeCall), [*set instance template*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerSetInstanceTemplateCall), [*set target pools*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerSetTargetPoolCall) and [*update per instance configs*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupManagerUpdatePerInstanceConfigCall) + * [*abandon instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerAbandonInstanceCall), [*apply updates to instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerApplyUpdatesToInstanceCall), [*create instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerCreateInstanceCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerDeleteCall), [*delete instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerDeleteInstanceCall), [*delete per instance configs*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerDeletePerInstanceConfigCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerListCall), [*list errors*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerListErrorCall), [*list managed instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerListManagedInstanceCall), [*list per instance configs*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerListPerInstanceConfigCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerPatchCall), [*patch per instance configs*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerPatchPerInstanceConfigCall), [*recreate instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerRecreateInstanceCall), [*resize*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerResizeCall), [*set instance template*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerSetInstanceTemplateCall), [*set target pools*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerSetTargetPoolCall) and [*update per instance configs*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupManagerUpdatePerInstanceConfigCall) * region instance groups - * [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupGetCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupListCall), [*list instances*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupListInstanceCall) and [*set named 
ports*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceGroupSetNamedPortCall) + * [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupGetCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupListCall), [*list instances*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupListInstanceCall) and [*set named ports*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceGroupSetNamedPortCall) * region instances - * [*bulk insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionInstanceBulkInsertCall) + * [*bulk insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionInstanceBulkInsertCall) * region network endpoint groups - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkEndpointGroupDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkEndpointGroupGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkEndpointGroupInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkEndpointGroupListCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkEndpointGroupDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkEndpointGroupGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkEndpointGroupInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkEndpointGroupListCall) * region network firewall policies - * [*add 
association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyAddAssociationCall), [*add rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyAddRuleCall), [*clone rules*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyCloneRuleCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetCall), [*get association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetAssociationCall), [*get effective firewalls*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetEffectiveFirewallCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetIamPolicyCall), [*get rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetRuleCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyPatchCall), [*patch rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyPatchRuleCall), [*remove association*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyRemoveAssociationCall), [*remove 
rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyRemoveRuleCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNetworkFirewallPolicyTestIamPermissionCall) + * [*add association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyAddAssociationCall), [*add rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyAddRuleCall), [*clone rules*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyCloneRuleCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetCall), [*get association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetAssociationCall), [*get effective firewalls*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetEffectiveFirewallCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetIamPolicyCall), [*get rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyGetRuleCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyPatchCall), [*patch 
rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyPatchRuleCall), [*remove association*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyRemoveAssociationCall), [*remove rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyRemoveRuleCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNetworkFirewallPolicyTestIamPermissionCall) * region notification endpoints - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNotificationEndpointDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNotificationEndpointGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNotificationEndpointInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionNotificationEndpointListCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNotificationEndpointDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNotificationEndpointGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNotificationEndpointInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionNotificationEndpointListCall) * region operations - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionOperationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionOperationGetCall), 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionOperationListCall) and [*wait*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionOperationWaitCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionOperationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionOperationGetCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionOperationListCall) and [*wait*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionOperationWaitCall) * region security policies - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSecurityPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSecurityPolicyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSecurityPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSecurityPolicyListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSecurityPolicyPatchCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSecurityPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSecurityPolicyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSecurityPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSecurityPolicyListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSecurityPolicyPatchCall) * region ssl certificates - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslCertificateDeleteCall), 
[*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslCertificateGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslCertificateInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslCertificateListCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslCertificateDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslCertificateGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslCertificateInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslCertificateListCall) * region ssl policies - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslPolicyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslPolicyListCall), [*list available features*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslPolicyListAvailableFeatureCall) and [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionSslPolicyPatchCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslPolicyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslPolicyListCall), [*list available 
features*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslPolicyListAvailableFeatureCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionSslPolicyPatchCall) * region target http proxies - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpProxyListCall) and [*set url map*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpProxySetUrlMapCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpProxyListCall) and [*set url map*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpProxySetUrlMapCall) * region target https proxies - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpsProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpsProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpsProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpsProxyListCall), 
[*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpsProxyPatchCall), [*set ssl certificates*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpsProxySetSslCertificateCall) and [*set url map*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetHttpsProxySetUrlMapCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpsProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpsProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpsProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpsProxyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpsProxyPatchCall), [*set ssl certificates*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpsProxySetSslCertificateCall) and [*set url map*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetHttpsProxySetUrlMapCall) * region target tcp proxies - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetTcpProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetTcpProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetTcpProxyInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionTargetTcpProxyListCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetTcpProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetTcpProxyGetCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetTcpProxyInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionTargetTcpProxyListCall) * region url maps - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionUrlMapDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionUrlMapGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionUrlMapInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionUrlMapListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionUrlMapPatchCall), [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionUrlMapUpdateCall) and [*validate*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionUrlMapValidateCall) -* [regions](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Region) - * [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RegionListCall) -* [reservations](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Reservation) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationGetIamPolicyCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationListCall), [*resize*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationResizeCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationTestIamPermissionCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ReservationUpdateCall) -* [resource policies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicy) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicyGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicyGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicyListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ResourcePolicyTestIamPermissionCall) -* [routers](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Router) - * [*aggregated 
list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterGetCall), [*get nat mapping info*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterGetNatMappingInfoCall), [*get router status*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterGetRouterStatuCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterPatchCall), [*preview*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterPreviewCall) and [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouterUpdateCall) -* [routes](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Route) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouteDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouteGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouteInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::RouteListCall) -* [security policies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicy) - * [*add rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyAddRuleCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyAggregatedListCall), 
[*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyGetCall), [*get rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyGetRuleCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyListCall), [*list preconfigured expression sets*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyListPreconfiguredExpressionSetCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyPatchCall), [*patch rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyPatchRuleCall), [*remove rule*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicyRemoveRuleCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SecurityPolicySetLabelCall) -* [service attachments](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachment) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentPatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ServiceAttachmentTestIamPermissionCall) -* [snapshots](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Snapshot) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SnapshotDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SnapshotGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SnapshotGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SnapshotInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SnapshotListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SnapshotSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SnapshotSetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SnapshotTestIamPermissionCall) -* [ssl certificates](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslCertificate) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslCertificateAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslCertificateDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslCertificateGetCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslCertificateInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslCertificateListCall) -* [ssl policies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslPolicy) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslPolicyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslPolicyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslPolicyListCall), [*list available features*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslPolicyListAvailableFeatureCall) and [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SslPolicyPatchCall) -* [subnetworks](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Subnetwork) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkDeleteCall), [*expand ip cidr range*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkExpandIpCidrRangeCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkListCall), [*list usable*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkListUsableCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkPatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkSetIamPolicyCall), [*set private ip google access*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkSetPrivateIpGoogleAccesCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::SubnetworkTestIamPermissionCall) -* [target grpc proxies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetGrpcProxy) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetGrpcProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetGrpcProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetGrpcProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetGrpcProxyListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetGrpcProxyPatchCall) -* [target http proxies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpProxy) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpProxyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpProxyGetCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpProxyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpProxyPatchCall) and [*set url map*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpProxySetUrlMapCall) -* [target https proxies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxy) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxyPatchCall), [*set certificate map*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxySetCertificateMapCall), [*set quic override*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxySetQuicOverrideCall), [*set ssl certificates*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxySetSslCertificateCall), [*set ssl policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxySetSslPolicyCall) and [*set url map*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetHttpsProxySetUrlMapCall) -* [target 
instances](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetInstance) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetInstanceAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetInstanceDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetInstanceGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetInstanceInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetInstanceListCall) -* [target pools](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPool) - * [*add health check*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolAddHealthCheckCall), [*add instance*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolAddInstanceCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolGetCall), [*get health*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolGetHealthCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolListCall), [*remove health check*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolRemoveHealthCheckCall), [*remove instance*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolRemoveInstanceCall) and [*set 
backup*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetPoolSetBackupCall) -* [target ssl proxies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxy) - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxyListCall), [*set backend service*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxySetBackendServiceCall), [*set certificate map*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxySetCertificateMapCall), [*set proxy header*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxySetProxyHeaderCall), [*set ssl certificates*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxySetSslCertificateCall) and [*set ssl policy*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetSslProxySetSslPolicyCall) -* [target tcp proxies](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetTcpProxy) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetTcpProxyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetTcpProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetTcpProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetTcpProxyInsertCall), 
[*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetTcpProxyListCall), [*set backend service*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetTcpProxySetBackendServiceCall) and [*set proxy header*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetTcpProxySetProxyHeaderCall) -* [target vpn gateways](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetVpnGateway) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetVpnGatewayAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetVpnGatewayDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetVpnGatewayGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetVpnGatewayInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetVpnGatewayListCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::TargetVpnGatewaySetLabelCall) -* [url maps](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMap) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapInsertCall), [*invalidate cache*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapInvalidateCacheCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapListCall), 
[*patch*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapPatchCall), [*update*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapUpdateCall) and [*validate*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::UrlMapValidateCall) -* [vpn gateways](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGateway) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGatewayAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGatewayDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGatewayGetCall), [*get status*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGatewayGetStatuCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGatewayInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGatewayListCall), [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGatewaySetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnGatewayTestIamPermissionCall) -* [vpn tunnels](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnTunnel) - * [*aggregated list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnTunnelAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnTunnelDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnTunnelGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnTunnelInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnTunnelListCall) 
and [*set labels*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::VpnTunnelSetLabelCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionUrlMapDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionUrlMapGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionUrlMapInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionUrlMapListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionUrlMapPatchCall), [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionUrlMapUpdateCall) and [*validate*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionUrlMapValidateCall) +* [regions](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Region) + * [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RegionListCall) +* [reservations](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Reservation) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationListCall), [*resize*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationResizeCall), [*set iam 
policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationTestIamPermissionCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ReservationUpdateCall) +* [resource policies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicy) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicyGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicyGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicyListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicySetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ResourcePolicyTestIamPermissionCall) +* [routers](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Router) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterGetCall), [*get nat mapping info*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterGetNatMappingInfoCall), [*get router status*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterGetRouterStatuCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterPatchCall), [*preview*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterPreviewCall) and [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouterUpdateCall) +* [routes](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Route) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouteDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouteGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouteInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::RouteListCall) +* [security policies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicy) + * [*add rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyAddRuleCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyGetCall), [*get rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyGetRuleCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyListCall), [*list preconfigured expression sets*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyListPreconfiguredExpressionSetCall), 
[*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyPatchCall), [*patch rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyPatchRuleCall), [*remove rule*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicyRemoveRuleCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SecurityPolicySetLabelCall) +* [service attachments](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachment) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentPatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentSetIamPolicyCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ServiceAttachmentTestIamPermissionCall) +* [snapshots](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Snapshot) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SnapshotDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SnapshotGetCall), [*get iam 
policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SnapshotGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SnapshotInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SnapshotListCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SnapshotSetIamPolicyCall), [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SnapshotSetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SnapshotTestIamPermissionCall) +* [ssl certificates](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslCertificate) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslCertificateAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslCertificateDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslCertificateGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslCertificateInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslCertificateListCall) +* [ssl policies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslPolicy) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslPolicyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslPolicyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslPolicyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslPolicyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslPolicyListCall), [*list available 
features*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslPolicyListAvailableFeatureCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SslPolicyPatchCall) +* [subnetworks](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Subnetwork) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkDeleteCall), [*expand ip cidr range*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkExpandIpCidrRangeCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkGetCall), [*get iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkGetIamPolicyCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkListCall), [*list usable*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkListUsableCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkPatchCall), [*set iam policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkSetIamPolicyCall), [*set private ip google access*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkSetPrivateIpGoogleAccesCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::SubnetworkTestIamPermissionCall) +* [target grpc proxies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetGrpcProxy) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetGrpcProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetGrpcProxyGetCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetGrpcProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetGrpcProxyListCall) and [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetGrpcProxyPatchCall) +* [target http proxies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpProxy) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpProxyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpProxyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpProxyPatchCall) and [*set url map*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpProxySetUrlMapCall) +* [target https proxies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxy) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxyListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxyPatchCall), [*set certificate 
map*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxySetCertificateMapCall), [*set quic override*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxySetQuicOverrideCall), [*set ssl certificates*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxySetSslCertificateCall), [*set ssl policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxySetSslPolicyCall) and [*set url map*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetHttpsProxySetUrlMapCall) +* [target instances](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetInstance) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetInstanceAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetInstanceDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetInstanceGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetInstanceInsertCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetInstanceListCall) +* [target pools](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPool) + * [*add health check*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolAddHealthCheckCall), [*add instance*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolAddInstanceCall), [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolGetCall), [*get 
health*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolGetHealthCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolListCall), [*remove health check*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolRemoveHealthCheckCall), [*remove instance*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolRemoveInstanceCall) and [*set backup*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetPoolSetBackupCall) +* [target ssl proxies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxy) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxyListCall), [*set backend service*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxySetBackendServiceCall), [*set certificate map*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxySetCertificateMapCall), [*set proxy header*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxySetProxyHeaderCall), [*set ssl certificates*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxySetSslCertificateCall) and [*set ssl policy*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetSslProxySetSslPolicyCall) +* [target tcp proxies](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetTcpProxy) + * [*aggregated 
list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetTcpProxyAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetTcpProxyDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetTcpProxyGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetTcpProxyInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetTcpProxyListCall), [*set backend service*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetTcpProxySetBackendServiceCall) and [*set proxy header*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetTcpProxySetProxyHeaderCall) +* [target vpn gateways](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetVpnGateway) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetVpnGatewayAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetVpnGatewayDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetVpnGatewayGetCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetVpnGatewayInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetVpnGatewayListCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::TargetVpnGatewaySetLabelCall) +* [url maps](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMap) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapGetCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapInsertCall), [*invalidate cache*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapInvalidateCacheCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapListCall), [*patch*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapPatchCall), [*update*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapUpdateCall) and [*validate*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::UrlMapValidateCall) +* [vpn gateways](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGateway) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGatewayAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGatewayDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGatewayGetCall), [*get status*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGatewayGetStatuCall), [*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGatewayInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGatewayListCall), [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGatewaySetLabelCall) and [*test iam permissions*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnGatewayTestIamPermissionCall) +* [vpn tunnels](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnTunnel) + * [*aggregated list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnTunnelAggregatedListCall), [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnTunnelDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnTunnelGetCall), 
[*insert*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnTunnelInsertCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnTunnelListCall) and [*set labels*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::VpnTunnelSetLabelCall) * zone operations - * [*delete*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ZoneOperationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ZoneOperationGetCall), [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ZoneOperationListCall) and [*wait*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ZoneOperationWaitCall) -* [zones](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::Zone) - * [*get*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ZoneGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/api::ZoneListCall) + * [*delete*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ZoneOperationDeleteCall), [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ZoneOperationGetCall), [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ZoneOperationListCall) and [*wait*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ZoneOperationWaitCall) +* [zones](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::Zone) + * [*get*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ZoneGetCall) and [*list*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/api::ZoneListCall) @@ -201,17 +201,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/Compute)** +* 
**[Hub](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/Compute)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::CallBuilder) -* **[Resources](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::CallBuilder) +* **[Resources](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::Part)** + * **[Parts](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -669,17 +669,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -689,29 +689,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::Delegate) to the -[Method Builder](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::Delegate) to the +[Method Builder](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::RequestValue) and -[decodable](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::RequestValue) and +[decodable](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-compute1/5.0.2-beta-1+20230103/google_compute1/client::RequestValue) are moved +* [request values](https://docs.rs/google-compute1/5.0.2+20230103/google_compute1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/compute1/src/api.rs b/gen/compute1/src/api.rs index fdd965ec70..8db9076cd0 100644 --- a/gen/compute1/src/api.rs +++ b/gen/compute1/src/api.rs @@ -148,7 +148,7 @@ impl<'a, S> Compute { Compute { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://compute.googleapis.com/compute/v1/".to_string(), _root_url: "https://compute.googleapis.com/".to_string(), } @@ -426,7 +426,7 @@ impl<'a, S> Compute { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/compute1/src/client.rs b/gen/compute1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/compute1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/compute1/src/lib.rs b/gen/compute1/src/lib.rs index fcdde00962..8d591f8d7b 100644 --- a/gen/compute1/src/lib.rs +++ b/gen/compute1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *compute* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *compute:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *compute* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *compute:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *compute* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/compute/). diff --git a/gen/connectors1-cli/Cargo.toml b/gen/connectors1-cli/Cargo.toml index 27f4606c75..a2fa71e2cc 100644 --- a/gen/connectors1-cli/Cargo.toml +++ b/gen/connectors1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-connectors1-cli" -version = "4.0.1+20220214" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Connectors (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/connectors1-cli" @@ -20,13 +20,13 @@ name = "connectors1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-connectors1] path = "../connectors1" -version = "4.0.1+20220214" +version = "5.0.2+20230103" + diff --git a/gen/connectors1-cli/README.md b/gen/connectors1-cli/README.md index 938c5b9d6a..203f72cd02 100644 --- a/gen/connectors1-cli/README.md +++ b/gen/connectors1-cli/README.md @@ -25,11 +25,12 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Connectors* API at revision *20220214*. The CLI is at version *4.0.1*. +This documentation was generated from the *Connectors* API at revision *20230103*. The CLI is at version *5.0.2*. ```bash connectors1 [options] projects + locations-connections-connection-schema-metadata-refresh (-r )... [-p ]... [-o ] locations-connections-create (-r )... [-p ]... [-o ] locations-connections-delete [-p ]... [-o ] locations-connections-get [-p ]... [-o ] @@ -43,18 +44,18 @@ connectors1 [options] locations-connections-test-iam-permissions (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-get-runtime-config [-p ]... [-o ] - locations-global-providers-connectors-get [-p ]... [-o ] - locations-global-providers-connectors-list [-p ]... [-o ] - locations-global-providers-connectors-versions-get [-p ]... [-o ] - locations-global-providers-connectors-versions-list [-p ]... [-o ] - locations-global-providers-get [-p ]... [-o ] - locations-global-providers-list [-p ]... [-o ] locations-list [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... [-o ] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] + locations-providers-connectors-get [-p ]... [-o ] + locations-providers-connectors-list [-p ]... [-o ] + locations-providers-connectors-versions-get [-p ]... [-o ] + locations-providers-connectors-versions-list [-p ]... [-o ] + locations-providers-get [-p ]... [-o ] locations-providers-get-iam-policy [-p ]... [-o ] + locations-providers-list [-p ]... [-o ] locations-providers-set-iam-policy (-r )... [-p ]... [-o ] locations-providers-test-iam-permissions (-r )... [-p ]... 
[-o ] connectors1 --help diff --git a/gen/connectors1-cli/mkdocs.yml b/gen/connectors1-cli/mkdocs.yml index dfb364f283..181ba85094 100644 --- a/gen/connectors1-cli/mkdocs.yml +++ b/gen/connectors1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Connectors v4.0.1+20220214 +site_name: Connectors v5.0.2+20230103 site_url: http://byron.github.io/google-apis-rs/google-connectors1-cli site_description: A complete library to interact with Connectors (protocol v1) @@ -7,35 +7,37 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/connectors1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-connections-create.md', 'Projects', 'Locations Connections Create'] -- ['projects_locations-connections-delete.md', 'Projects', 'Locations Connections Delete'] -- ['projects_locations-connections-get.md', 'Projects', 'Locations Connections Get'] -- ['projects_locations-connections-get-connection-schema-metadata.md', 'Projects', 'Locations Connections Get Connection Schema Metadata'] -- ['projects_locations-connections-get-iam-policy.md', 'Projects', 'Locations Connections Get Iam Policy'] -- ['projects_locations-connections-list.md', 'Projects', 'Locations Connections List'] -- ['projects_locations-connections-patch.md', 'Projects', 'Locations Connections Patch'] -- ['projects_locations-connections-runtime-action-schemas-list.md', 'Projects', 'Locations Connections Runtime Action Schemas List'] -- ['projects_locations-connections-runtime-entity-schemas-list.md', 'Projects', 'Locations Connections Runtime Entity Schemas List'] -- ['projects_locations-connections-set-iam-policy.md', 'Projects', 'Locations Connections Set Iam Policy'] -- ['projects_locations-connections-test-iam-permissions.md', 'Projects', 'Locations Connections Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-get-runtime-config.md', 'Projects', 'Locations Get Runtime Config'] -- 
['projects_locations-global-providers-connectors-get.md', 'Projects', 'Locations Global Providers Connectors Get'] -- ['projects_locations-global-providers-connectors-list.md', 'Projects', 'Locations Global Providers Connectors List'] -- ['projects_locations-global-providers-connectors-versions-get.md', 'Projects', 'Locations Global Providers Connectors Versions Get'] -- ['projects_locations-global-providers-connectors-versions-list.md', 'Projects', 'Locations Global Providers Connectors Versions List'] -- ['projects_locations-global-providers-get.md', 'Projects', 'Locations Global Providers Get'] -- ['projects_locations-global-providers-list.md', 'Projects', 'Locations Global Providers List'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-providers-get-iam-policy.md', 'Projects', 'Locations Providers Get Iam Policy'] -- ['projects_locations-providers-set-iam-policy.md', 'Projects', 'Locations Providers Set Iam Policy'] -- ['projects_locations-providers-test-iam-permissions.md', 'Projects', 'Locations Providers Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Connections Connection Schema Metadata Refresh': 'projects_locations-connections-connection-schema-metadata-refresh.md' + - 'Locations Connections Create': 'projects_locations-connections-create.md' + - 'Locations Connections Delete': 'projects_locations-connections-delete.md' + - 'Locations Connections Get': 'projects_locations-connections-get.md' + - 'Locations Connections Get Connection Schema Metadata': 'projects_locations-connections-get-connection-schema-metadata.md' + - 'Locations 
Connections Get Iam Policy': 'projects_locations-connections-get-iam-policy.md' + - 'Locations Connections List': 'projects_locations-connections-list.md' + - 'Locations Connections Patch': 'projects_locations-connections-patch.md' + - 'Locations Connections Runtime Action Schemas List': 'projects_locations-connections-runtime-action-schemas-list.md' + - 'Locations Connections Runtime Entity Schemas List': 'projects_locations-connections-runtime-entity-schemas-list.md' + - 'Locations Connections Set Iam Policy': 'projects_locations-connections-set-iam-policy.md' + - 'Locations Connections Test Iam Permissions': 'projects_locations-connections-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Get Runtime Config': 'projects_locations-get-runtime-config.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Providers Connectors Get': 'projects_locations-providers-connectors-get.md' + - 'Locations Providers Connectors List': 'projects_locations-providers-connectors-list.md' + - 'Locations Providers Connectors Versions Get': 'projects_locations-providers-connectors-versions-get.md' + - 'Locations Providers Connectors Versions List': 'projects_locations-providers-connectors-versions-list.md' + - 'Locations Providers Get': 'projects_locations-providers-get.md' + - 'Locations Providers Get Iam Policy': 'projects_locations-providers-get-iam-policy.md' + - 'Locations Providers List': 'projects_locations-providers-list.md' + - 'Locations Providers Set Iam Policy': 'projects_locations-providers-set-iam-policy.md' + - 'Locations Providers Test Iam Permissions': 'projects_locations-providers-test-iam-permissions.md' theme: 
readthedocs diff --git a/gen/connectors1-cli/src/client.rs b/gen/connectors1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/connectors1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/connectors1-cli/src/main.rs b/gen/connectors1-cli/src/main.rs index 1c5364aacb..19a5350358 100644 --- a/gen/connectors1-cli/src/main.rs +++ b/gen/connectors1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_connectors1::{api, Error, oauth2}; +use google_connectors1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,90 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_locations_connections_connection_schema_metadata_refresh(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RefreshConnectionSchemaMetadataRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_connections_connection_schema_metadata_refresh(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_connections_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -81,18 +164,23 @@ where "auth-config.oauth2-jwt-bearer.jwt-claims.audience" => Some(("authConfig.oauth2JwtBearer.jwtClaims.audience", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auth-config.oauth2-jwt-bearer.jwt-claims.issuer" => Some(("authConfig.oauth2JwtBearer.jwtClaims.issuer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auth-config.oauth2-jwt-bearer.jwt-claims.subject" => Some(("authConfig.oauth2JwtBearer.jwtClaims.subject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auth-config.ssh-public-key.cert-type" => Some(("authConfig.sshPublicKey.certType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auth-config.ssh-public-key.ssh-client-cert.secret-version" => Some(("authConfig.sshPublicKey.sshClientCert.secretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auth-config.ssh-public-key.ssh-client-cert-pass.secret-version" => Some(("authConfig.sshPublicKey.sshClientCertPass.secretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auth-config.ssh-public-key.username" => Some(("authConfig.sshPublicKey.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auth-config.user-password.password.secret-version" => Some(("authConfig.userPassword.password.secretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auth-config.user-password.username" => Some(("authConfig.userPassword.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connector-version" => Some(("connectorVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "egress-backends" => Some(("egressBackends", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "envoy-image-location" => Some(("envoyImageLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "image-location" => Some(("imageLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "lock-config.locked" => Some(("lockConfig.locked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "lock-config.reason" => Some(("lockConfig.reason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-config.max-node-count" => Some(("nodeConfig.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.min-node-count" => Some(("nodeConfig.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-directory" => Some(("serviceDirectory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.description" => Some(("status.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -101,7 +189,7 @@ where "suspended" => Some(("suspended", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["audience", "auth-config", "auth-type", "client-id", "client-key", "client-secret", "connector-version", "create-time", "description", "egress-backends", 
"envoy-image-location", "image-location", "issuer", "jwt-claims", "labels", "lock-config", "locked", "name", "oauth2-client-credentials", "oauth2-jwt-bearer", "password", "reason", "secret-version", "service-account", "service-directory", "state", "status", "subject", "suspended", "update-time", "user-password", "username"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["audience", "auth-config", "auth-type", "cert-type", "client-id", "client-key", "client-secret", "connector-version", "create-time", "description", "envoy-image-location", "image-location", "issuer", "jwt-claims", "labels", "lock-config", "locked", "max-node-count", "min-node-count", "name", "node-config", "oauth2-client-credentials", "oauth2-jwt-bearer", "password", "reason", "secret-version", "service-account", "service-directory", "ssh-client-cert", "ssh-client-cert-pass", "ssh-public-key", "state", "status", "subject", "suspended", "update-time", "user-password", "username"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -332,7 +420,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -394,7 +482,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -479,18 +567,23 @@ where "auth-config.oauth2-jwt-bearer.jwt-claims.audience" => 
Some(("authConfig.oauth2JwtBearer.jwtClaims.audience", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auth-config.oauth2-jwt-bearer.jwt-claims.issuer" => Some(("authConfig.oauth2JwtBearer.jwtClaims.issuer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auth-config.oauth2-jwt-bearer.jwt-claims.subject" => Some(("authConfig.oauth2JwtBearer.jwtClaims.subject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auth-config.ssh-public-key.cert-type" => Some(("authConfig.sshPublicKey.certType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auth-config.ssh-public-key.ssh-client-cert.secret-version" => Some(("authConfig.sshPublicKey.sshClientCert.secretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auth-config.ssh-public-key.ssh-client-cert-pass.secret-version" => Some(("authConfig.sshPublicKey.sshClientCertPass.secretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auth-config.ssh-public-key.username" => Some(("authConfig.sshPublicKey.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auth-config.user-password.password.secret-version" => Some(("authConfig.userPassword.password.secretVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "auth-config.user-password.username" => Some(("authConfig.userPassword.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connector-version" => Some(("connectorVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "egress-backends" => Some(("egressBackends", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "envoy-image-location" => 
Some(("envoyImageLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "image-location" => Some(("imageLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "lock-config.locked" => Some(("lockConfig.locked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "lock-config.reason" => Some(("lockConfig.reason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-config.max-node-count" => Some(("nodeConfig.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-config.min-node-count" => Some(("nodeConfig.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-directory" => Some(("serviceDirectory", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.description" => Some(("status.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -499,7 +592,7 @@ where "suspended" => Some(("suspended", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["audience", "auth-config", "auth-type", "client-id", "client-key", "client-secret", "connector-version", "create-time", "description", "egress-backends", "envoy-image-location", "image-location", "issuer", "jwt-claims", "labels", "lock-config", "locked", "name", "oauth2-client-credentials", "oauth2-jwt-bearer", "password", "reason", "secret-version", "service-account", "service-directory", "state", "status", "subject", "suspended", 
"update-time", "user-password", "username"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["audience", "auth-config", "auth-type", "cert-type", "client-id", "client-key", "client-secret", "connector-version", "create-time", "description", "envoy-image-location", "image-location", "issuer", "jwt-claims", "labels", "lock-config", "locked", "max-node-count", "min-node-count", "name", "node-config", "oauth2-client-credentials", "oauth2-jwt-bearer", "password", "reason", "secret-version", "service-account", "service-directory", "ssh-client-cert", "ssh-client-cert-pass", "ssh-public-key", "state", "status", "subject", "suspended", "update-time", "user-password", "username"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -514,7 +607,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -573,7 +666,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -635,7 +728,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -963,346 +1056,6 @@ where } } - async fn _projects_locations_global_providers_connectors_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> 
Result<(), DoitError> { - let mut call = self.hub.projects().locations_global_providers_connectors_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_global_providers_connectors_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_global_providers_connectors_list(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - 
call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_global_providers_connectors_versions_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_global_providers_connectors_versions_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "view" => { - call = call.view(value.unwrap_or("")); - }, - _ => { - let mut 
found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["view"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_global_providers_connectors_versions_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_global_providers_connectors_versions_list(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "view" => { - call = call.view(value.unwrap_or("")); - }, - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - _ => { - let mut found = 
false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token", "view"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_global_providers_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_global_providers_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - 
{let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_global_providers_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_global_providers_list(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - 
let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or("")); @@ -1313,7 +1066,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1563,7 +1316,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1615,6 +1368,287 @@ where } } + async fn _projects_locations_providers_connectors_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.projects().locations_providers_connectors_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_providers_connectors_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_providers_connectors_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" 
=> { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_providers_connectors_versions_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_providers_connectors_versions_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "view" => { + call = call.view(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = 
true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["view"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_providers_connectors_versions_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_providers_connectors_versions_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "view" => { + call = call.view(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "view"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_providers_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_providers_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let 
protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_providers_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_providers_get_iam_policy(opt.value_of("resource").unwrap_or("")); @@ -1622,7 +1656,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1671,6 +1705,65 @@ where } } + async fn _projects_locations_providers_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_providers_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + 
"page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_providers_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1850,6 +1943,9 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { + ("locations-connections-connection-schema-metadata-refresh", Some(opt)) => { + call_result = self._projects_locations_connections_connection_schema_metadata_refresh(opt, dry_run, &mut err).await; + }, ("locations-connections-create", 
Some(opt)) => { call_result = self._projects_locations_connections_create(opt, dry_run, &mut err).await; }, @@ -1889,24 +1985,6 @@ where ("locations-get-runtime-config", Some(opt)) => { call_result = self._projects_locations_get_runtime_config(opt, dry_run, &mut err).await; }, - ("locations-global-providers-connectors-get", Some(opt)) => { - call_result = self._projects_locations_global_providers_connectors_get(opt, dry_run, &mut err).await; - }, - ("locations-global-providers-connectors-list", Some(opt)) => { - call_result = self._projects_locations_global_providers_connectors_list(opt, dry_run, &mut err).await; - }, - ("locations-global-providers-connectors-versions-get", Some(opt)) => { - call_result = self._projects_locations_global_providers_connectors_versions_get(opt, dry_run, &mut err).await; - }, - ("locations-global-providers-connectors-versions-list", Some(opt)) => { - call_result = self._projects_locations_global_providers_connectors_versions_list(opt, dry_run, &mut err).await; - }, - ("locations-global-providers-get", Some(opt)) => { - call_result = self._projects_locations_global_providers_get(opt, dry_run, &mut err).await; - }, - ("locations-global-providers-list", Some(opt)) => { - call_result = self._projects_locations_global_providers_list(opt, dry_run, &mut err).await; - }, ("locations-list", Some(opt)) => { call_result = self._projects_locations_list(opt, dry_run, &mut err).await; }, @@ -1922,9 +2000,27 @@ where ("locations-operations-list", Some(opt)) => { call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; }, + ("locations-providers-connectors-get", Some(opt)) => { + call_result = self._projects_locations_providers_connectors_get(opt, dry_run, &mut err).await; + }, + ("locations-providers-connectors-list", Some(opt)) => { + call_result = self._projects_locations_providers_connectors_list(opt, dry_run, &mut err).await; + }, + ("locations-providers-connectors-versions-get", Some(opt)) => { + call_result = 
self._projects_locations_providers_connectors_versions_get(opt, dry_run, &mut err).await; + }, + ("locations-providers-connectors-versions-list", Some(opt)) => { + call_result = self._projects_locations_providers_connectors_versions_list(opt, dry_run, &mut err).await; + }, + ("locations-providers-get", Some(opt)) => { + call_result = self._projects_locations_providers_get(opt, dry_run, &mut err).await; + }, ("locations-providers-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_providers_get_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-providers-list", Some(opt)) => { + call_result = self._projects_locations_providers_list(opt, dry_run, &mut err).await; + }, ("locations-providers-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_providers_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -2010,7 +2106,35 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-connections-create', 'locations-connections-delete', 'locations-connections-get', 'locations-connections-get-connection-schema-metadata', 'locations-connections-get-iam-policy', 'locations-connections-list', 'locations-connections-patch', 'locations-connections-runtime-action-schemas-list', 'locations-connections-runtime-entity-schemas-list', 'locations-connections-set-iam-policy', 'locations-connections-test-iam-permissions', 'locations-get', 'locations-get-runtime-config', 'locations-global-providers-connectors-get', 'locations-global-providers-connectors-list', 'locations-global-providers-connectors-versions-get', 'locations-global-providers-connectors-versions-list', 'locations-global-providers-get', 'locations-global-providers-list', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-providers-get-iam-policy', 'locations-providers-set-iam-policy' and 'locations-providers-test-iam-permissions'", vec![ 
+ ("projects", "methods: 'locations-connections-connection-schema-metadata-refresh', 'locations-connections-create', 'locations-connections-delete', 'locations-connections-get', 'locations-connections-get-connection-schema-metadata', 'locations-connections-get-iam-policy', 'locations-connections-list', 'locations-connections-patch', 'locations-connections-runtime-action-schemas-list', 'locations-connections-runtime-entity-schemas-list', 'locations-connections-set-iam-policy', 'locations-connections-test-iam-permissions', 'locations-get', 'locations-get-runtime-config', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-providers-connectors-get', 'locations-providers-connectors-list', 'locations-providers-connectors-versions-get', 'locations-providers-connectors-versions-list', 'locations-providers-get', 'locations-providers-get-iam-policy', 'locations-providers-list', 'locations-providers-set-iam-policy' and 'locations-providers-test-iam-permissions'", vec![ + ("locations-connections-connection-schema-metadata-refresh", + Some(r##"Refresh runtime schema of a connection."##), + "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-connections-connection-schema-metadata-refresh", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Resource name. 
Format: projects/{project}/locations/{location}/connections/{connection}/connectionSchemaMetadata"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-connections-create", Some(r##"Creates a new Connection in a given project and location."##), "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-connections-create", @@ -2111,7 +2235,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2227,7 +2351,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2255,7 +2379,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2315,138 +2439,6 @@ async fn main() { Some(false), Some(true)), - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-global-providers-connectors-get", - Some(r##"Gets details of a single Connector."##), - "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-global-providers-connectors-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. Resource name of the form: `projects/*/locations/*/providers/*/connectors/*`"##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-global-providers-connectors-list", - Some(r##"Lists Connectors in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-global-providers-connectors-list", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
Parent resource of the connectors, of the form: `projects/*/locations/*/providers/*`"##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-global-providers-connectors-versions-get", - Some(r##"Gets details of a single connector version."##), - "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-global-providers-connectors-versions-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. Resource name of the form: `projects/*/locations/*/providers/*/connectors/*/versions/*`"##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-global-providers-connectors-versions-list", - Some(r##"Lists Connector Versions in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-global-providers-connectors-versions-list", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
Parent resource of the connectors, of the form: `projects/*/locations/*/providers/*/connectors/*`"##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-global-providers-get", - Some(r##"Gets details of a single Provider."##), - "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-global-providers-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. Resource name of the form: `projects/*/locations/*/providers/*`"##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-global-providers-list", - Some(r##"Lists Providers in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-global-providers-list", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
Parent resource of the API, of the form: `projects/*/locations/*`"##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2563,6 +2555,116 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-providers-connectors-get", + Some(r##"Gets details of a single Connector."##), + "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-providers-connectors-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Resource name of the form: `projects/*/locations/*/providers/*/connectors/*` Only global location is supported for Connector resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-providers-connectors-list", + Some(r##"Lists Connectors in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-providers-connectors-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
Parent resource of the connectors, of the form: `projects/*/locations/*/providers/*` Only global location is supported for Connector resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-providers-connectors-versions-get", + Some(r##"Gets details of a single connector version."##), + "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-providers-connectors-versions-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Resource name of the form: `projects/*/locations/*/providers/*/connectors/*/versions/*` Only global location is supported for ConnectorVersion resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-providers-connectors-versions-list", + Some(r##"Lists Connector Versions in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-providers-connectors-versions-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
Parent resource of the connectors, of the form: `projects/*/locations/*/providers/*/connectors/*` Only global location is supported for ConnectorVersion resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-providers-get", + Some(r##"Gets details of a provider."##), + "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-providers-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Resource name of the form: `projects/*/locations/*/providers/*` Only global location is supported for Provider resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2575,7 +2677,29 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-providers-list", + Some(r##"Lists Providers in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_connectors1_cli/projects_locations-providers-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Parent resource of the API, of the form: `projects/*/locations/*` Only global location is supported for Provider resource."##), Some(true), Some(false)), @@ -2597,7 +2721,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2625,7 +2749,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2653,7 +2777,7 @@ async fn main() { let mut app = App::new("connectors1") .author("Sebastian Thiel ") - .version("4.0.1+20220214") + .version("5.0.2+20230103") .about("Enables users to create and manage connections to Google Cloud services and third-party business applications using the Connectors interface.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_connectors1_cli") .arg(Arg::with_name("url") diff --git a/gen/connectors1/Cargo.toml b/gen/connectors1/Cargo.toml index 28f42d3dfb..a82834a67c 100644 --- a/gen/connectors1/Cargo.toml +++ b/gen/connectors1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-connectors1" -version = "5.0.2-beta-1+20230103" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Connectors (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/connectors1" homepage = "https://cloud.google.com/apigee/docs/api-platform/connectors/about-connectors" -documentation = "https://docs.rs/google-connectors1/5.0.2-beta-1+20230103" +documentation = "https://docs.rs/google-connectors1/5.0.2+20230103" license = "MIT" keywords = ["connectors", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/connectors1/README.md b/gen/connectors1/README.md index 0191f3d323..73f08da82b 100644 --- a/gen/connectors1/README.md +++ b/gen/connectors1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-connectors1` library allows access to all features of the *Google Connectors* service. -This documentation was generated from *Connectors* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *connectors:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Connectors* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *connectors:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Connectors* *v1* API can be found at the [official documentation site](https://cloud.google.com/apigee/docs/api-platform/connectors/about-connectors). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/Connectors) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/Connectors) ... * projects - * [*locations connections connection schema metadata refresh*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionConnectionSchemaMetadataRefreshCall), [*locations connections create*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionCreateCall), [*locations connections delete*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionDeleteCall), [*locations connections get*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionGetCall), [*locations connections get connection schema metadata*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionGetConnectionSchemaMetadataCall), [*locations connections get iam policy*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionGetIamPolicyCall), [*locations connections list*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionListCall), [*locations connections 
patch*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionPatchCall), [*locations connections runtime action schemas list*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionRuntimeActionSchemaListCall), [*locations connections runtime entity schemas list*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionRuntimeEntitySchemaListCall), [*locations connections set iam policy*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionSetIamPolicyCall), [*locations connections test iam permissions*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationConnectionTestIamPermissionCall), [*locations get*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationGetCall), [*locations get runtime config*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationGetRuntimeConfigCall), [*locations list*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationOperationListCall), [*locations providers connectors 
get*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderConnectorGetCall), [*locations providers connectors list*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderConnectorListCall), [*locations providers connectors versions get*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderConnectorVersionGetCall), [*locations providers connectors versions list*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderConnectorVersionListCall), [*locations providers get*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderGetCall), [*locations providers get iam policy*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderGetIamPolicyCall), [*locations providers list*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderListCall), [*locations providers set iam policy*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderSetIamPolicyCall) and [*locations providers test iam permissions*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/api::ProjectLocationProviderTestIamPermissionCall) + * [*locations connections connection schema metadata refresh*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionConnectionSchemaMetadataRefreshCall), [*locations connections create*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionCreateCall), [*locations connections delete*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionDeleteCall), [*locations connections 
get*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionGetCall), [*locations connections get connection schema metadata*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionGetConnectionSchemaMetadataCall), [*locations connections get iam policy*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionGetIamPolicyCall), [*locations connections list*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionListCall), [*locations connections patch*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionPatchCall), [*locations connections runtime action schemas list*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionRuntimeActionSchemaListCall), [*locations connections runtime entity schemas list*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionRuntimeEntitySchemaListCall), [*locations connections set iam policy*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionSetIamPolicyCall), [*locations connections test iam permissions*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationConnectionTestIamPermissionCall), [*locations get*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationGetCall), [*locations get runtime config*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationGetRuntimeConfigCall), [*locations list*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationOperationCancelCall), [*locations operations 
delete*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationOperationListCall), [*locations providers connectors get*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderConnectorGetCall), [*locations providers connectors list*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderConnectorListCall), [*locations providers connectors versions get*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderConnectorVersionGetCall), [*locations providers connectors versions list*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderConnectorVersionListCall), [*locations providers get*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderGetCall), [*locations providers get iam policy*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderGetIamPolicyCall), [*locations providers list*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderListCall), [*locations providers set iam policy*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderSetIamPolicyCall) and [*locations providers test iam permissions*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/api::ProjectLocationProviderTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/Connectors)** +* **[Hub](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/Connectors)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::CallBuilder) -* **[Resources](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::CallBuilder) +* **[Resources](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::Part)** + * **[Parts](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::Delegate) to the -[Method Builder](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::Delegate) to the +[Method Builder](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::RequestValue) and -[decodable](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::RequestValue) and +[decodable](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-connectors1/5.0.2-beta-1+20230103/google_connectors1/client::RequestValue) are moved +* [request values](https://docs.rs/google-connectors1/5.0.2+20230103/google_connectors1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/connectors1/src/api.rs b/gen/connectors1/src/api.rs index 704e9f6465..e4f29313cf 100644 --- a/gen/connectors1/src/api.rs +++ b/gen/connectors1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Connectors { Connectors { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://connectors.googleapis.com/".to_string(), _root_url: "https://connectors.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Connectors { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/connectors1/src/client.rs b/gen/connectors1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/connectors1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/connectors1/src/lib.rs b/gen/connectors1/src/lib.rs index 8165973cb2..44df0871a0 100644 --- a/gen/connectors1/src/lib.rs +++ b/gen/connectors1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Connectors* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *connectors:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Connectors* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *connectors:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Connectors* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/apigee/docs/api-platform/connectors/about-connectors). diff --git a/gen/consumersurveys2-cli/Cargo.toml b/gen/consumersurveys2-cli/Cargo.toml index b2141dfcaa..60ae3403cd 100644 --- a/gen/consumersurveys2-cli/Cargo.toml +++ b/gen/consumersurveys2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-consumersurveys2-cli" -version = "4.0.1+20170407" +version = "5.0.2+20170407" authors = ["Sebastian Thiel "] description = "A complete library to interact with Consumer Surveys (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/consumersurveys2-cli" @@ -19,13 +19,13 @@ name = "consumersurveys2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -35,7 +35,7 @@ tower-service = "^0.3.1" - [dependencies.google-consumersurveys2] path = "../consumersurveys2" -version = "4.0.1+20170407" +version = "5.0.2+20170407" + diff --git a/gen/consumersurveys2-cli/README.md b/gen/consumersurveys2-cli/README.md index 23db97512a..0d21906164 100644 --- a/gen/consumersurveys2-cli/README.md +++ 
b/gen/consumersurveys2-cli/README.md @@ -22,7 +22,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Consumer Surveys* API at revision *20170407*. The CLI is at version *4.0.1*. +This documentation was generated from the *Consumer Surveys* API at revision *20170407*. The CLI is at version *5.0.2*. ```bash consumersurveys2 [options] diff --git a/gen/consumersurveys2-cli/mkdocs.yml b/gen/consumersurveys2-cli/mkdocs.yml index 78b58c7d64..bbb56450e9 100644 --- a/gen/consumersurveys2-cli/mkdocs.yml +++ b/gen/consumersurveys2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Consumer Surveys v4.0.1+20170407 +site_name: Consumer Surveys v5.0.2+20170407 site_url: http://byron.github.io/google-apis-rs/google-consumersurveys2-cli site_description: A complete library to interact with Consumer Surveys (protocol v2) @@ -7,19 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/consumersurveys2 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['mobileapppanels_get.md', 'Mobileapppanels', 'Get'] -- ['mobileapppanels_list.md', 'Mobileapppanels', 'List'] -- ['mobileapppanels_update.md', 'Mobileapppanels', 'Update'] -- ['results_get.md', 'Results', 'Get'] -- ['surveys_delete.md', 'Surveys', 'Delete'] -- ['surveys_get.md', 'Surveys', 'Get'] -- ['surveys_insert.md', 'Surveys', 'Insert'] -- ['surveys_list.md', 'Surveys', 'List'] -- ['surveys_start.md', 'Surveys', 'Start'] -- ['surveys_stop.md', 'Surveys', 'Stop'] -- ['surveys_update.md', 'Surveys', 'Update'] +nav: +- Home: 'index.md' +- 'Mobileapppanels': + - 'Get': 'mobileapppanels_get.md' + - 'List': 'mobileapppanels_list.md' + - 'Update': 'mobileapppanels_update.md' +- 'Results': + - 'Get': 'results_get.md' +- 'Surveys': + - 'Delete': 'surveys_delete.md' + - 'Get': 'surveys_get.md' + - 'Insert': 'surveys_insert.md' + - 'List': 'surveys_list.md' + - 'Start': 'surveys_start.md' + - 'Stop': 'surveys_stop.md' + - 
'Update': 'surveys_update.md' theme: readthedocs diff --git a/gen/consumersurveys2-cli/src/client.rs b/gen/consumersurveys2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/consumersurveys2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/consumersurveys2-cli/src/main.rs b/gen/consumersurveys2-cli/src/main.rs index a67c048cc7..8d49845a6a 100644 --- a/gen/consumersurveys2-cli/src/main.rs +++ b/gen/consumersurveys2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_consumersurveys2::{api, Error, oauth2}; +use google_consumersurveys2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,10 +112,10 @@ where call = call.token(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -568,10 +567,10 @@ where call = call.token(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found 
= false; @@ -1260,7 +1259,7 @@ async fn main() { let mut app = App::new("consumersurveys2") .author("Sebastian Thiel ") - .version("4.0.1+20170407") + .version("5.0.2+20170407") .about("Creates and conducts surveys, lists the surveys that an authenticated user owns, and retrieves survey results and information about specified surveys.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_consumersurveys2_cli") .arg(Arg::with_name("url") diff --git a/gen/consumersurveys2/Cargo.toml b/gen/consumersurveys2/Cargo.toml index a750e428ae..564a2ba7f8 100644 --- a/gen/consumersurveys2/Cargo.toml +++ b/gen/consumersurveys2/Cargo.toml @@ -4,11 +4,11 @@ [package] name = "google-consumersurveys2" -version = "5.0.2-beta-1+20170407" +version = "5.0.2+20170407" authors = ["Sebastian Thiel "] description = "A complete library to interact with Consumer Surveys (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/consumersurveys2" -documentation = "https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407" +documentation = "https://docs.rs/google-consumersurveys2/5.0.2+20170407" license = "MIT" keywords = ["consumersurveys", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/consumersurveys2/README.md b/gen/consumersurveys2/README.md index 07a5d61091..b7925c89fa 100644 --- a/gen/consumersurveys2/README.md +++ b/gen/consumersurveys2/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-consumersurveys2` library allows access to all features of the *Google Consumer Surveys* service. -This documentation was generated from *Consumer Surveys* crate version *5.0.2-beta-1+20170407*, where *20170407* is the exact revision of the *consumersurveys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Consumer Surveys* crate version *5.0.2+20170407*, where *20170407* is the exact revision of the *consumersurveys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/ConsumerSurveys) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/ConsumerSurveys) ... * mobileapppanels - * [*get*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::MobileapppanelGetCall), [*list*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::MobileapppanelListCall) and [*update*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::MobileapppanelUpdateCall) + * [*get*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::MobileapppanelGetCall), [*list*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::MobileapppanelListCall) and [*update*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::MobileapppanelUpdateCall) * results - * [*get*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::ResultGetCall) -* [surveys](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::Survey) - * [*delete*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::SurveyDeleteCall), [*get*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::SurveyGetCall), [*insert*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::SurveyInsertCall), 
[*list*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::SurveyListCall), [*start*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::SurveyStartCall), [*stop*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::SurveyStopCall) and [*update*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::SurveyUpdateCall) + * [*get*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::ResultGetCall) +* [surveys](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::Survey) + * [*delete*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::SurveyDeleteCall), [*get*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::SurveyGetCall), [*insert*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::SurveyInsertCall), [*list*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::SurveyListCall), [*start*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::SurveyStartCall), [*stop*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::SurveyStopCall) and [*update*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::SurveyUpdateCall) Download supported by ... -* [*get results*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/api::ResultGetCall) +* [*get results*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/api::ResultGetCall) @@ -28,17 +28,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/ConsumerSurveys)** +* **[Hub](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/ConsumerSurveys)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::CallBuilder) -* **[Resources](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::CallBuilder) +* **[Resources](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::Part)** + * **[Parts](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::Delegate) to the -[Method Builder](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::Delegate) to the +[Method Builder](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::RequestValue) and -[decodable](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::RequestValue) and +[decodable](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-consumersurveys2/5.0.2-beta-1+20170407/google_consumersurveys2/client::RequestValue) are moved +* [request values](https://docs.rs/google-consumersurveys2/5.0.2+20170407/google_consumersurveys2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/consumersurveys2/src/api.rs b/gen/consumersurveys2/src/api.rs index 51a46825d1..f08020abfb 100644 --- a/gen/consumersurveys2/src/api.rs +++ b/gen/consumersurveys2/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> ConsumerSurveys { ConsumerSurveys { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/consumersurveys/v2/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -147,7 +147,7 @@ impl<'a, S> ConsumerSurveys { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/consumersurveys2/src/client.rs b/gen/consumersurveys2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/consumersurveys2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/consumersurveys2/src/lib.rs b/gen/consumersurveys2/src/lib.rs index d7f30a0d64..dbbbf88fd6 100644 --- a/gen/consumersurveys2/src/lib.rs +++ b/gen/consumersurveys2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Consumer Surveys* crate version *5.0.2-beta-1+20170407*, where *20170407* is the exact revision of the *consumersurveys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Consumer Surveys* crate version *5.0.2+20170407*, where *20170407* is the exact revision of the *consumersurveys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/main/gen/consumersurveys2). //! # Features //! diff --git a/gen/contactcenterinsights1-cli/Cargo.toml b/gen/contactcenterinsights1-cli/Cargo.toml index a6eafe7ed7..3970aef82b 100644 --- a/gen/contactcenterinsights1-cli/Cargo.toml +++ b/gen/contactcenterinsights1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-contactcenterinsights1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Contactcenterinsights (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/contactcenterinsights1-cli" @@ -20,13 +20,13 @@ name = "contactcenterinsights1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-contactcenterinsights1] path = "../contactcenterinsights1" -version = "4.0.1+20220227" +version = "5.0.2+20230115" + diff --git a/gen/contactcenterinsights1-cli/README.md b/gen/contactcenterinsights1-cli/README.md index 2af8acbad1..1794c9c998 100644 --- 
a/gen/contactcenterinsights1-cli/README.md +++ b/gen/contactcenterinsights1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Contactcenterinsights* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *Contactcenterinsights* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash contactcenterinsights1 [options] @@ -34,10 +34,12 @@ contactcenterinsights1 [options] locations-conversations-analyses-delete [-p ]... [-o ] locations-conversations-analyses-get [-p ]... [-o ] locations-conversations-analyses-list [-p ]... [-o ] + locations-conversations-bulk-analyze (-r )... [-p ]... [-o ] locations-conversations-calculate-stats [-p ]... [-o ] locations-conversations-create (-r )... [-p ]... [-o ] locations-conversations-delete [-p ]... [-o ] locations-conversations-get [-p ]... [-o ] + locations-conversations-ingest (-r )... [-p ]... [-o ] locations-conversations-list [-p ]... [-o ] locations-conversations-patch (-r )... [-p ]... [-o ] locations-get-settings [-p ]... [-o ] @@ -47,6 +49,7 @@ contactcenterinsights1 [options] locations-issue-models-delete [-p ]... [-o ] locations-issue-models-deploy (-r )... [-p ]... [-o ] locations-issue-models-get [-p ]... [-o ] + locations-issue-models-issues-delete [-p ]... [-o ] locations-issue-models-issues-get [-p ]... [-o ] locations-issue-models-issues-list [-p ]... [-o ] locations-issue-models-issues-patch (-r )... [-p ]... 
[-o ] diff --git a/gen/contactcenterinsights1-cli/mkdocs.yml b/gen/contactcenterinsights1-cli/mkdocs.yml index 5508286e65..295beb7024 100644 --- a/gen/contactcenterinsights1-cli/mkdocs.yml +++ b/gen/contactcenterinsights1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Contactcenterinsights v4.0.1+20220227 +site_name: Contactcenterinsights v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-contactcenterinsights1-cli site_description: A complete library to interact with Contactcenterinsights (protocol v1) @@ -7,45 +7,49 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/contactcenterins docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-conversations-analyses-create.md', 'Projects', 'Locations Conversations Analyses Create'] -- ['projects_locations-conversations-analyses-delete.md', 'Projects', 'Locations Conversations Analyses Delete'] -- ['projects_locations-conversations-analyses-get.md', 'Projects', 'Locations Conversations Analyses Get'] -- ['projects_locations-conversations-analyses-list.md', 'Projects', 'Locations Conversations Analyses List'] -- ['projects_locations-conversations-calculate-stats.md', 'Projects', 'Locations Conversations Calculate Stats'] -- ['projects_locations-conversations-create.md', 'Projects', 'Locations Conversations Create'] -- ['projects_locations-conversations-delete.md', 'Projects', 'Locations Conversations Delete'] -- ['projects_locations-conversations-get.md', 'Projects', 'Locations Conversations Get'] -- ['projects_locations-conversations-list.md', 'Projects', 'Locations Conversations List'] -- ['projects_locations-conversations-patch.md', 'Projects', 'Locations Conversations Patch'] -- ['projects_locations-get-settings.md', 'Projects', 'Locations Get Settings'] -- ['projects_locations-insightsdata-export.md', 'Projects', 'Locations Insightsdata Export'] -- ['projects_locations-issue-models-calculate-issue-model-stats.md', 'Projects', 'Locations Issue Models 
Calculate Issue Model Stats'] -- ['projects_locations-issue-models-create.md', 'Projects', 'Locations Issue Models Create'] -- ['projects_locations-issue-models-delete.md', 'Projects', 'Locations Issue Models Delete'] -- ['projects_locations-issue-models-deploy.md', 'Projects', 'Locations Issue Models Deploy'] -- ['projects_locations-issue-models-get.md', 'Projects', 'Locations Issue Models Get'] -- ['projects_locations-issue-models-issues-get.md', 'Projects', 'Locations Issue Models Issues Get'] -- ['projects_locations-issue-models-issues-list.md', 'Projects', 'Locations Issue Models Issues List'] -- ['projects_locations-issue-models-issues-patch.md', 'Projects', 'Locations Issue Models Issues Patch'] -- ['projects_locations-issue-models-list.md', 'Projects', 'Locations Issue Models List'] -- ['projects_locations-issue-models-patch.md', 'Projects', 'Locations Issue Models Patch'] -- ['projects_locations-issue-models-undeploy.md', 'Projects', 'Locations Issue Models Undeploy'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-phrase-matchers-create.md', 'Projects', 'Locations Phrase Matchers Create'] -- ['projects_locations-phrase-matchers-delete.md', 'Projects', 'Locations Phrase Matchers Delete'] -- ['projects_locations-phrase-matchers-get.md', 'Projects', 'Locations Phrase Matchers Get'] -- ['projects_locations-phrase-matchers-list.md', 'Projects', 'Locations Phrase Matchers List'] -- ['projects_locations-phrase-matchers-patch.md', 'Projects', 'Locations Phrase Matchers Patch'] -- ['projects_locations-update-settings.md', 'Projects', 'Locations Update Settings'] -- ['projects_locations-views-create.md', 'Projects', 'Locations Views Create'] -- ['projects_locations-views-delete.md', 'Projects', 'Locations Views Delete'] -- 
['projects_locations-views-get.md', 'Projects', 'Locations Views Get'] -- ['projects_locations-views-list.md', 'Projects', 'Locations Views List'] -- ['projects_locations-views-patch.md', 'Projects', 'Locations Views Patch'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Conversations Analyses Create': 'projects_locations-conversations-analyses-create.md' + - 'Locations Conversations Analyses Delete': 'projects_locations-conversations-analyses-delete.md' + - 'Locations Conversations Analyses Get': 'projects_locations-conversations-analyses-get.md' + - 'Locations Conversations Analyses List': 'projects_locations-conversations-analyses-list.md' + - 'Locations Conversations Bulk Analyze': 'projects_locations-conversations-bulk-analyze.md' + - 'Locations Conversations Calculate Stats': 'projects_locations-conversations-calculate-stats.md' + - 'Locations Conversations Create': 'projects_locations-conversations-create.md' + - 'Locations Conversations Delete': 'projects_locations-conversations-delete.md' + - 'Locations Conversations Get': 'projects_locations-conversations-get.md' + - 'Locations Conversations Ingest': 'projects_locations-conversations-ingest.md' + - 'Locations Conversations List': 'projects_locations-conversations-list.md' + - 'Locations Conversations Patch': 'projects_locations-conversations-patch.md' + - 'Locations Get Settings': 'projects_locations-get-settings.md' + - 'Locations Insightsdata Export': 'projects_locations-insightsdata-export.md' + - 'Locations Issue Models Calculate Issue Model Stats': 'projects_locations-issue-models-calculate-issue-model-stats.md' + - 'Locations Issue Models Create': 'projects_locations-issue-models-create.md' + - 'Locations Issue Models Delete': 'projects_locations-issue-models-delete.md' + - 'Locations Issue Models Deploy': 'projects_locations-issue-models-deploy.md' + - 'Locations Issue Models Get': 'projects_locations-issue-models-get.md' + - 'Locations Issue Models Issues Delete': 
'projects_locations-issue-models-issues-delete.md' + - 'Locations Issue Models Issues Get': 'projects_locations-issue-models-issues-get.md' + - 'Locations Issue Models Issues List': 'projects_locations-issue-models-issues-list.md' + - 'Locations Issue Models Issues Patch': 'projects_locations-issue-models-issues-patch.md' + - 'Locations Issue Models List': 'projects_locations-issue-models-list.md' + - 'Locations Issue Models Patch': 'projects_locations-issue-models-patch.md' + - 'Locations Issue Models Undeploy': 'projects_locations-issue-models-undeploy.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Phrase Matchers Create': 'projects_locations-phrase-matchers-create.md' + - 'Locations Phrase Matchers Delete': 'projects_locations-phrase-matchers-delete.md' + - 'Locations Phrase Matchers Get': 'projects_locations-phrase-matchers-get.md' + - 'Locations Phrase Matchers List': 'projects_locations-phrase-matchers-list.md' + - 'Locations Phrase Matchers Patch': 'projects_locations-phrase-matchers-patch.md' + - 'Locations Update Settings': 'projects_locations-update-settings.md' + - 'Locations Views Create': 'projects_locations-views-create.md' + - 'Locations Views Delete': 'projects_locations-views-delete.md' + - 'Locations Views Get': 'projects_locations-views-get.md' + - 'Locations Views List': 'projects_locations-views-list.md' + - 'Locations Views Patch': 'projects_locations-views-patch.md' theme: readthedocs diff --git a/gen/contactcenterinsights1-cli/src/client.rs b/gen/contactcenterinsights1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/contactcenterinsights1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use 
crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/contactcenterinsights1-cli/src/main.rs b/gen/contactcenterinsights1-cli/src/main.rs index 5b797a1311..cb80a07681 100644 --- a/gen/contactcenterinsights1-cli/src/main.rs +++ b/gen/contactcenterinsights1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_contactcenterinsights1::{api, Error, oauth2}; +use google_contactcenterinsights1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -76,11 +75,20 @@ where match &temp_cursor.to_string()[..] 
{ "analysis-result.call-analysis-metadata.issue-model-result.issue-model" => Some(("analysisResult.callAnalysisMetadata.issueModelResult.issueModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "analysis-result.end-time" => Some(("analysisResult.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "annotator-selector.issue-models" => Some(("annotatorSelector.issueModels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "annotator-selector.phrase-matchers" => Some(("annotatorSelector.phraseMatchers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "annotator-selector.run-entity-annotator" => Some(("annotatorSelector.runEntityAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-intent-annotator" => Some(("annotatorSelector.runIntentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-interruption-annotator" => Some(("annotatorSelector.runInterruptionAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-issue-model-annotator" => Some(("annotatorSelector.runIssueModelAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-phrase-matcher-annotator" => Some(("annotatorSelector.runPhraseMatcherAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-sentiment-annotator" => Some(("annotatorSelector.runSentimentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-silence-annotator" => Some(("annotatorSelector.runSilenceAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "request-time" => Some(("requestTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-result", "call-analysis-metadata", "create-time", "end-time", "issue-model", "issue-model-result", "name", "request-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-result", "annotator-selector", "call-analysis-metadata", "create-time", "end-time", "issue-model", "issue-model-result", "issue-models", "name", "phrase-matchers", "request-time", "run-entity-annotator", "run-intent-annotator", "run-interruption-annotator", "run-issue-model-annotator", "run-phrase-matcher-annotator", "run-sentiment-annotator", "run-silence-annotator"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -254,7 +262,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -306,6 +314,102 @@ where } } + async fn _projects_locations_conversations_bulk_analyze(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + 
err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "analysis-percentage" => Some(("analysisPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "annotator-selector.issue-models" => Some(("annotatorSelector.issueModels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "annotator-selector.phrase-matchers" => Some(("annotatorSelector.phraseMatchers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "annotator-selector.run-entity-annotator" => Some(("annotatorSelector.runEntityAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-intent-annotator" => Some(("annotatorSelector.runIntentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-interruption-annotator" => Some(("annotatorSelector.runInterruptionAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-issue-model-annotator" => Some(("annotatorSelector.runIssueModelAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-phrase-matcher-annotator" => Some(("annotatorSelector.runPhraseMatcherAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-sentiment-annotator" => Some(("annotatorSelector.runSentimentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "annotator-selector.run-silence-annotator" => Some(("annotatorSelector.runSilenceAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = 
FieldCursor::did_you_mean(key, &vec!["analysis-percentage", "annotator-selector", "filter", "issue-models", "parent", "phrase-matchers", "run-entity-annotator", "run-intent-annotator", "run-interruption-annotator", "run-issue-model-annotator", "run-phrase-matcher-annotator", "run-sentiment-annotator", "run-silence-annotator"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudContactcenterinsightsV1BulkAnalyzeConversationsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversations_bulk_analyze(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_conversations_calculate_stats(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_conversations_calculate_stats(opt.value_of("location").unwrap_or("")); @@ -399,6 +503,15 @@ where "language-code" => Some(("languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "latest-analysis.analysis-result.call-analysis-metadata.issue-model-result.issue-model" => Some(("latestAnalysis.analysisResult.callAnalysisMetadata.issueModelResult.issueModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "latest-analysis.analysis-result.end-time" => Some(("latestAnalysis.analysisResult.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.issue-models" => Some(("latestAnalysis.annotatorSelector.issueModels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "latest-analysis.annotator-selector.phrase-matchers" => Some(("latestAnalysis.annotatorSelector.phraseMatchers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "latest-analysis.annotator-selector.run-entity-annotator" => Some(("latestAnalysis.annotatorSelector.runEntityAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-intent-annotator" => Some(("latestAnalysis.annotatorSelector.runIntentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-interruption-annotator" => 
Some(("latestAnalysis.annotatorSelector.runInterruptionAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-issue-model-annotator" => Some(("latestAnalysis.annotatorSelector.runIssueModelAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-phrase-matcher-annotator" => Some(("latestAnalysis.annotatorSelector.runPhraseMatcherAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-sentiment-annotator" => Some(("latestAnalysis.annotatorSelector.runSentimentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-silence-annotator" => Some(("latestAnalysis.annotatorSelector.runSilenceAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "latest-analysis.create-time" => Some(("latestAnalysis.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "latest-analysis.name" => Some(("latestAnalysis.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "latest-analysis.request-time" => Some(("latestAnalysis.requestTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -410,7 +523,7 @@ where "turn-count" => Some(("turnCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-channel", "agent-id", "analysis-result", "audio-uri", "call-analysis-metadata", "call-metadata", "create-time", "customer-channel", "data-source", "dialogflow-conversation", "dialogflow-source", "duration", "end-time", "expire-time", "gcs-source", "issue-model", "issue-model-result", "labels", "language-code", "latest-analysis", "medium", "name", 
"obfuscated-user-id", "request-time", "start-time", "transcript-uri", "ttl", "turn-count", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-channel", "agent-id", "analysis-result", "annotator-selector", "audio-uri", "call-analysis-metadata", "call-metadata", "create-time", "customer-channel", "data-source", "dialogflow-conversation", "dialogflow-source", "duration", "end-time", "expire-time", "gcs-source", "issue-model", "issue-model-result", "issue-models", "labels", "language-code", "latest-analysis", "medium", "name", "obfuscated-user-id", "phrase-matchers", "request-time", "run-entity-annotator", "run-intent-annotator", "run-interruption-annotator", "run-issue-model-annotator", "run-phrase-matcher-annotator", "run-sentiment-annotator", "run-silence-annotator", "start-time", "transcript-uri", "ttl", "turn-count", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -481,7 +594,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -586,6 +699,94 @@ where } } + async fn _projects_locations_conversations_ingest(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = 
temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "conversation-config.agent-id" => Some(("conversationConfig.agentId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gcs-source.bucket-uri" => Some(("gcsSource.bucketUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transcript-object-config.medium" => Some(("transcriptObjectConfig.medium", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-id", "bucket-uri", "conversation-config", "gcs-source", "medium", "parent", "transcript-object-config"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudContactcenterinsightsV1IngestConversationsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversations_ingest(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } 
+ } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_conversations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_conversations_list(opt.value_of("parent").unwrap_or("")); @@ -599,7 +800,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -688,6 +889,15 @@ where "language-code" => Some(("languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "latest-analysis.analysis-result.call-analysis-metadata.issue-model-result.issue-model" => Some(("latestAnalysis.analysisResult.callAnalysisMetadata.issueModelResult.issueModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "latest-analysis.analysis-result.end-time" => Some(("latestAnalysis.analysisResult.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"latest-analysis.annotator-selector.issue-models" => Some(("latestAnalysis.annotatorSelector.issueModels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "latest-analysis.annotator-selector.phrase-matchers" => Some(("latestAnalysis.annotatorSelector.phraseMatchers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "latest-analysis.annotator-selector.run-entity-annotator" => Some(("latestAnalysis.annotatorSelector.runEntityAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-intent-annotator" => Some(("latestAnalysis.annotatorSelector.runIntentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-interruption-annotator" => Some(("latestAnalysis.annotatorSelector.runInterruptionAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-issue-model-annotator" => Some(("latestAnalysis.annotatorSelector.runIssueModelAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-phrase-matcher-annotator" => Some(("latestAnalysis.annotatorSelector.runPhraseMatcherAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-sentiment-annotator" => Some(("latestAnalysis.annotatorSelector.runSentimentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-analysis.annotator-selector.run-silence-annotator" => Some(("latestAnalysis.annotatorSelector.runSilenceAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "latest-analysis.create-time" => Some(("latestAnalysis.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "latest-analysis.name" => Some(("latestAnalysis.name", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "latest-analysis.request-time" => Some(("latestAnalysis.requestTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -699,7 +909,7 @@ where "turn-count" => Some(("turnCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-channel", "agent-id", "analysis-result", "audio-uri", "call-analysis-metadata", "call-metadata", "create-time", "customer-channel", "data-source", "dialogflow-conversation", "dialogflow-source", "duration", "end-time", "expire-time", "gcs-source", "issue-model", "issue-model-result", "labels", "language-code", "latest-analysis", "medium", "name", "obfuscated-user-id", "request-time", "start-time", "transcript-uri", "ttl", "turn-count", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-channel", "agent-id", "analysis-result", "annotator-selector", "audio-uri", "call-analysis-metadata", "call-metadata", "create-time", "customer-channel", "data-source", "dialogflow-conversation", "dialogflow-source", "duration", "end-time", "expire-time", "gcs-source", "issue-model", "issue-model-result", "issue-models", "labels", "language-code", "latest-analysis", "medium", "name", "obfuscated-user-id", "phrase-matchers", "request-time", "run-entity-annotator", "run-intent-annotator", "run-interruption-annotator", "run-issue-model-annotator", "run-phrase-matcher-annotator", "run-sentiment-annotator", "run-silence-annotator", "start-time", "transcript-uri", "ttl", "turn-count", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -714,7 +924,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = 
call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -986,13 +1196,14 @@ where "input-data-config.filter" => Some(("inputDataConfig.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "input-data-config.medium" => Some(("inputDataConfig.medium", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "input-data-config.training-conversations-count" => Some(("inputDataConfig.trainingConversationsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "issue-count" => Some(("issueCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "training-stats.analyzed-conversations-count" => Some(("trainingStats.analyzedConversationsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "training-stats.unclassified-conversations-count" => Some(("trainingStats.unclassifiedConversationsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyzed-conversations-count", "create-time", "display-name", "filter", "input-data-config", "medium", "name", "state", "training-conversations-count", "training-stats", "unclassified-conversations-count", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyzed-conversations-count", "create-time", "display-name", "filter", "input-data-config", "issue-count", "medium", "name", "state", "training-conversations-count", "training-stats", "unclassified-conversations-count", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1241,6 +1452,58 @@ where } } + async fn _projects_locations_issue_models_issues_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_issue_models_issues_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_issue_models_issues_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = 
self.hub.projects().locations_issue_models_issues_get(opt.value_of("name").unwrap_or("")); @@ -1371,9 +1634,10 @@ where "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sample-utterances" => Some(("sampleUtterances", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "name", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "name", "sample-utterances", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1388,7 +1652,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1517,13 +1781,14 @@ where "input-data-config.filter" => Some(("inputDataConfig.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "input-data-config.medium" => Some(("inputDataConfig.medium", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "input-data-config.training-conversations-count" => Some(("inputDataConfig.trainingConversationsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "issue-count" => Some(("issueCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "training-stats.analyzed-conversations-count" => Some(("trainingStats.analyzedConversationsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "training-stats.unclassified-conversations-count" => Some(("trainingStats.unclassifiedConversationsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyzed-conversations-count", "create-time", "display-name", "filter", "input-data-config", "medium", "name", "state", "training-conversations-count", "training-stats", "unclassified-conversations-count", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyzed-conversations-count", "create-time", "display-name", "filter", "input-data-config", "issue-count", "medium", "name", "state", "training-conversations-count", "training-stats", "unclassified-conversations-count", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1538,7 +1803,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1786,7 +2051,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2046,7 +2311,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2147,7 +2412,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2219,6 +2484,15 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "analysis-config.annotator-selector.issue-models" => Some(("analysisConfig.annotatorSelector.issueModels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "analysis-config.annotator-selector.phrase-matchers" => Some(("analysisConfig.annotatorSelector.phraseMatchers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "analysis-config.annotator-selector.run-entity-annotator" => Some(("analysisConfig.annotatorSelector.runEntityAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "analysis-config.annotator-selector.run-intent-annotator" => Some(("analysisConfig.annotatorSelector.runIntentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "analysis-config.annotator-selector.run-interruption-annotator" => Some(("analysisConfig.annotatorSelector.runInterruptionAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "analysis-config.annotator-selector.run-issue-model-annotator" => Some(("analysisConfig.annotatorSelector.runIssueModelAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "analysis-config.annotator-selector.run-phrase-matcher-annotator" => 
Some(("analysisConfig.annotatorSelector.runPhraseMatcherAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "analysis-config.annotator-selector.run-sentiment-annotator" => Some(("analysisConfig.annotatorSelector.runSentimentAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "analysis-config.annotator-selector.run-silence-annotator" => Some(("analysisConfig.annotatorSelector.runSilenceAnnotator", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "analysis-config.runtime-integration-analysis-percentage" => Some(("analysisConfig.runtimeIntegrationAnalysisPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "conversation-ttl" => Some(("conversationTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2227,7 +2501,7 @@ where "pubsub-notification-settings" => Some(("pubsubNotificationSettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-config", "conversation-ttl", "create-time", "language-code", "name", "pubsub-notification-settings", "runtime-integration-analysis-percentage", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-config", "annotator-selector", "conversation-ttl", "create-time", "issue-models", "language-code", "name", "phrase-matchers", "pubsub-notification-settings", "run-entity-annotator", "run-intent-annotator", "run-interruption-annotator", "run-issue-model-annotator", "run-phrase-matcher-annotator", "run-sentiment-annotator", "run-silence-annotator", "runtime-integration-analysis-percentage", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2242,7 +2516,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2494,7 +2768,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2587,7 +2861,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2655,6 +2929,9 @@ where ("locations-conversations-analyses-list", Some(opt)) => { call_result = self._projects_locations_conversations_analyses_list(opt, dry_run, &mut err).await; }, + ("locations-conversations-bulk-analyze", Some(opt)) => { + call_result = self._projects_locations_conversations_bulk_analyze(opt, dry_run, &mut err).await; + }, ("locations-conversations-calculate-stats", Some(opt)) => { call_result = self._projects_locations_conversations_calculate_stats(opt, dry_run, &mut err).await; }, @@ -2667,6 +2944,9 @@ where ("locations-conversations-get", Some(opt)) => { call_result = self._projects_locations_conversations_get(opt, dry_run, &mut err).await; }, + ("locations-conversations-ingest", Some(opt)) => { + call_result = self._projects_locations_conversations_ingest(opt, dry_run, &mut err).await; + }, ("locations-conversations-list", Some(opt)) => { call_result = 
self._projects_locations_conversations_list(opt, dry_run, &mut err).await; }, @@ -2694,6 +2974,9 @@ where ("locations-issue-models-get", Some(opt)) => { call_result = self._projects_locations_issue_models_get(opt, dry_run, &mut err).await; }, + ("locations-issue-models-issues-delete", Some(opt)) => { + call_result = self._projects_locations_issue_models_issues_delete(opt, dry_run, &mut err).await; + }, ("locations-issue-models-issues-get", Some(opt)) => { call_result = self._projects_locations_issue_models_issues_get(opt, dry_run, &mut err).await; }, @@ -2833,7 +3116,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-conversations-analyses-create', 'locations-conversations-analyses-delete', 'locations-conversations-analyses-get', 'locations-conversations-analyses-list', 'locations-conversations-calculate-stats', 'locations-conversations-create', 'locations-conversations-delete', 'locations-conversations-get', 'locations-conversations-list', 'locations-conversations-patch', 'locations-get-settings', 'locations-insightsdata-export', 'locations-issue-models-calculate-issue-model-stats', 'locations-issue-models-create', 'locations-issue-models-delete', 'locations-issue-models-deploy', 'locations-issue-models-get', 'locations-issue-models-issues-get', 'locations-issue-models-issues-list', 'locations-issue-models-issues-patch', 'locations-issue-models-list', 'locations-issue-models-patch', 'locations-issue-models-undeploy', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'locations-phrase-matchers-create', 'locations-phrase-matchers-delete', 'locations-phrase-matchers-get', 'locations-phrase-matchers-list', 'locations-phrase-matchers-patch', 'locations-update-settings', 'locations-views-create', 'locations-views-delete', 'locations-views-get', 'locations-views-list' and 'locations-views-patch'", vec![ + ("projects", "methods: 'locations-conversations-analyses-create', 
'locations-conversations-analyses-delete', 'locations-conversations-analyses-get', 'locations-conversations-analyses-list', 'locations-conversations-bulk-analyze', 'locations-conversations-calculate-stats', 'locations-conversations-create', 'locations-conversations-delete', 'locations-conversations-get', 'locations-conversations-ingest', 'locations-conversations-list', 'locations-conversations-patch', 'locations-get-settings', 'locations-insightsdata-export', 'locations-issue-models-calculate-issue-model-stats', 'locations-issue-models-create', 'locations-issue-models-delete', 'locations-issue-models-deploy', 'locations-issue-models-get', 'locations-issue-models-issues-delete', 'locations-issue-models-issues-get', 'locations-issue-models-issues-list', 'locations-issue-models-issues-patch', 'locations-issue-models-list', 'locations-issue-models-patch', 'locations-issue-models-undeploy', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'locations-phrase-matchers-create', 'locations-phrase-matchers-delete', 'locations-phrase-matchers-get', 'locations-phrase-matchers-list', 'locations-phrase-matchers-patch', 'locations-update-settings', 'locations-views-create', 'locations-views-delete', 'locations-views-get', 'locations-views-list' and 'locations-views-patch'", vec![ ("locations-conversations-analyses-create", Some(r##"Creates an analysis. 
The long running operation is done when the analysis has completed."##), "Details at http://byron.github.io/google-apis-rs/google_contactcenterinsights1_cli/projects_locations-conversations-analyses-create", @@ -2922,6 +3205,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversations-bulk-analyze", + Some(r##"Analyzes multiple conversations in a single request."##), + "Details at http://byron.github.io/google-apis-rs/google_contactcenterinsights1_cli/projects_locations-conversations-bulk-analyze", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource to create analyses in."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3016,6 +3327,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversations-ingest", + Some(r##"Imports conversations and processes them according to the user's configuration."##), + "Details at http://byron.github.io/google-apis-rs/google_contactcenterinsights1_cli/projects_locations-conversations-ingest", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent resource for new conversations."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3238,6 +3577,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-issue-models-issues-delete", + Some(r##"Deletes an issue."##), + "Details at http://byron.github.io/google-apis-rs/google_contactcenterinsights1_cli/projects_locations-issue-models-issues-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the issue to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3738,7 +4099,7 @@ async fn main() { let mut app = App::new("contactcenterinsights1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230115") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_contactcenterinsights1_cli") .arg(Arg::with_name("url") diff --git a/gen/contactcenterinsights1/Cargo.toml b/gen/contactcenterinsights1/Cargo.toml index 1e4b93059a..16ac2e9475 100644 --- a/gen/contactcenterinsights1/Cargo.toml +++ b/gen/contactcenterinsights1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-contactcenterinsights1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = 
["Sebastian Thiel "] description = "A complete library to interact with Contactcenterinsights (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/contactcenterinsights1" homepage = "https://cloud.google.com/contact-center/insights/docs" -documentation = "https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-contactcenterinsights1/5.0.2+20230115" license = "MIT" keywords = ["contactcenterinsight", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/contactcenterinsights1/README.md b/gen/contactcenterinsights1/README.md index a60298498d..f937a6d53a 100644 --- a/gen/contactcenterinsights1/README.md +++ b/gen/contactcenterinsights1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-contactcenterinsights1` library allows access to all features of the *Google Contactcenterinsights* service. -This documentation was generated from *Contactcenterinsights* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *contactcenterinsights:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Contactcenterinsights* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *contactcenterinsights:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Contactcenterinsights* *v1* API can be found at the [official documentation site](https://cloud.google.com/contact-center/insights/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/Contactcenterinsights) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/Contactcenterinsights) ... 
* projects - * [*locations conversations analyses create*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationAnalysisCreateCall), [*locations conversations analyses delete*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationAnalysisDeleteCall), [*locations conversations analyses get*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationAnalysisGetCall), [*locations conversations analyses list*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationAnalysisListCall), [*locations conversations bulk analyze*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationBulkAnalyzeCall), [*locations conversations calculate stats*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationCalculateStatCall), [*locations conversations create*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationCreateCall), [*locations conversations delete*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationDeleteCall), [*locations conversations get*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationGetCall), [*locations conversations ingest*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationIngestCall), [*locations conversations 
list*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationListCall), [*locations conversations patch*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationConversationPatchCall), [*locations get settings*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationGetSettingCall), [*locations insightsdata export*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationInsightsdataExportCall), [*locations issue models calculate issue model stats*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelCalculateIssueModelStatCall), [*locations issue models create*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelCreateCall), [*locations issue models delete*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelDeleteCall), [*locations issue models deploy*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelDeployCall), [*locations issue models get*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelGetCall), [*locations issue models issues delete*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelIssueDeleteCall), [*locations issue models issues get*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelIssueGetCall), [*locations issue models issues 
list*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelIssueListCall), [*locations issue models issues patch*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelIssuePatchCall), [*locations issue models list*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelListCall), [*locations issue models patch*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelPatchCall), [*locations issue models undeploy*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelUndeployCall), [*locations operations cancel*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationOperationListCall), [*locations phrase matchers create*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherCreateCall), [*locations phrase matchers delete*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherDeleteCall), [*locations phrase matchers get*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherGetCall), [*locations phrase matchers 
list*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherListCall), [*locations phrase matchers patch*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherPatchCall), [*locations update settings*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationUpdateSettingCall), [*locations views create*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationViewCreateCall), [*locations views delete*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationViewDeleteCall), [*locations views get*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationViewGetCall), [*locations views list*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationViewListCall) and [*locations views patch*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/api::ProjectLocationViewPatchCall) + * [*locations conversations analyses create*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationAnalysisCreateCall), [*locations conversations analyses delete*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationAnalysisDeleteCall), [*locations conversations analyses get*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationAnalysisGetCall), [*locations conversations analyses 
list*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationAnalysisListCall), [*locations conversations bulk analyze*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationBulkAnalyzeCall), [*locations conversations calculate stats*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationCalculateStatCall), [*locations conversations create*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationCreateCall), [*locations conversations delete*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationDeleteCall), [*locations conversations get*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationGetCall), [*locations conversations ingest*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationIngestCall), [*locations conversations list*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationListCall), [*locations conversations patch*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationConversationPatchCall), [*locations get settings*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationGetSettingCall), [*locations insightsdata export*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationInsightsdataExportCall), [*locations issue models calculate issue model 
stats*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelCalculateIssueModelStatCall), [*locations issue models create*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelCreateCall), [*locations issue models delete*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelDeleteCall), [*locations issue models deploy*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelDeployCall), [*locations issue models get*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelGetCall), [*locations issue models issues delete*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelIssueDeleteCall), [*locations issue models issues get*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelIssueGetCall), [*locations issue models issues list*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelIssueListCall), [*locations issue models issues patch*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelIssuePatchCall), [*locations issue models list*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelListCall), [*locations issue models patch*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelPatchCall), [*locations issue models 
undeploy*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationIssueModelUndeployCall), [*locations operations cancel*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationOperationListCall), [*locations phrase matchers create*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherCreateCall), [*locations phrase matchers delete*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherDeleteCall), [*locations phrase matchers get*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherGetCall), [*locations phrase matchers list*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherListCall), [*locations phrase matchers patch*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationPhraseMatcherPatchCall), [*locations update settings*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationUpdateSettingCall), [*locations views create*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationViewCreateCall), [*locations views delete*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationViewDeleteCall), [*locations views 
get*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationViewGetCall), [*locations views list*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationViewListCall) and [*locations views patch*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/api::ProjectLocationViewPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/Contactcenterinsights)** +* **[Hub](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/Contactcenterinsights)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::CallBuilder) -* **[Resources](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::CallBuilder) +* **[Resources](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::Part)** + * **[Parts](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::Delegate) to the -[Method Builder](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::Delegate) to the +[Method Builder](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::RequestValue) and -[decodable](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::RequestValue) and +[decodable](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-contactcenterinsights1/5.0.2-beta-1+20230115/google_contactcenterinsights1/client::RequestValue) are moved +* [request values](https://docs.rs/google-contactcenterinsights1/5.0.2+20230115/google_contactcenterinsights1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/contactcenterinsights1/src/api.rs b/gen/contactcenterinsights1/src/api.rs index 043cbd3581..35985c29ea 100644 --- a/gen/contactcenterinsights1/src/api.rs +++ b/gen/contactcenterinsights1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Contactcenterinsights { Contactcenterinsights { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://contactcenterinsights.googleapis.com/".to_string(), _root_url: "https://contactcenterinsights.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Contactcenterinsights { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/contactcenterinsights1/src/client.rs b/gen/contactcenterinsights1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/contactcenterinsights1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/contactcenterinsights1/src/lib.rs b/gen/contactcenterinsights1/src/lib.rs index 5981b55eed..258a7eb106 100644 --- a/gen/contactcenterinsights1/src/lib.rs +++ b/gen/contactcenterinsights1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Contactcenterinsights* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *contactcenterinsights:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Contactcenterinsights* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *contactcenterinsights:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Contactcenterinsights* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/contact-center/insights/docs). diff --git a/gen/container1-cli/Cargo.toml b/gen/container1-cli/Cargo.toml index 06f2b629a6..b50f203d10 100644 --- a/gen/container1-cli/Cargo.toml +++ b/gen/container1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-container1-cli" -version = "4.0.1+20220215" +version = "5.0.2+20230104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Container (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/container1-cli" @@ -20,13 +20,13 @@ name = "container1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-container1] path = "../container1" -version = "4.0.1+20220215" +version = "5.0.2+20230104" + diff --git a/gen/container1-cli/README.md b/gen/container1-cli/README.md index ad742651ba..233144e853 100644 --- a/gen/container1-cli/README.md +++ b/gen/container1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Container* API at revision *20220215*. The CLI is at version *4.0.1*. +This documentation was generated from the *Container* API at revision *20230104*. The CLI is at version *5.0.2*. ```bash container1 [options] @@ -37,6 +37,7 @@ container1 [options] locations-clusters-get [-p ]... [-o ] locations-clusters-get-jwks [-p ]... [-o ] locations-clusters-list [-p ]... [-o ] + locations-clusters-node-pools-complete-upgrade (-r )... [-p ]... [-o ] locations-clusters-node-pools-create (-r )... [-p ]... [-o ] locations-clusters-node-pools-delete [-p ]... [-o ] locations-clusters-node-pools-get [-p ]... [-o ] diff --git a/gen/container1-cli/mkdocs.yml b/gen/container1-cli/mkdocs.yml index 04c56963e9..35bc1951ed 100644 --- a/gen/container1-cli/mkdocs.yml +++ b/gen/container1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Container v4.0.1+20220215 +site_name: Container v5.0.2+20230104 site_url: http://byron.github.io/google-apis-rs/google-container1-cli site_description: A complete library to interact with Container (protocol v1) @@ -7,71 +7,73 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/container1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_aggregated-usable-subnetworks-list.md', 'Projects', 'Aggregated Usable Subnetworks List'] -- ['projects_locations-clusters-complete-ip-rotation.md', 'Projects', 'Locations Clusters Complete Ip Rotation'] -- ['projects_locations-clusters-create.md', 'Projects', 'Locations Clusters Create'] -- ['projects_locations-clusters-delete.md', 'Projects', 'Locations Clusters Delete'] -- ['projects_locations-clusters-get.md', 'Projects', 'Locations Clusters Get'] -- ['projects_locations-clusters-get-jwks.md', 'Projects', 'Locations Clusters Get Jwks'] -- ['projects_locations-clusters-list.md', 'Projects', 'Locations Clusters List'] -- 
['projects_locations-clusters-node-pools-create.md', 'Projects', 'Locations Clusters Node Pools Create'] -- ['projects_locations-clusters-node-pools-delete.md', 'Projects', 'Locations Clusters Node Pools Delete'] -- ['projects_locations-clusters-node-pools-get.md', 'Projects', 'Locations Clusters Node Pools Get'] -- ['projects_locations-clusters-node-pools-list.md', 'Projects', 'Locations Clusters Node Pools List'] -- ['projects_locations-clusters-node-pools-rollback.md', 'Projects', 'Locations Clusters Node Pools Rollback'] -- ['projects_locations-clusters-node-pools-set-autoscaling.md', 'Projects', 'Locations Clusters Node Pools Set Autoscaling'] -- ['projects_locations-clusters-node-pools-set-management.md', 'Projects', 'Locations Clusters Node Pools Set Management'] -- ['projects_locations-clusters-node-pools-set-size.md', 'Projects', 'Locations Clusters Node Pools Set Size'] -- ['projects_locations-clusters-node-pools-update.md', 'Projects', 'Locations Clusters Node Pools Update'] -- ['projects_locations-clusters-set-addons.md', 'Projects', 'Locations Clusters Set Addons'] -- ['projects_locations-clusters-set-legacy-abac.md', 'Projects', 'Locations Clusters Set Legacy Abac'] -- ['projects_locations-clusters-set-locations.md', 'Projects', 'Locations Clusters Set Locations'] -- ['projects_locations-clusters-set-logging.md', 'Projects', 'Locations Clusters Set Logging'] -- ['projects_locations-clusters-set-maintenance-policy.md', 'Projects', 'Locations Clusters Set Maintenance Policy'] -- ['projects_locations-clusters-set-master-auth.md', 'Projects', 'Locations Clusters Set Master Auth'] -- ['projects_locations-clusters-set-monitoring.md', 'Projects', 'Locations Clusters Set Monitoring'] -- ['projects_locations-clusters-set-network-policy.md', 'Projects', 'Locations Clusters Set Network Policy'] -- ['projects_locations-clusters-set-resource-labels.md', 'Projects', 'Locations Clusters Set Resource Labels'] -- ['projects_locations-clusters-start-ip-rotation.md', 
'Projects', 'Locations Clusters Start Ip Rotation'] -- ['projects_locations-clusters-update.md', 'Projects', 'Locations Clusters Update'] -- ['projects_locations-clusters-update-master.md', 'Projects', 'Locations Clusters Update Master'] -- ['projects_locations-clusters-well-known-get-openid-configuration.md', 'Projects', 'Locations Clusters Well Known Get Openid Configuration'] -- ['projects_locations-get-server-config.md', 'Projects', 'Locations Get Server Config'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_zones-clusters-addons.md', 'Projects', 'Zones Clusters Addons'] -- ['projects_zones-clusters-complete-ip-rotation.md', 'Projects', 'Zones Clusters Complete Ip Rotation'] -- ['projects_zones-clusters-create.md', 'Projects', 'Zones Clusters Create'] -- ['projects_zones-clusters-delete.md', 'Projects', 'Zones Clusters Delete'] -- ['projects_zones-clusters-get.md', 'Projects', 'Zones Clusters Get'] -- ['projects_zones-clusters-legacy-abac.md', 'Projects', 'Zones Clusters Legacy Abac'] -- ['projects_zones-clusters-list.md', 'Projects', 'Zones Clusters List'] -- ['projects_zones-clusters-locations.md', 'Projects', 'Zones Clusters Locations'] -- ['projects_zones-clusters-logging.md', 'Projects', 'Zones Clusters Logging'] -- ['projects_zones-clusters-master.md', 'Projects', 'Zones Clusters Master'] -- ['projects_zones-clusters-monitoring.md', 'Projects', 'Zones Clusters Monitoring'] -- ['projects_zones-clusters-node-pools-autoscaling.md', 'Projects', 'Zones Clusters Node Pools Autoscaling'] -- ['projects_zones-clusters-node-pools-create.md', 'Projects', 'Zones Clusters Node Pools Create'] -- ['projects_zones-clusters-node-pools-delete.md', 'Projects', 'Zones Clusters Node Pools Delete'] -- ['projects_zones-clusters-node-pools-get.md', 
'Projects', 'Zones Clusters Node Pools Get'] -- ['projects_zones-clusters-node-pools-list.md', 'Projects', 'Zones Clusters Node Pools List'] -- ['projects_zones-clusters-node-pools-rollback.md', 'Projects', 'Zones Clusters Node Pools Rollback'] -- ['projects_zones-clusters-node-pools-set-management.md', 'Projects', 'Zones Clusters Node Pools Set Management'] -- ['projects_zones-clusters-node-pools-set-size.md', 'Projects', 'Zones Clusters Node Pools Set Size'] -- ['projects_zones-clusters-node-pools-update.md', 'Projects', 'Zones Clusters Node Pools Update'] -- ['projects_zones-clusters-resource-labels.md', 'Projects', 'Zones Clusters Resource Labels'] -- ['projects_zones-clusters-set-maintenance-policy.md', 'Projects', 'Zones Clusters Set Maintenance Policy'] -- ['projects_zones-clusters-set-master-auth.md', 'Projects', 'Zones Clusters Set Master Auth'] -- ['projects_zones-clusters-set-network-policy.md', 'Projects', 'Zones Clusters Set Network Policy'] -- ['projects_zones-clusters-start-ip-rotation.md', 'Projects', 'Zones Clusters Start Ip Rotation'] -- ['projects_zones-clusters-update.md', 'Projects', 'Zones Clusters Update'] -- ['projects_zones-get-serverconfig.md', 'Projects', 'Zones Get Serverconfig'] -- ['projects_zones-operations-cancel.md', 'Projects', 'Zones Operations Cancel'] -- ['projects_zones-operations-get.md', 'Projects', 'Zones Operations Get'] -- ['projects_zones-operations-list.md', 'Projects', 'Zones Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Aggregated Usable Subnetworks List': 'projects_aggregated-usable-subnetworks-list.md' + - 'Locations Clusters Complete Ip Rotation': 'projects_locations-clusters-complete-ip-rotation.md' + - 'Locations Clusters Create': 'projects_locations-clusters-create.md' + - 'Locations Clusters Delete': 'projects_locations-clusters-delete.md' + - 'Locations Clusters Get': 'projects_locations-clusters-get.md' + - 'Locations Clusters Get Jwks': 'projects_locations-clusters-get-jwks.md' + - 
'Locations Clusters List': 'projects_locations-clusters-list.md' + - 'Locations Clusters Node Pools Complete Upgrade': 'projects_locations-clusters-node-pools-complete-upgrade.md' + - 'Locations Clusters Node Pools Create': 'projects_locations-clusters-node-pools-create.md' + - 'Locations Clusters Node Pools Delete': 'projects_locations-clusters-node-pools-delete.md' + - 'Locations Clusters Node Pools Get': 'projects_locations-clusters-node-pools-get.md' + - 'Locations Clusters Node Pools List': 'projects_locations-clusters-node-pools-list.md' + - 'Locations Clusters Node Pools Rollback': 'projects_locations-clusters-node-pools-rollback.md' + - 'Locations Clusters Node Pools Set Autoscaling': 'projects_locations-clusters-node-pools-set-autoscaling.md' + - 'Locations Clusters Node Pools Set Management': 'projects_locations-clusters-node-pools-set-management.md' + - 'Locations Clusters Node Pools Set Size': 'projects_locations-clusters-node-pools-set-size.md' + - 'Locations Clusters Node Pools Update': 'projects_locations-clusters-node-pools-update.md' + - 'Locations Clusters Set Addons': 'projects_locations-clusters-set-addons.md' + - 'Locations Clusters Set Legacy Abac': 'projects_locations-clusters-set-legacy-abac.md' + - 'Locations Clusters Set Locations': 'projects_locations-clusters-set-locations.md' + - 'Locations Clusters Set Logging': 'projects_locations-clusters-set-logging.md' + - 'Locations Clusters Set Maintenance Policy': 'projects_locations-clusters-set-maintenance-policy.md' + - 'Locations Clusters Set Master Auth': 'projects_locations-clusters-set-master-auth.md' + - 'Locations Clusters Set Monitoring': 'projects_locations-clusters-set-monitoring.md' + - 'Locations Clusters Set Network Policy': 'projects_locations-clusters-set-network-policy.md' + - 'Locations Clusters Set Resource Labels': 'projects_locations-clusters-set-resource-labels.md' + - 'Locations Clusters Start Ip Rotation': 'projects_locations-clusters-start-ip-rotation.md' + - 'Locations 
Clusters Update': 'projects_locations-clusters-update.md' + - 'Locations Clusters Update Master': 'projects_locations-clusters-update-master.md' + - 'Locations Clusters Well Known Get Openid Configuration': 'projects_locations-clusters-well-known-get-openid-configuration.md' + - 'Locations Get Server Config': 'projects_locations-get-server-config.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Zones Clusters Addons': 'projects_zones-clusters-addons.md' + - 'Zones Clusters Complete Ip Rotation': 'projects_zones-clusters-complete-ip-rotation.md' + - 'Zones Clusters Create': 'projects_zones-clusters-create.md' + - 'Zones Clusters Delete': 'projects_zones-clusters-delete.md' + - 'Zones Clusters Get': 'projects_zones-clusters-get.md' + - 'Zones Clusters Legacy Abac': 'projects_zones-clusters-legacy-abac.md' + - 'Zones Clusters List': 'projects_zones-clusters-list.md' + - 'Zones Clusters Locations': 'projects_zones-clusters-locations.md' + - 'Zones Clusters Logging': 'projects_zones-clusters-logging.md' + - 'Zones Clusters Master': 'projects_zones-clusters-master.md' + - 'Zones Clusters Monitoring': 'projects_zones-clusters-monitoring.md' + - 'Zones Clusters Node Pools Autoscaling': 'projects_zones-clusters-node-pools-autoscaling.md' + - 'Zones Clusters Node Pools Create': 'projects_zones-clusters-node-pools-create.md' + - 'Zones Clusters Node Pools Delete': 'projects_zones-clusters-node-pools-delete.md' + - 'Zones Clusters Node Pools Get': 'projects_zones-clusters-node-pools-get.md' + - 'Zones Clusters Node Pools List': 'projects_zones-clusters-node-pools-list.md' + - 'Zones Clusters Node Pools Rollback': 'projects_zones-clusters-node-pools-rollback.md' + - 'Zones Clusters Node Pools Set Management': 'projects_zones-clusters-node-pools-set-management.md' + - 'Zones Clusters Node Pools Set 
Size': 'projects_zones-clusters-node-pools-set-size.md' + - 'Zones Clusters Node Pools Update': 'projects_zones-clusters-node-pools-update.md' + - 'Zones Clusters Resource Labels': 'projects_zones-clusters-resource-labels.md' + - 'Zones Clusters Set Maintenance Policy': 'projects_zones-clusters-set-maintenance-policy.md' + - 'Zones Clusters Set Master Auth': 'projects_zones-clusters-set-master-auth.md' + - 'Zones Clusters Set Network Policy': 'projects_zones-clusters-set-network-policy.md' + - 'Zones Clusters Start Ip Rotation': 'projects_zones-clusters-start-ip-rotation.md' + - 'Zones Clusters Update': 'projects_zones-clusters-update.md' + - 'Zones Get Serverconfig': 'projects_zones-get-serverconfig.md' + - 'Zones Operations Cancel': 'projects_zones-operations-cancel.md' + - 'Zones Operations Get': 'projects_zones-operations-get.md' + - 'Zones Operations List': 'projects_zones-operations-list.md' theme: readthedocs diff --git a/gen/container1-cli/src/client.rs b/gen/container1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/container1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/container1-cli/src/main.rs b/gen/container1-cli/src/main.rs index fe9779702b..11d960d399 100644 --- a/gen/container1-cli/src/main.rs +++ b/gen/container1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_container1::{api, Error, oauth2}; +use google_container1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -230,6 +229,7 @@ where "cluster.addons-config.dns-cache-config.enabled" => Some(("cluster.addonsConfig.dnsCacheConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.gce-persistent-disk-csi-driver-config.enabled" => Some(("cluster.addonsConfig.gcePersistentDiskCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.gcp-filestore-csi-driver-config.enabled" => Some(("cluster.addonsConfig.gcpFilestoreCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.addons-config.gke-backup-agent-config.enabled" => Some(("cluster.addonsConfig.gkeBackupAgentConfig.enabled", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.horizontal-pod-autoscaling.disabled" => Some(("cluster.addonsConfig.horizontalPodAutoscaling.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.http-load-balancing.disabled" => Some(("cluster.addonsConfig.httpLoadBalancing.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.kubernetes-dashboard.disabled" => Some(("cluster.addonsConfig.kubernetesDashboard.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -251,13 +251,20 @@ where "cluster.autoscaling.autoprovisioning-node-pool-defaults.service-account" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.autoscaling.autoprovisioning-node-pool-defaults.shielded-instance-config.enable-integrity-monitoring" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.autoscaling.autoprovisioning-node-pool-defaults.shielded-instance-config.enable-secure-boot" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.node-pool-soak-duration" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.nodePoolSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-node-count" => 
Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-percentage" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-soak-duration" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.max-surge" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.maxSurge", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.max-unavailable" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.maxUnavailable", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.strategy" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.strategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.autoscaling.autoscaling-profile" => Some(("cluster.autoscaling.autoscalingProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.autoscaling.enable-node-autoprovisioning" => Some(("cluster.autoscaling.enableNodeAutoprovisioning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"cluster.binary-authorization.enabled" => Some(("cluster.binaryAuthorization.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.binary-authorization.evaluation-mode" => Some(("cluster.binaryAuthorization.evaluationMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.cluster-ipv4-cidr" => Some(("cluster.clusterIpv4Cidr", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.confidential-nodes.enabled" => Some(("cluster.confidentialNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.cost-management-config.enabled" => Some(("cluster.costManagementConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.create-time" => Some(("cluster.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.current-master-version" => Some(("cluster.currentMasterVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.current-node-count" => Some(("cluster.currentNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -269,6 +276,7 @@ where "cluster.enable-kubernetes-alpha" => Some(("cluster.enableKubernetesAlpha", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.enable-tpu" => Some(("cluster.enableTpu", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.endpoint" => Some(("cluster.endpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.etag" => Some(("cluster.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.expire-time" => Some(("cluster.expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.id" => Some(("cluster.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.identity-service-config.enabled" => 
Some(("cluster.identityServiceConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -279,11 +287,13 @@ where "cluster.ip-allocation-policy.cluster-ipv4-cidr-block" => Some(("cluster.ipAllocationPolicy.clusterIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.cluster-secondary-range-name" => Some(("cluster.ipAllocationPolicy.clusterSecondaryRangeName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.create-subnetwork" => Some(("cluster.ipAllocationPolicy.createSubnetwork", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.ip-allocation-policy.ipv6-access-type" => Some(("cluster.ipAllocationPolicy.ipv6AccessType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.node-ipv4-cidr" => Some(("cluster.ipAllocationPolicy.nodeIpv4Cidr", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.node-ipv4-cidr-block" => Some(("cluster.ipAllocationPolicy.nodeIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.services-ipv4-cidr" => Some(("cluster.ipAllocationPolicy.servicesIpv4Cidr", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.services-ipv4-cidr-block" => Some(("cluster.ipAllocationPolicy.servicesIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.services-secondary-range-name" => Some(("cluster.ipAllocationPolicy.servicesSecondaryRangeName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.ip-allocation-policy.stack-type" => Some(("cluster.ipAllocationPolicy.stackType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.subnetwork-name" => 
Some(("cluster.ipAllocationPolicy.subnetworkName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.tpu-ipv4-cidr-block" => Some(("cluster.ipAllocationPolicy.tpuIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.use-ip-aliases" => Some(("cluster.ipAllocationPolicy.useIpAliases", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -308,8 +318,10 @@ where "cluster.master-auth.password" => Some(("cluster.masterAuth.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.master-auth.username" => Some(("cluster.masterAuth.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.master-authorized-networks-config.enabled" => Some(("cluster.masterAuthorizedNetworksConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.master-authorized-networks-config.gcp-public-cidrs-access-enabled" => Some(("cluster.masterAuthorizedNetworksConfig.gcpPublicCidrsAccessEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.mesh-certificates.enable-certificates" => Some(("cluster.meshCertificates.enableCertificates", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.monitoring-config.component-config.enable-components" => Some(("cluster.monitoringConfig.componentConfig.enableComponents", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "cluster.monitoring-config.managed-prometheus-config.enabled" => Some(("cluster.monitoringConfig.managedPrometheusConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.monitoring-service" => Some(("cluster.monitoringService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.name" => Some(("cluster.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "cluster.network" => Some(("cluster.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -320,6 +332,7 @@ where "cluster.network-config.dns-config.cluster-dns-scope" => Some(("cluster.networkConfig.dnsConfig.clusterDnsScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network-config.enable-intra-node-visibility" => Some(("cluster.networkConfig.enableIntraNodeVisibility", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.network-config.enable-l4ilb-subsetting" => Some(("cluster.networkConfig.enableL4ilbSubsetting", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.network-config.gateway-api-config.channel" => Some(("cluster.networkConfig.gatewayApiConfig.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network-config.network" => Some(("cluster.networkConfig.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network-config.private-ipv6-google-access" => Some(("cluster.networkConfig.privateIpv6GoogleAccess", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network-config.service-external-ips-config.enabled" => Some(("cluster.networkConfig.serviceExternalIpsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -328,17 +341,24 @@ where "cluster.network-policy.provider" => Some(("cluster.networkPolicy.provider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.advanced-machine-features.threads-per-core" => Some(("cluster.nodeConfig.advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.boot-disk-kms-key" => Some(("cluster.nodeConfig.bootDiskKmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.node-config.confidential-nodes.enabled" => 
Some(("cluster.nodeConfig.confidentialNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.disk-size-gb" => Some(("cluster.nodeConfig.diskSizeGb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster.node-config.disk-type" => Some(("cluster.nodeConfig.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.node-config.ephemeral-storage-local-ssd-config.local-ssd-count" => Some(("cluster.nodeConfig.ephemeralStorageLocalSsdConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster.node-config.fast-socket.enabled" => Some(("cluster.nodeConfig.fastSocket.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.gcfs-config.enabled" => Some(("cluster.nodeConfig.gcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.gvnic.enabled" => Some(("cluster.nodeConfig.gvnic.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.image-type" => Some(("cluster.nodeConfig.imageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.kubelet-config.cpu-cfs-quota" => Some(("cluster.nodeConfig.kubeletConfig.cpuCfsQuota", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.kubelet-config.cpu-cfs-quota-period" => Some(("cluster.nodeConfig.kubeletConfig.cpuCfsQuotaPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.kubelet-config.cpu-manager-policy" => Some(("cluster.nodeConfig.kubeletConfig.cpuManagerPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.node-config.kubelet-config.pod-pids-limit" => Some(("cluster.nodeConfig.kubeletConfig.podPidsLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"cluster.node-config.labels" => Some(("cluster.nodeConfig.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "cluster.node-config.linux-node-config.cgroup-mode" => Some(("cluster.nodeConfig.linuxNodeConfig.cgroupMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.linux-node-config.sysctls" => Some(("cluster.nodeConfig.linuxNodeConfig.sysctls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "cluster.node-config.local-nvme-ssd-block-config.local-ssd-count" => Some(("cluster.nodeConfig.localNvmeSsdBlockConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster.node-config.local-ssd-count" => Some(("cluster.nodeConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster.node-config.logging-config.variant-config.variant" => Some(("cluster.nodeConfig.loggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.machine-type" => Some(("cluster.nodeConfig.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.metadata" => Some(("cluster.nodeConfig.metadata", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "cluster.node-config.min-cpu-platform" => Some(("cluster.nodeConfig.minCpuPlatform", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -348,16 +368,19 @@ where "cluster.node-config.reservation-affinity.consume-reservation-type" => Some(("cluster.nodeConfig.reservationAffinity.consumeReservationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.reservation-affinity.key" => Some(("cluster.nodeConfig.reservationAffinity.key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.reservation-affinity.values" => Some(("cluster.nodeConfig.reservationAffinity.values", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "cluster.node-config.resource-labels" => Some(("cluster.nodeConfig.resourceLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "cluster.node-config.sandbox-config.type" => Some(("cluster.nodeConfig.sandboxConfig.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.service-account" => Some(("cluster.nodeConfig.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.shielded-instance-config.enable-integrity-monitoring" => Some(("cluster.nodeConfig.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.shielded-instance-config.enable-secure-boot" => Some(("cluster.nodeConfig.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.spot" => Some(("cluster.nodeConfig.spot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.tags" => Some(("cluster.nodeConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "cluster.node-config.windows-node-config.os-version" => Some(("cluster.nodeConfig.windowsNodeConfig.osVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.workload-metadata-config.mode" => Some(("cluster.nodeConfig.workloadMetadataConfig.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-ipv4-cidr-size" => Some(("cluster.nodeIpv4CidrSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster.node-pool-auto-config.network-tags.tags" => Some(("cluster.nodePoolAutoConfig.networkTags.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "cluster.node-pool-defaults.node-config-defaults.gcfs-config.enabled" => 
Some(("cluster.nodePoolDefaults.nodeConfigDefaults.gcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.node-pool-defaults.node-config-defaults.logging-config.variant-config.variant" => Some(("cluster.nodePoolDefaults.nodeConfigDefaults.loggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.notification-config.pubsub.enabled" => Some(("cluster.notificationConfig.pubsub.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.notification-config.pubsub.filter.event-type" => Some(("cluster.notificationConfig.pubsub.filter.eventType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "cluster.notification-config.pubsub.topic" => Some(("cluster.notificationConfig.pubsub.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -367,6 +390,7 @@ where "cluster.private-cluster-config.master-ipv4-cidr-block" => Some(("cluster.privateClusterConfig.masterIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.private-cluster-config.peering-name" => Some(("cluster.privateClusterConfig.peeringName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.private-cluster-config.private-endpoint" => Some(("cluster.privateClusterConfig.privateEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.private-cluster-config.private-endpoint-subnetwork" => Some(("cluster.privateClusterConfig.privateEndpointSubnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.private-cluster-config.public-endpoint" => Some(("cluster.privateClusterConfig.publicEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.release-channel.channel" => Some(("cluster.releaseChannel.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"cluster.resource-labels" => Some(("cluster.resourceLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -387,7 +411,7 @@ where "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-machine-features", "authenticator-groups-config", "auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autopilot", "autoprovisioning-locations", "autoprovisioning-node-pool-defaults", "autoscaling", "autoscaling-profile", "bigquery-destination", "binary-authorization", "boot-disk-kms-key", "channel", "client-certificate", "client-certificate-config", "client-key", "cloud-run-config", "cluster", "cluster-ca-certificate", "cluster-dns", "cluster-dns-domain", "cluster-dns-scope", "cluster-ipv4-cidr", "cluster-ipv4-cidr-block", "cluster-secondary-range-name", "component-config", "confidential-nodes", "config-connector-config", "consume-reservation-type", "consumption-metering-config", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-subnetwork", "create-time", "current-master-version", "current-node-count", "current-node-version", "daily-maintenance-window", "database-encryption", "datapath-provider", "dataset-id", "default-max-pods-constraint", "default-snat-status", "description", "disabled", "disk-size-gb", "disk-type", "dns-cache-config", "dns-config", "duration", "enable-certificates", "enable-components", "enable-integrity-monitoring", "enable-intra-node-visibility", "enable-kubernetes-alpha", "enable-l4ilb-subsetting", "enable-network-egress-metering", "enable-node-autoprovisioning", "enable-private-endpoint", "enable-private-nodes", "enable-secure-boot", "enable-tpu", "enabled", "end-time", "endpoint", "event-type", "expire-time", "filter", "gce-persistent-disk-csi-driver-config", "gcfs-config", 
"gcp-filestore-csi-driver-config", "gvnic", "horizontal-pod-autoscaling", "http-load-balancing", "id", "identity-service-config", "image-type", "initial-cluster-version", "initial-node-count", "instance-group-urls", "ip-allocation-policy", "issue-client-certificate", "key", "key-name", "kubelet-config", "kubernetes-dashboard", "label-fingerprint", "labels", "legacy-abac", "linux-node-config", "load-balancer-type", "local-ssd-count", "location", "locations", "logging-config", "logging-service", "machine-type", "maintenance-exclusion-options", "maintenance-policy", "management", "master-auth", "master-authorized-networks-config", "master-global-access-config", "master-ipv4-cidr-block", "max-pods-per-node", "max-surge", "max-unavailable", "mesh-certificates", "metadata", "min-cpu-platform", "mode", "monitoring-config", "monitoring-service", "name", "network", "network-config", "network-policy", "network-policy-config", "network-tags", "node-config", "node-config-defaults", "node-group", "node-ipv4-cidr", "node-ipv4-cidr-block", "node-ipv4-cidr-size", "node-pool-auto-config", "node-pool-defaults", "notification-config", "oauth-scopes", "parent", "password", "peering-name", "preemptible", "private-cluster-config", "private-endpoint", "private-ipv6-google-access", "project-id", "provider", "public-endpoint", "pubsub", "recurrence", "recurring-window", "release-channel", "reservation-affinity", "resource-labels", "resource-usage-export-config", "resource-version", "sandbox-config", "scope", "security-group", "self-link", "service-account", "service-external-ips-config", "services-ipv4-cidr", "services-ipv4-cidr-block", "services-secondary-range-name", "shielded-instance-config", "shielded-nodes", "spot", "start-time", "state", "status", "status-message", "subnetwork", "subnetwork-name", "sysctls", "tags", "threads-per-core", "topic", "tpu-ipv4-cidr-block", "type", "upgrade-options", "upgrade-settings", "use-ip-aliases", "use-routes", "username", "values", 
"vertical-pod-autoscaling", "window", "workload-identity-config", "workload-metadata-config", "workload-pool", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-machine-features", "authenticator-groups-config", "auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autopilot", "autoprovisioning-locations", "autoprovisioning-node-pool-defaults", "autoscaling", "autoscaling-profile", "batch-node-count", "batch-percentage", "batch-soak-duration", "bigquery-destination", "binary-authorization", "blue-green-settings", "boot-disk-kms-key", "cgroup-mode", "channel", "client-certificate", "client-certificate-config", "client-key", "cloud-run-config", "cluster", "cluster-ca-certificate", "cluster-dns", "cluster-dns-domain", "cluster-dns-scope", "cluster-ipv4-cidr", "cluster-ipv4-cidr-block", "cluster-secondary-range-name", "component-config", "confidential-nodes", "config-connector-config", "consume-reservation-type", "consumption-metering-config", "cost-management-config", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-subnetwork", "create-time", "current-master-version", "current-node-count", "current-node-version", "daily-maintenance-window", "database-encryption", "datapath-provider", "dataset-id", "default-max-pods-constraint", "default-snat-status", "description", "disabled", "disk-size-gb", "disk-type", "dns-cache-config", "dns-config", "duration", "enable-certificates", "enable-components", "enable-integrity-monitoring", "enable-intra-node-visibility", "enable-kubernetes-alpha", "enable-l4ilb-subsetting", "enable-network-egress-metering", "enable-node-autoprovisioning", "enable-private-endpoint", "enable-private-nodes", "enable-secure-boot", "enable-tpu", "enabled", "end-time", "endpoint", "ephemeral-storage-local-ssd-config", "etag", "evaluation-mode", "event-type", "expire-time", "fast-socket", "filter", "gateway-api-config", "gce-persistent-disk-csi-driver-config", "gcfs-config", 
"gcp-filestore-csi-driver-config", "gcp-public-cidrs-access-enabled", "gke-backup-agent-config", "gvnic", "horizontal-pod-autoscaling", "http-load-balancing", "id", "identity-service-config", "image-type", "initial-cluster-version", "initial-node-count", "instance-group-urls", "ip-allocation-policy", "ipv6-access-type", "issue-client-certificate", "key", "key-name", "kubelet-config", "kubernetes-dashboard", "label-fingerprint", "labels", "legacy-abac", "linux-node-config", "load-balancer-type", "local-nvme-ssd-block-config", "local-ssd-count", "location", "locations", "logging-config", "logging-service", "machine-type", "maintenance-exclusion-options", "maintenance-policy", "managed-prometheus-config", "management", "master-auth", "master-authorized-networks-config", "master-global-access-config", "master-ipv4-cidr-block", "max-pods-per-node", "max-surge", "max-unavailable", "mesh-certificates", "metadata", "min-cpu-platform", "mode", "monitoring-config", "monitoring-service", "name", "network", "network-config", "network-policy", "network-policy-config", "network-tags", "node-config", "node-config-defaults", "node-group", "node-ipv4-cidr", "node-ipv4-cidr-block", "node-ipv4-cidr-size", "node-pool-auto-config", "node-pool-defaults", "node-pool-soak-duration", "notification-config", "oauth-scopes", "os-version", "parent", "password", "peering-name", "pod-pids-limit", "preemptible", "private-cluster-config", "private-endpoint", "private-endpoint-subnetwork", "private-ipv6-google-access", "project-id", "provider", "public-endpoint", "pubsub", "recurrence", "recurring-window", "release-channel", "reservation-affinity", "resource-labels", "resource-usage-export-config", "resource-version", "sandbox-config", "scope", "security-group", "self-link", "service-account", "service-external-ips-config", "services-ipv4-cidr", "services-ipv4-cidr-block", "services-secondary-range-name", "shielded-instance-config", "shielded-nodes", "spot", "stack-type", "standard-rollout-policy", 
"start-time", "state", "status", "status-message", "strategy", "subnetwork", "subnetwork-name", "sysctls", "tags", "threads-per-core", "topic", "tpu-ipv4-cidr-block", "type", "upgrade-options", "upgrade-settings", "use-ip-aliases", "use-routes", "username", "values", "variant", "variant-config", "vertical-pod-autoscaling", "window", "windows-node-config", "workload-identity-config", "workload-metadata-config", "workload-pool", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -679,6 +703,90 @@ where } } + async fn _projects_locations_clusters_node_pools_complete_upgrade(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CompleteNodePoolUpgradeRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_clusters_node_pools_complete_upgrade(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } 
+ } + } + } + async fn _projects_locations_clusters_node_pools_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -705,21 +813,31 @@ where "cluster-id" => Some(("clusterId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.autoscaling.autoprovisioned" => Some(("nodePool.autoscaling.autoprovisioned", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.autoscaling.enabled" => Some(("nodePool.autoscaling.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-pool.autoscaling.location-policy" => Some(("nodePool.autoscaling.locationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.autoscaling.max-node-count" => Some(("nodePool.autoscaling.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.autoscaling.min-node-count" => Some(("nodePool.autoscaling.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.autoscaling.total-max-node-count" => Some(("nodePool.autoscaling.totalMaxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.autoscaling.total-min-node-count" => Some(("nodePool.autoscaling.totalMinNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.config.advanced-machine-features.threads-per-core" => Some(("nodePool.config.advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.boot-disk-kms-key" => Some(("nodePool.config.bootDiskKmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.config.confidential-nodes.enabled" => Some(("nodePool.config.confidentialNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.disk-size-gb" => Some(("nodePool.config.diskSizeGb", JsonTypeInfo { 
jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.config.disk-type" => Some(("nodePool.config.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.config.ephemeral-storage-local-ssd-config.local-ssd-count" => Some(("nodePool.config.ephemeralStorageLocalSsdConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.config.fast-socket.enabled" => Some(("nodePool.config.fastSocket.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.gcfs-config.enabled" => Some(("nodePool.config.gcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.gvnic.enabled" => Some(("nodePool.config.gvnic.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.image-type" => Some(("nodePool.config.imageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.kubelet-config.cpu-cfs-quota" => Some(("nodePool.config.kubeletConfig.cpuCfsQuota", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.kubelet-config.cpu-cfs-quota-period" => Some(("nodePool.config.kubeletConfig.cpuCfsQuotaPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.kubelet-config.cpu-manager-policy" => Some(("nodePool.config.kubeletConfig.cpuManagerPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.config.kubelet-config.pod-pids-limit" => Some(("nodePool.config.kubeletConfig.podPidsLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.labels" => Some(("nodePool.config.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "node-pool.config.linux-node-config.cgroup-mode" => Some(("nodePool.config.linuxNodeConfig.cgroupMode", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "node-pool.config.linux-node-config.sysctls" => Some(("nodePool.config.linuxNodeConfig.sysctls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "node-pool.config.local-nvme-ssd-block-config.local-ssd-count" => Some(("nodePool.config.localNvmeSsdBlockConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.config.local-ssd-count" => Some(("nodePool.config.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.config.logging-config.variant-config.variant" => Some(("nodePool.config.loggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.machine-type" => Some(("nodePool.config.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.metadata" => Some(("nodePool.config.metadata", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "node-pool.config.min-cpu-platform" => Some(("nodePool.config.minCpuPlatform", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -729,13 +847,16 @@ where "node-pool.config.reservation-affinity.consume-reservation-type" => Some(("nodePool.config.reservationAffinity.consumeReservationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.reservation-affinity.key" => Some(("nodePool.config.reservationAffinity.key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.reservation-affinity.values" => Some(("nodePool.config.reservationAffinity.values", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-pool.config.resource-labels" => Some(("nodePool.config.resourceLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "node-pool.config.sandbox-config.type" => Some(("nodePool.config.sandboxConfig.type", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "node-pool.config.service-account" => Some(("nodePool.config.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.shielded-instance-config.enable-integrity-monitoring" => Some(("nodePool.config.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.shielded-instance-config.enable-secure-boot" => Some(("nodePool.config.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.spot" => Some(("nodePool.config.spot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.tags" => Some(("nodePool.config.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-pool.config.windows-node-config.os-version" => Some(("nodePool.config.windowsNodeConfig.osVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.workload-metadata-config.mode" => Some(("nodePool.config.workloadMetadataConfig.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.etag" => Some(("nodePool.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.initial-node-count" => Some(("nodePool.initialNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.instance-group-urls" => Some(("nodePool.instanceGroupUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "node-pool.locations" => Some(("nodePool.locations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -746,20 +867,33 @@ where "node-pool.max-pods-constraint.max-pods-per-node" => Some(("nodePool.maxPodsConstraint.maxPodsPerNode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.name" => Some(("nodePool.name", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "node-pool.network-config.create-pod-range" => Some(("nodePool.networkConfig.createPodRange", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-pool.network-config.enable-private-nodes" => Some(("nodePool.networkConfig.enablePrivateNodes", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-pool.network-config.network-performance-config.total-egress-bandwidth-tier" => Some(("nodePool.networkConfig.networkPerformanceConfig.totalEgressBandwidthTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.network-config.pod-ipv4-cidr-block" => Some(("nodePool.networkConfig.podIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.network-config.pod-range" => Some(("nodePool.networkConfig.podRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.placement-policy.type" => Some(("nodePool.placementPolicy.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.pod-ipv4-cidr-size" => Some(("nodePool.podIpv4CidrSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.self-link" => Some(("nodePool.selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.status" => Some(("nodePool.status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.status-message" => Some(("nodePool.statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.update-info.blue-green-info.blue-instance-group-urls" => Some(("nodePool.updateInfo.blueGreenInfo.blueInstanceGroupUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-pool.update-info.blue-green-info.blue-pool-deletion-start-time" => Some(("nodePool.updateInfo.blueGreenInfo.bluePoolDeletionStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"node-pool.update-info.blue-green-info.green-instance-group-urls" => Some(("nodePool.updateInfo.blueGreenInfo.greenInstanceGroupUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-pool.update-info.blue-green-info.green-pool-version" => Some(("nodePool.updateInfo.blueGreenInfo.greenPoolVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.update-info.blue-green-info.phase" => Some(("nodePool.updateInfo.blueGreenInfo.phase", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.blue-green-settings.node-pool-soak-duration" => Some(("nodePool.upgradeSettings.blueGreenSettings.nodePoolSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-node-count" => Some(("nodePool.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-percentage" => Some(("nodePool.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-soak-duration" => Some(("nodePool.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.upgrade-settings.max-surge" => Some(("nodePool.upgradeSettings.maxSurge", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.upgrade-settings.max-unavailable" => Some(("nodePool.upgradeSettings.maxUnavailable", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.strategy" => Some(("nodePool.upgradeSettings.strategy", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "node-pool.version" => Some(("nodePool.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autoprovisioned", "autoscaling", "boot-disk-kms-key", "cluster-id", "config", "consume-reservation-type", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-pod-range", "description", "disk-size-gb", "disk-type", "enable-integrity-monitoring", "enable-secure-boot", "enabled", "gcfs-config", "gvnic", "image-type", "initial-node-count", "instance-group-urls", "key", "kubelet-config", "labels", "linux-node-config", "local-ssd-count", "locations", "machine-type", "management", "max-node-count", "max-pods-constraint", "max-pods-per-node", "max-surge", "max-unavailable", "metadata", "min-cpu-platform", "min-node-count", "mode", "name", "network-config", "node-group", "node-pool", "oauth-scopes", "parent", "pod-ipv4-cidr-block", "pod-ipv4-cidr-size", "pod-range", "preemptible", "project-id", "reservation-affinity", "sandbox-config", "self-link", "service-account", "shielded-instance-config", "spot", "status", "status-message", "sysctls", "tags", "threads-per-core", "type", "upgrade-options", "upgrade-settings", "values", "version", "workload-metadata-config", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autoprovisioned", "autoscaling", "batch-node-count", "batch-percentage", "batch-soak-duration", "blue-green-info", "blue-green-settings", "blue-instance-group-urls", 
"blue-pool-deletion-start-time", "boot-disk-kms-key", "cgroup-mode", "cluster-id", "confidential-nodes", "config", "consume-reservation-type", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-pod-range", "description", "disk-size-gb", "disk-type", "enable-integrity-monitoring", "enable-private-nodes", "enable-secure-boot", "enabled", "ephemeral-storage-local-ssd-config", "etag", "fast-socket", "gcfs-config", "green-instance-group-urls", "green-pool-version", "gvnic", "image-type", "initial-node-count", "instance-group-urls", "key", "kubelet-config", "labels", "linux-node-config", "local-nvme-ssd-block-config", "local-ssd-count", "location-policy", "locations", "logging-config", "machine-type", "management", "max-node-count", "max-pods-constraint", "max-pods-per-node", "max-surge", "max-unavailable", "metadata", "min-cpu-platform", "min-node-count", "mode", "name", "network-config", "network-performance-config", "node-group", "node-pool", "node-pool-soak-duration", "oauth-scopes", "os-version", "parent", "phase", "placement-policy", "pod-ipv4-cidr-block", "pod-ipv4-cidr-size", "pod-pids-limit", "pod-range", "preemptible", "project-id", "reservation-affinity", "resource-labels", "sandbox-config", "self-link", "service-account", "shielded-instance-config", "spot", "standard-rollout-policy", "status", "status-message", "strategy", "sysctls", "tags", "threads-per-core", "total-egress-bandwidth-tier", "total-max-node-count", "total-min-node-count", "type", "update-info", "upgrade-options", "upgrade-settings", "values", "variant", "variant-config", "version", "windows-node-config", "workload-metadata-config", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1038,9 +1172,10 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool-id" => Some(("nodePoolId", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "respect-pdb" => Some(("respectPdb", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cluster-id", "name", "node-pool-id", "project-id", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cluster-id", "name", "node-pool-id", "project-id", "respect-pdb", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1125,15 +1260,18 @@ where match &temp_cursor.to_string()[..] { "autoscaling.autoprovisioned" => Some(("autoscaling.autoprovisioned", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "autoscaling.enabled" => Some(("autoscaling.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "autoscaling.location-policy" => Some(("autoscaling.locationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "autoscaling.max-node-count" => Some(("autoscaling.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "autoscaling.min-node-count" => Some(("autoscaling.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "autoscaling.total-max-node-count" => Some(("autoscaling.totalMaxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "autoscaling.total-min-node-count" => Some(("autoscaling.totalMinNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster-id" => Some(("clusterId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool-id" => Some(("nodePoolId", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["autoprovisioned", "autoscaling", "cluster-id", "enabled", "max-node-count", "min-node-count", "name", "node-pool-id", "project-id", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["autoprovisioned", "autoscaling", "cluster-id", "enabled", "location-policy", "max-node-count", "min-node-count", "name", "node-pool-id", "project-id", "total-max-node-count", "total-min-node-count", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1400,24 +1538,44 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "cluster-id" => Some(("clusterId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "confidential-nodes.enabled" => Some(("confidentialNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fast-socket.enabled" => Some(("fastSocket.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "gcfs-config.enabled" => Some(("gcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "gvnic.enabled" => Some(("gvnic.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "image-type" => Some(("imageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kubelet-config.cpu-cfs-quota" => Some(("kubeletConfig.cpuCfsQuota", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "kubelet-config.cpu-cfs-quota-period" => Some(("kubeletConfig.cpuCfsQuotaPeriod", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kubelet-config.cpu-manager-policy" => Some(("kubeletConfig.cpuManagerPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kubelet-config.pod-pids-limit" => Some(("kubeletConfig.podPidsLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels.labels" => Some(("labels.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "linux-node-config.cgroup-mode" => Some(("linuxNodeConfig.cgroupMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "linux-node-config.sysctls" => Some(("linuxNodeConfig.sysctls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "locations" => Some(("locations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "logging-config.variant-config.variant" => Some(("loggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-network-config.create-pod-range" => Some(("nodeNetworkConfig.createPodRange", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-network-config.enable-private-nodes" => Some(("nodeNetworkConfig.enablePrivateNodes", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-network-config.network-performance-config.total-egress-bandwidth-tier" => Some(("nodeNetworkConfig.networkPerformanceConfig.totalEgressBandwidthTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-network-config.pod-ipv4-cidr-block" => Some(("nodeNetworkConfig.podIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-network-config.pod-range" => Some(("nodeNetworkConfig.podRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool-id" => Some(("nodePoolId", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-version" => Some(("nodeVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-labels.labels" => Some(("resourceLabels.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "tags.tags" => Some(("tags.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "upgrade-settings.blue-green-settings.node-pool-soak-duration" => Some(("upgradeSettings.blueGreenSettings.nodePoolSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "upgrade-settings.blue-green-settings.standard-rollout-policy.batch-node-count" => Some(("upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "upgrade-settings.blue-green-settings.standard-rollout-policy.batch-percentage" => Some(("upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "upgrade-settings.blue-green-settings.standard-rollout-policy.batch-soak-duration" => Some(("upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "upgrade-settings.max-surge" => Some(("upgradeSettings.maxSurge", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "upgrade-settings.max-unavailable" => Some(("upgradeSettings.maxUnavailable", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "upgrade-settings.strategy" => Some(("upgradeSettings.strategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "windows-node-config.os-version" => Some(("windowsNodeConfig.osVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"workload-metadata-config.mode" => Some(("workloadMetadataConfig.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cluster-id", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "enabled", "gcfs-config", "gvnic", "image-type", "kubelet-config", "linux-node-config", "locations", "max-surge", "max-unavailable", "mode", "name", "node-pool-id", "node-version", "project-id", "sysctls", "upgrade-settings", "workload-metadata-config", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["batch-node-count", "batch-percentage", "batch-soak-duration", "blue-green-settings", "cgroup-mode", "cluster-id", "confidential-nodes", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-pod-range", "enable-private-nodes", "enabled", "etag", "fast-socket", "gcfs-config", "gvnic", "image-type", "kubelet-config", "labels", "linux-node-config", "locations", "logging-config", "max-surge", "max-unavailable", "mode", "name", "network-performance-config", "node-network-config", "node-pool-id", "node-pool-soak-duration", "node-version", "os-version", "pod-ipv4-cidr-block", "pod-pids-limit", "pod-range", "project-id", "resource-labels", "standard-rollout-policy", "strategy", "sysctls", "tags", "total-egress-bandwidth-tier", "upgrade-settings", "variant", "variant-config", "windows-node-config", "workload-metadata-config", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1506,6 +1664,7 @@ where "addons-config.dns-cache-config.enabled" => Some(("addonsConfig.dnsCacheConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.gce-persistent-disk-csi-driver-config.enabled" => Some(("addonsConfig.gcePersistentDiskCsiDriverConfig.enabled", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.gcp-filestore-csi-driver-config.enabled" => Some(("addonsConfig.gcpFilestoreCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "addons-config.gke-backup-agent-config.enabled" => Some(("addonsConfig.gkeBackupAgentConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.horizontal-pod-autoscaling.disabled" => Some(("addonsConfig.horizontalPodAutoscaling.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.http-load-balancing.disabled" => Some(("addonsConfig.httpLoadBalancing.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.kubernetes-dashboard.disabled" => Some(("addonsConfig.kubernetesDashboard.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1515,7 +1674,7 @@ where "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "cloud-run-config", "cluster-id", "config-connector-config", "disabled", "dns-cache-config", "enabled", "gce-persistent-disk-csi-driver-config", "gcp-filestore-csi-driver-config", "horizontal-pod-autoscaling", "http-load-balancing", "kubernetes-dashboard", "load-balancer-type", "name", "network-policy-config", "project-id", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "cloud-run-config", "cluster-id", "config-connector-config", "disabled", "dns-cache-config", "enabled", "gce-persistent-disk-csi-driver-config", "gcp-filestore-csi-driver-config", "gke-backup-agent-config", "horizontal-pod-autoscaling", "http-load-balancing", "kubernetes-dashboard", "load-balancer-type", "name", 
"network-policy-config", "project-id", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2422,6 +2581,7 @@ where "update.desired-addons-config.dns-cache-config.enabled" => Some(("update.desiredAddonsConfig.dnsCacheConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.gce-persistent-disk-csi-driver-config.enabled" => Some(("update.desiredAddonsConfig.gcePersistentDiskCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.gcp-filestore-csi-driver-config.enabled" => Some(("update.desiredAddonsConfig.gcpFilestoreCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-addons-config.gke-backup-agent-config.enabled" => Some(("update.desiredAddonsConfig.gkeBackupAgentConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.horizontal-pod-autoscaling.disabled" => Some(("update.desiredAddonsConfig.horizontalPodAutoscaling.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.http-load-balancing.disabled" => Some(("update.desiredAddonsConfig.httpLoadBalancing.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.kubernetes-dashboard.disabled" => Some(("update.desiredAddonsConfig.kubernetesDashboard.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2429,6 +2589,7 @@ where "update.desired-authenticator-groups-config.enabled" => Some(("update.desiredAuthenticatorGroupsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-authenticator-groups-config.security-group" => Some(("update.desiredAuthenticatorGroupsConfig.securityGroup", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-binary-authorization.enabled" => Some(("update.desiredBinaryAuthorization.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-binary-authorization.evaluation-mode" => Some(("update.desiredBinaryAuthorization.evaluationMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-locations" => Some(("update.desiredClusterAutoscaling.autoprovisioningLocations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.boot-disk-kms-key" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.bootDiskKmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.disk-size-gb" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.diskSizeGb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -2443,10 +2604,16 @@ where "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.service-account" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.shielded-instance-config.enable-integrity-monitoring" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.shielded-instance-config.enable-secure-boot" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: 
ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.node-pool-soak-duration" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.nodePoolSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-node-count" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-percentage" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-soak-duration" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.max-surge" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.maxSurge", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.max-unavailable" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.maxUnavailable", JsonTypeInfo { jtype: JsonType::Int, 
ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.strategy" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.strategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoscaling-profile" => Some(("update.desiredClusterAutoscaling.autoscalingProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.enable-node-autoprovisioning" => Some(("update.desiredClusterAutoscaling.enableNodeAutoprovisioning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-cost-management-config.enabled" => Some(("update.desiredCostManagementConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-database-encryption.key-name" => Some(("update.desiredDatabaseEncryption.keyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-database-encryption.state" => Some(("update.desiredDatabaseEncryption.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-datapath-provider" => Some(("update.desiredDatapathProvider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2454,6 +2621,8 @@ where "update.desired-dns-config.cluster-dns" => Some(("update.desiredDnsConfig.clusterDns", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-dns-config.cluster-dns-domain" => Some(("update.desiredDnsConfig.clusterDnsDomain", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-dns-config.cluster-dns-scope" => Some(("update.desiredDnsConfig.clusterDnsScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.desired-enable-private-endpoint" => Some(("update.desiredEnablePrivateEndpoint", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-gateway-api-config.channel" => Some(("update.desiredGatewayApiConfig.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-gcfs-config.enabled" => Some(("update.desiredGcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-identity-service-config.enabled" => Some(("update.desiredIdentityServiceConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-image-type" => Some(("update.desiredImageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2463,16 +2632,22 @@ where "update.desired-logging-config.component-config.enable-components" => Some(("update.desiredLoggingConfig.componentConfig.enableComponents", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "update.desired-logging-service" => Some(("update.desiredLoggingService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-master-authorized-networks-config.enabled" => Some(("update.desiredMasterAuthorizedNetworksConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-master-authorized-networks-config.gcp-public-cidrs-access-enabled" => Some(("update.desiredMasterAuthorizedNetworksConfig.gcpPublicCidrsAccessEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-master-version" => Some(("update.desiredMasterVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-mesh-certificates.enable-certificates" => Some(("update.desiredMeshCertificates.enableCertificates", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-monitoring-config.component-config.enable-components" => Some(("update.desiredMonitoringConfig.componentConfig.enableComponents", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Vec })), + "update.desired-monitoring-config.managed-prometheus-config.enabled" => Some(("update.desiredMonitoringConfig.managedPrometheusConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-monitoring-service" => Some(("update.desiredMonitoringService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-node-pool-auto-config-network-tags.tags" => Some(("update.desiredNodePoolAutoConfigNetworkTags.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "update.desired-node-pool-autoscaling.autoprovisioned" => Some(("update.desiredNodePoolAutoscaling.autoprovisioned", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-node-pool-autoscaling.enabled" => Some(("update.desiredNodePoolAutoscaling.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-node-pool-autoscaling.location-policy" => Some(("update.desiredNodePoolAutoscaling.locationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-node-pool-autoscaling.max-node-count" => Some(("update.desiredNodePoolAutoscaling.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update.desired-node-pool-autoscaling.min-node-count" => Some(("update.desiredNodePoolAutoscaling.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update.desired-node-pool-autoscaling.total-max-node-count" => Some(("update.desiredNodePoolAutoscaling.totalMaxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update.desired-node-pool-autoscaling.total-min-node-count" => Some(("update.desiredNodePoolAutoscaling.totalMinNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update.desired-node-pool-id" => Some(("update.desiredNodePoolId", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "update.desired-node-pool-logging-config.variant-config.variant" => Some(("update.desiredNodePoolLoggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-node-version" => Some(("update.desiredNodeVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-notification-config.pubsub.enabled" => Some(("update.desiredNotificationConfig.pubsub.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-notification-config.pubsub.filter.event-type" => Some(("update.desiredNotificationConfig.pubsub.filter.eventType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2483,6 +2658,7 @@ where "update.desired-private-cluster-config.master-ipv4-cidr-block" => Some(("update.desiredPrivateClusterConfig.masterIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-private-cluster-config.peering-name" => Some(("update.desiredPrivateClusterConfig.peeringName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-private-cluster-config.private-endpoint" => Some(("update.desiredPrivateClusterConfig.privateEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.desired-private-cluster-config.private-endpoint-subnetwork" => Some(("update.desiredPrivateClusterConfig.privateEndpointSubnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-private-cluster-config.public-endpoint" => Some(("update.desiredPrivateClusterConfig.publicEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-private-ipv6-google-access" => Some(("update.desiredPrivateIpv6GoogleAccess", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-release-channel.channel" => Some(("update.desiredReleaseChannel.channel", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2491,11 +2667,13 @@ where "update.desired-resource-usage-export-config.enable-network-egress-metering" => Some(("update.desiredResourceUsageExportConfig.enableNetworkEgressMetering", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-service-external-ips-config.enabled" => Some(("update.desiredServiceExternalIpsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-shielded-nodes.enabled" => Some(("update.desiredShieldedNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-stack-type" => Some(("update.desiredStackType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-vertical-pod-autoscaling.enabled" => Some(("update.desiredVerticalPodAutoscaling.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-workload-identity-config.workload-pool" => Some(("update.desiredWorkloadIdentityConfig.workloadPool", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.etag" => Some(("update.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autoprovisioned", "autoprovisioning-locations", "autoprovisioning-node-pool-defaults", "autoscaling-profile", "bigquery-destination", "boot-disk-kms-key", "channel", "cloud-run-config", "cluster-dns", "cluster-dns-domain", "cluster-dns-scope", "cluster-id", "component-config", "config-connector-config", "consumption-metering-config", "dataset-id", "description", "desired-addons-config", "desired-authenticator-groups-config", "desired-binary-authorization", "desired-cluster-autoscaling", 
"desired-database-encryption", "desired-datapath-provider", "desired-default-snat-status", "desired-dns-config", "desired-gcfs-config", "desired-identity-service-config", "desired-image-type", "desired-intra-node-visibility-config", "desired-l4ilb-subsetting-config", "desired-locations", "desired-logging-config", "desired-logging-service", "desired-master-authorized-networks-config", "desired-master-version", "desired-mesh-certificates", "desired-monitoring-config", "desired-monitoring-service", "desired-node-pool-auto-config-network-tags", "desired-node-pool-autoscaling", "desired-node-pool-id", "desired-node-version", "desired-notification-config", "desired-private-cluster-config", "desired-private-ipv6-google-access", "desired-release-channel", "desired-resource-usage-export-config", "desired-service-external-ips-config", "desired-shielded-nodes", "desired-vertical-pod-autoscaling", "desired-workload-identity-config", "disabled", "disk-size-gb", "disk-type", "dns-cache-config", "enable-certificates", "enable-components", "enable-integrity-monitoring", "enable-network-egress-metering", "enable-node-autoprovisioning", "enable-private-endpoint", "enable-private-nodes", "enable-secure-boot", "enabled", "event-type", "filter", "gce-persistent-disk-csi-driver-config", "gcp-filestore-csi-driver-config", "horizontal-pod-autoscaling", "http-load-balancing", "image-type", "key-name", "kubernetes-dashboard", "load-balancer-type", "management", "master-global-access-config", "master-ipv4-cidr-block", "max-node-count", "max-surge", "max-unavailable", "min-cpu-platform", "min-node-count", "name", "network-policy-config", "oauth-scopes", "peering-name", "private-endpoint", "project-id", "public-endpoint", "pubsub", "security-group", "service-account", "shielded-instance-config", "state", "tags", "topic", "update", "upgrade-options", "upgrade-settings", "workload-pool", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-repair", "auto-upgrade", 
"auto-upgrade-start-time", "autoprovisioned", "autoprovisioning-locations", "autoprovisioning-node-pool-defaults", "autoscaling-profile", "batch-node-count", "batch-percentage", "batch-soak-duration", "bigquery-destination", "blue-green-settings", "boot-disk-kms-key", "channel", "cloud-run-config", "cluster-dns", "cluster-dns-domain", "cluster-dns-scope", "cluster-id", "component-config", "config-connector-config", "consumption-metering-config", "dataset-id", "description", "desired-addons-config", "desired-authenticator-groups-config", "desired-binary-authorization", "desired-cluster-autoscaling", "desired-cost-management-config", "desired-database-encryption", "desired-datapath-provider", "desired-default-snat-status", "desired-dns-config", "desired-enable-private-endpoint", "desired-gateway-api-config", "desired-gcfs-config", "desired-identity-service-config", "desired-image-type", "desired-intra-node-visibility-config", "desired-l4ilb-subsetting-config", "desired-locations", "desired-logging-config", "desired-logging-service", "desired-master-authorized-networks-config", "desired-master-version", "desired-mesh-certificates", "desired-monitoring-config", "desired-monitoring-service", "desired-node-pool-auto-config-network-tags", "desired-node-pool-autoscaling", "desired-node-pool-id", "desired-node-pool-logging-config", "desired-node-version", "desired-notification-config", "desired-private-cluster-config", "desired-private-ipv6-google-access", "desired-release-channel", "desired-resource-usage-export-config", "desired-service-external-ips-config", "desired-shielded-nodes", "desired-stack-type", "desired-vertical-pod-autoscaling", "desired-workload-identity-config", "disabled", "disk-size-gb", "disk-type", "dns-cache-config", "enable-certificates", "enable-components", "enable-integrity-monitoring", "enable-network-egress-metering", "enable-node-autoprovisioning", "enable-private-endpoint", "enable-private-nodes", "enable-secure-boot", "enabled", "etag", 
"evaluation-mode", "event-type", "filter", "gce-persistent-disk-csi-driver-config", "gcp-filestore-csi-driver-config", "gcp-public-cidrs-access-enabled", "gke-backup-agent-config", "horizontal-pod-autoscaling", "http-load-balancing", "image-type", "key-name", "kubernetes-dashboard", "load-balancer-type", "location-policy", "managed-prometheus-config", "management", "master-global-access-config", "master-ipv4-cidr-block", "max-node-count", "max-surge", "max-unavailable", "min-cpu-platform", "min-node-count", "name", "network-policy-config", "node-pool-soak-duration", "oauth-scopes", "peering-name", "private-endpoint", "private-endpoint-subnetwork", "project-id", "public-endpoint", "pubsub", "security-group", "service-account", "shielded-instance-config", "standard-rollout-policy", "state", "strategy", "tags", "topic", "total-max-node-count", "total-min-node-count", "update", "upgrade-options", "upgrade-settings", "variant", "variant-config", "workload-pool", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2990,6 +3168,7 @@ where "addons-config.dns-cache-config.enabled" => Some(("addonsConfig.dnsCacheConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.gce-persistent-disk-csi-driver-config.enabled" => Some(("addonsConfig.gcePersistentDiskCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.gcp-filestore-csi-driver-config.enabled" => Some(("addonsConfig.gcpFilestoreCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "addons-config.gke-backup-agent-config.enabled" => Some(("addonsConfig.gkeBackupAgentConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.horizontal-pod-autoscaling.disabled" => Some(("addonsConfig.horizontalPodAutoscaling.disabled", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.http-load-balancing.disabled" => Some(("addonsConfig.httpLoadBalancing.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "addons-config.kubernetes-dashboard.disabled" => Some(("addonsConfig.kubernetesDashboard.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2999,7 +3178,7 @@ where "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "cloud-run-config", "cluster-id", "config-connector-config", "disabled", "dns-cache-config", "enabled", "gce-persistent-disk-csi-driver-config", "gcp-filestore-csi-driver-config", "horizontal-pod-autoscaling", "http-load-balancing", "kubernetes-dashboard", "load-balancer-type", "name", "network-policy-config", "project-id", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "cloud-run-config", "cluster-id", "config-connector-config", "disabled", "dns-cache-config", "enabled", "gce-persistent-disk-csi-driver-config", "gcp-filestore-csi-driver-config", "gke-backup-agent-config", "horizontal-pod-autoscaling", "http-load-balancing", "kubernetes-dashboard", "load-balancer-type", "name", "network-policy-config", "project-id", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3176,6 +3355,7 @@ where "cluster.addons-config.dns-cache-config.enabled" => Some(("cluster.addonsConfig.dnsCacheConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.gce-persistent-disk-csi-driver-config.enabled" => Some(("cluster.addonsConfig.gcePersistentDiskCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: 
ComplexType::Pod })), "cluster.addons-config.gcp-filestore-csi-driver-config.enabled" => Some(("cluster.addonsConfig.gcpFilestoreCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.addons-config.gke-backup-agent-config.enabled" => Some(("cluster.addonsConfig.gkeBackupAgentConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.horizontal-pod-autoscaling.disabled" => Some(("cluster.addonsConfig.horizontalPodAutoscaling.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.http-load-balancing.disabled" => Some(("cluster.addonsConfig.httpLoadBalancing.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.addons-config.kubernetes-dashboard.disabled" => Some(("cluster.addonsConfig.kubernetesDashboard.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3197,13 +3377,20 @@ where "cluster.autoscaling.autoprovisioning-node-pool-defaults.service-account" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.autoscaling.autoprovisioning-node-pool-defaults.shielded-instance-config.enable-integrity-monitoring" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.autoscaling.autoprovisioning-node-pool-defaults.shielded-instance-config.enable-secure-boot" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.node-pool-soak-duration" => 
Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.nodePoolSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-node-count" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-percentage" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-soak-duration" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.max-surge" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.maxSurge", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.max-unavailable" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.maxUnavailable", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster.autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.strategy" => Some(("cluster.autoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.strategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.autoscaling.autoscaling-profile" 
=> Some(("cluster.autoscaling.autoscalingProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.autoscaling.enable-node-autoprovisioning" => Some(("cluster.autoscaling.enableNodeAutoprovisioning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.binary-authorization.enabled" => Some(("cluster.binaryAuthorization.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.binary-authorization.evaluation-mode" => Some(("cluster.binaryAuthorization.evaluationMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.cluster-ipv4-cidr" => Some(("cluster.clusterIpv4Cidr", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.confidential-nodes.enabled" => Some(("cluster.confidentialNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.cost-management-config.enabled" => Some(("cluster.costManagementConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.create-time" => Some(("cluster.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.current-master-version" => Some(("cluster.currentMasterVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.current-node-count" => Some(("cluster.currentNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -3215,6 +3402,7 @@ where "cluster.enable-kubernetes-alpha" => Some(("cluster.enableKubernetesAlpha", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.enable-tpu" => Some(("cluster.enableTpu", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.endpoint" => Some(("cluster.endpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.etag" => Some(("cluster.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"cluster.expire-time" => Some(("cluster.expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.id" => Some(("cluster.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.identity-service-config.enabled" => Some(("cluster.identityServiceConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3225,11 +3413,13 @@ where "cluster.ip-allocation-policy.cluster-ipv4-cidr-block" => Some(("cluster.ipAllocationPolicy.clusterIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.cluster-secondary-range-name" => Some(("cluster.ipAllocationPolicy.clusterSecondaryRangeName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.create-subnetwork" => Some(("cluster.ipAllocationPolicy.createSubnetwork", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.ip-allocation-policy.ipv6-access-type" => Some(("cluster.ipAllocationPolicy.ipv6AccessType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.node-ipv4-cidr" => Some(("cluster.ipAllocationPolicy.nodeIpv4Cidr", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.node-ipv4-cidr-block" => Some(("cluster.ipAllocationPolicy.nodeIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.services-ipv4-cidr" => Some(("cluster.ipAllocationPolicy.servicesIpv4Cidr", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.services-ipv4-cidr-block" => Some(("cluster.ipAllocationPolicy.servicesIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.services-secondary-range-name" => Some(("cluster.ipAllocationPolicy.servicesSecondaryRangeName", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "cluster.ip-allocation-policy.stack-type" => Some(("cluster.ipAllocationPolicy.stackType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.subnetwork-name" => Some(("cluster.ipAllocationPolicy.subnetworkName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.tpu-ipv4-cidr-block" => Some(("cluster.ipAllocationPolicy.tpuIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.ip-allocation-policy.use-ip-aliases" => Some(("cluster.ipAllocationPolicy.useIpAliases", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3254,8 +3444,10 @@ where "cluster.master-auth.password" => Some(("cluster.masterAuth.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.master-auth.username" => Some(("cluster.masterAuth.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.master-authorized-networks-config.enabled" => Some(("cluster.masterAuthorizedNetworksConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.master-authorized-networks-config.gcp-public-cidrs-access-enabled" => Some(("cluster.masterAuthorizedNetworksConfig.gcpPublicCidrsAccessEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.mesh-certificates.enable-certificates" => Some(("cluster.meshCertificates.enableCertificates", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.monitoring-config.component-config.enable-components" => Some(("cluster.monitoringConfig.componentConfig.enableComponents", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "cluster.monitoring-config.managed-prometheus-config.enabled" => Some(("cluster.monitoringConfig.managedPrometheusConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: 
ComplexType::Pod })), "cluster.monitoring-service" => Some(("cluster.monitoringService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.name" => Some(("cluster.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network" => Some(("cluster.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3266,6 +3458,7 @@ where "cluster.network-config.dns-config.cluster-dns-scope" => Some(("cluster.networkConfig.dnsConfig.clusterDnsScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network-config.enable-intra-node-visibility" => Some(("cluster.networkConfig.enableIntraNodeVisibility", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.network-config.enable-l4ilb-subsetting" => Some(("cluster.networkConfig.enableL4ilbSubsetting", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.network-config.gateway-api-config.channel" => Some(("cluster.networkConfig.gatewayApiConfig.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network-config.network" => Some(("cluster.networkConfig.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network-config.private-ipv6-google-access" => Some(("cluster.networkConfig.privateIpv6GoogleAccess", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.network-config.service-external-ips-config.enabled" => Some(("cluster.networkConfig.serviceExternalIpsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3274,17 +3467,24 @@ where "cluster.network-policy.provider" => Some(("cluster.networkPolicy.provider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.advanced-machine-features.threads-per-core" => Some(("cluster.nodeConfig.advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.boot-disk-kms-key" => Some(("cluster.nodeConfig.bootDiskKmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.node-config.confidential-nodes.enabled" => Some(("cluster.nodeConfig.confidentialNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.disk-size-gb" => Some(("cluster.nodeConfig.diskSizeGb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster.node-config.disk-type" => Some(("cluster.nodeConfig.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.node-config.ephemeral-storage-local-ssd-config.local-ssd-count" => Some(("cluster.nodeConfig.ephemeralStorageLocalSsdConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster.node-config.fast-socket.enabled" => Some(("cluster.nodeConfig.fastSocket.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.gcfs-config.enabled" => Some(("cluster.nodeConfig.gcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.gvnic.enabled" => Some(("cluster.nodeConfig.gvnic.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.image-type" => Some(("cluster.nodeConfig.imageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.kubelet-config.cpu-cfs-quota" => Some(("cluster.nodeConfig.kubeletConfig.cpuCfsQuota", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.kubelet-config.cpu-cfs-quota-period" => Some(("cluster.nodeConfig.kubeletConfig.cpuCfsQuotaPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.kubelet-config.cpu-manager-policy" => Some(("cluster.nodeConfig.kubeletConfig.cpuManagerPolicy", JsonTypeInfo 
{ jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.node-config.kubelet-config.pod-pids-limit" => Some(("cluster.nodeConfig.kubeletConfig.podPidsLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.labels" => Some(("cluster.nodeConfig.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "cluster.node-config.linux-node-config.cgroup-mode" => Some(("cluster.nodeConfig.linuxNodeConfig.cgroupMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.linux-node-config.sysctls" => Some(("cluster.nodeConfig.linuxNodeConfig.sysctls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "cluster.node-config.local-nvme-ssd-block-config.local-ssd-count" => Some(("cluster.nodeConfig.localNvmeSsdBlockConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster.node-config.local-ssd-count" => Some(("cluster.nodeConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "cluster.node-config.logging-config.variant-config.variant" => Some(("cluster.nodeConfig.loggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.machine-type" => Some(("cluster.nodeConfig.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.metadata" => Some(("cluster.nodeConfig.metadata", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "cluster.node-config.min-cpu-platform" => Some(("cluster.nodeConfig.minCpuPlatform", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3294,16 +3494,19 @@ where "cluster.node-config.reservation-affinity.consume-reservation-type" => Some(("cluster.nodeConfig.reservationAffinity.consumeReservationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"cluster.node-config.reservation-affinity.key" => Some(("cluster.nodeConfig.reservationAffinity.key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.reservation-affinity.values" => Some(("cluster.nodeConfig.reservationAffinity.values", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "cluster.node-config.resource-labels" => Some(("cluster.nodeConfig.resourceLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "cluster.node-config.sandbox-config.type" => Some(("cluster.nodeConfig.sandboxConfig.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.service-account" => Some(("cluster.nodeConfig.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.shielded-instance-config.enable-integrity-monitoring" => Some(("cluster.nodeConfig.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.shielded-instance-config.enable-secure-boot" => Some(("cluster.nodeConfig.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.spot" => Some(("cluster.nodeConfig.spot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.node-config.tags" => Some(("cluster.nodeConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "cluster.node-config.windows-node-config.os-version" => Some(("cluster.nodeConfig.windowsNodeConfig.osVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-config.workload-metadata-config.mode" => Some(("cluster.nodeConfig.workloadMetadataConfig.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.node-ipv4-cidr-size" => Some(("cluster.nodeIpv4CidrSize", JsonTypeInfo { jtype: JsonType::Int, ctype: 
ComplexType::Pod })), "cluster.node-pool-auto-config.network-tags.tags" => Some(("cluster.nodePoolAutoConfig.networkTags.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "cluster.node-pool-defaults.node-config-defaults.gcfs-config.enabled" => Some(("cluster.nodePoolDefaults.nodeConfigDefaults.gcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cluster.node-pool-defaults.node-config-defaults.logging-config.variant-config.variant" => Some(("cluster.nodePoolDefaults.nodeConfigDefaults.loggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.notification-config.pubsub.enabled" => Some(("cluster.notificationConfig.pubsub.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cluster.notification-config.pubsub.filter.event-type" => Some(("cluster.notificationConfig.pubsub.filter.eventType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "cluster.notification-config.pubsub.topic" => Some(("cluster.notificationConfig.pubsub.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3313,6 +3516,7 @@ where "cluster.private-cluster-config.master-ipv4-cidr-block" => Some(("cluster.privateClusterConfig.masterIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.private-cluster-config.peering-name" => Some(("cluster.privateClusterConfig.peeringName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.private-cluster-config.private-endpoint" => Some(("cluster.privateClusterConfig.privateEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cluster.private-cluster-config.private-endpoint-subnetwork" => Some(("cluster.privateClusterConfig.privateEndpointSubnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.private-cluster-config.public-endpoint" => 
Some(("cluster.privateClusterConfig.publicEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.release-channel.channel" => Some(("cluster.releaseChannel.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster.resource-labels" => Some(("cluster.resourceLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -3333,7 +3537,7 @@ where "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-machine-features", "authenticator-groups-config", "auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autopilot", "autoprovisioning-locations", "autoprovisioning-node-pool-defaults", "autoscaling", "autoscaling-profile", "bigquery-destination", "binary-authorization", "boot-disk-kms-key", "channel", "client-certificate", "client-certificate-config", "client-key", "cloud-run-config", "cluster", "cluster-ca-certificate", "cluster-dns", "cluster-dns-domain", "cluster-dns-scope", "cluster-ipv4-cidr", "cluster-ipv4-cidr-block", "cluster-secondary-range-name", "component-config", "confidential-nodes", "config-connector-config", "consume-reservation-type", "consumption-metering-config", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-subnetwork", "create-time", "current-master-version", "current-node-count", "current-node-version", "daily-maintenance-window", "database-encryption", "datapath-provider", "dataset-id", "default-max-pods-constraint", "default-snat-status", "description", "disabled", "disk-size-gb", "disk-type", "dns-cache-config", "dns-config", "duration", "enable-certificates", "enable-components", "enable-integrity-monitoring", "enable-intra-node-visibility", "enable-kubernetes-alpha", "enable-l4ilb-subsetting", 
"enable-network-egress-metering", "enable-node-autoprovisioning", "enable-private-endpoint", "enable-private-nodes", "enable-secure-boot", "enable-tpu", "enabled", "end-time", "endpoint", "event-type", "expire-time", "filter", "gce-persistent-disk-csi-driver-config", "gcfs-config", "gcp-filestore-csi-driver-config", "gvnic", "horizontal-pod-autoscaling", "http-load-balancing", "id", "identity-service-config", "image-type", "initial-cluster-version", "initial-node-count", "instance-group-urls", "ip-allocation-policy", "issue-client-certificate", "key", "key-name", "kubelet-config", "kubernetes-dashboard", "label-fingerprint", "labels", "legacy-abac", "linux-node-config", "load-balancer-type", "local-ssd-count", "location", "locations", "logging-config", "logging-service", "machine-type", "maintenance-exclusion-options", "maintenance-policy", "management", "master-auth", "master-authorized-networks-config", "master-global-access-config", "master-ipv4-cidr-block", "max-pods-per-node", "max-surge", "max-unavailable", "mesh-certificates", "metadata", "min-cpu-platform", "mode", "monitoring-config", "monitoring-service", "name", "network", "network-config", "network-policy", "network-policy-config", "network-tags", "node-config", "node-config-defaults", "node-group", "node-ipv4-cidr", "node-ipv4-cidr-block", "node-ipv4-cidr-size", "node-pool-auto-config", "node-pool-defaults", "notification-config", "oauth-scopes", "parent", "password", "peering-name", "preemptible", "private-cluster-config", "private-endpoint", "private-ipv6-google-access", "project-id", "provider", "public-endpoint", "pubsub", "recurrence", "recurring-window", "release-channel", "reservation-affinity", "resource-labels", "resource-usage-export-config", "resource-version", "sandbox-config", "scope", "security-group", "self-link", "service-account", "service-external-ips-config", "services-ipv4-cidr", "services-ipv4-cidr-block", "services-secondary-range-name", "shielded-instance-config", 
"shielded-nodes", "spot", "start-time", "state", "status", "status-message", "subnetwork", "subnetwork-name", "sysctls", "tags", "threads-per-core", "topic", "tpu-ipv4-cidr-block", "type", "upgrade-options", "upgrade-settings", "use-ip-aliases", "use-routes", "username", "values", "vertical-pod-autoscaling", "window", "workload-identity-config", "workload-metadata-config", "workload-pool", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["addons-config", "advanced-machine-features", "authenticator-groups-config", "auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autopilot", "autoprovisioning-locations", "autoprovisioning-node-pool-defaults", "autoscaling", "autoscaling-profile", "batch-node-count", "batch-percentage", "batch-soak-duration", "bigquery-destination", "binary-authorization", "blue-green-settings", "boot-disk-kms-key", "cgroup-mode", "channel", "client-certificate", "client-certificate-config", "client-key", "cloud-run-config", "cluster", "cluster-ca-certificate", "cluster-dns", "cluster-dns-domain", "cluster-dns-scope", "cluster-ipv4-cidr", "cluster-ipv4-cidr-block", "cluster-secondary-range-name", "component-config", "confidential-nodes", "config-connector-config", "consume-reservation-type", "consumption-metering-config", "cost-management-config", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-subnetwork", "create-time", "current-master-version", "current-node-count", "current-node-version", "daily-maintenance-window", "database-encryption", "datapath-provider", "dataset-id", "default-max-pods-constraint", "default-snat-status", "description", "disabled", "disk-size-gb", "disk-type", "dns-cache-config", "dns-config", "duration", "enable-certificates", "enable-components", "enable-integrity-monitoring", "enable-intra-node-visibility", "enable-kubernetes-alpha", "enable-l4ilb-subsetting", "enable-network-egress-metering", "enable-node-autoprovisioning", "enable-private-endpoint", "enable-private-nodes", 
"enable-secure-boot", "enable-tpu", "enabled", "end-time", "endpoint", "ephemeral-storage-local-ssd-config", "etag", "evaluation-mode", "event-type", "expire-time", "fast-socket", "filter", "gateway-api-config", "gce-persistent-disk-csi-driver-config", "gcfs-config", "gcp-filestore-csi-driver-config", "gcp-public-cidrs-access-enabled", "gke-backup-agent-config", "gvnic", "horizontal-pod-autoscaling", "http-load-balancing", "id", "identity-service-config", "image-type", "initial-cluster-version", "initial-node-count", "instance-group-urls", "ip-allocation-policy", "ipv6-access-type", "issue-client-certificate", "key", "key-name", "kubelet-config", "kubernetes-dashboard", "label-fingerprint", "labels", "legacy-abac", "linux-node-config", "load-balancer-type", "local-nvme-ssd-block-config", "local-ssd-count", "location", "locations", "logging-config", "logging-service", "machine-type", "maintenance-exclusion-options", "maintenance-policy", "managed-prometheus-config", "management", "master-auth", "master-authorized-networks-config", "master-global-access-config", "master-ipv4-cidr-block", "max-pods-per-node", "max-surge", "max-unavailable", "mesh-certificates", "metadata", "min-cpu-platform", "mode", "monitoring-config", "monitoring-service", "name", "network", "network-config", "network-policy", "network-policy-config", "network-tags", "node-config", "node-config-defaults", "node-group", "node-ipv4-cidr", "node-ipv4-cidr-block", "node-ipv4-cidr-size", "node-pool-auto-config", "node-pool-defaults", "node-pool-soak-duration", "notification-config", "oauth-scopes", "os-version", "parent", "password", "peering-name", "pod-pids-limit", "preemptible", "private-cluster-config", "private-endpoint", "private-endpoint-subnetwork", "private-ipv6-google-access", "project-id", "provider", "public-endpoint", "pubsub", "recurrence", "recurring-window", "release-channel", "reservation-affinity", "resource-labels", "resource-usage-export-config", "resource-version", "sandbox-config", 
"scope", "security-group", "self-link", "service-account", "service-external-ips-config", "services-ipv4-cidr", "services-ipv4-cidr-block", "services-secondary-range-name", "shielded-instance-config", "shielded-nodes", "spot", "stack-type", "standard-rollout-policy", "start-time", "state", "status", "status-message", "strategy", "subnetwork", "subnetwork-name", "sysctls", "tags", "threads-per-core", "topic", "tpu-ipv4-cidr-block", "type", "upgrade-options", "upgrade-settings", "use-ip-aliases", "use-routes", "username", "values", "variant", "variant-config", "vertical-pod-autoscaling", "window", "windows-node-config", "workload-identity-config", "workload-metadata-config", "workload-pool", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4031,15 +4235,18 @@ where match &temp_cursor.to_string()[..] { "autoscaling.autoprovisioned" => Some(("autoscaling.autoprovisioned", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "autoscaling.enabled" => Some(("autoscaling.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "autoscaling.location-policy" => Some(("autoscaling.locationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "autoscaling.max-node-count" => Some(("autoscaling.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "autoscaling.min-node-count" => Some(("autoscaling.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "autoscaling.total-max-node-count" => Some(("autoscaling.totalMaxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "autoscaling.total-min-node-count" => Some(("autoscaling.totalMinNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cluster-id" => Some(("clusterId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool-id" => Some(("nodePoolId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["autoprovisioned", "autoscaling", "cluster-id", "enabled", "max-node-count", "min-node-count", "name", "node-pool-id", "project-id", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["autoprovisioned", "autoscaling", "cluster-id", "enabled", "location-policy", "max-node-count", "min-node-count", "name", "node-pool-id", "project-id", "total-max-node-count", "total-min-node-count", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4125,21 +4332,31 @@ where "cluster-id" => Some(("clusterId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.autoscaling.autoprovisioned" => Some(("nodePool.autoscaling.autoprovisioned", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.autoscaling.enabled" => Some(("nodePool.autoscaling.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-pool.autoscaling.location-policy" => Some(("nodePool.autoscaling.locationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.autoscaling.max-node-count" => Some(("nodePool.autoscaling.maxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.autoscaling.min-node-count" => Some(("nodePool.autoscaling.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.autoscaling.total-max-node-count" => Some(("nodePool.autoscaling.totalMaxNodeCount", JsonTypeInfo { jtype: 
JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.autoscaling.total-min-node-count" => Some(("nodePool.autoscaling.totalMinNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.config.advanced-machine-features.threads-per-core" => Some(("nodePool.config.advancedMachineFeatures.threadsPerCore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.boot-disk-kms-key" => Some(("nodePool.config.bootDiskKmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.config.confidential-nodes.enabled" => Some(("nodePool.config.confidentialNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.disk-size-gb" => Some(("nodePool.config.diskSizeGb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.config.disk-type" => Some(("nodePool.config.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.config.ephemeral-storage-local-ssd-config.local-ssd-count" => Some(("nodePool.config.ephemeralStorageLocalSsdConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.config.fast-socket.enabled" => Some(("nodePool.config.fastSocket.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.gcfs-config.enabled" => Some(("nodePool.config.gcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.gvnic.enabled" => Some(("nodePool.config.gvnic.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.image-type" => Some(("nodePool.config.imageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.kubelet-config.cpu-cfs-quota" => Some(("nodePool.config.kubeletConfig.cpuCfsQuota", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"node-pool.config.kubelet-config.cpu-cfs-quota-period" => Some(("nodePool.config.kubeletConfig.cpuCfsQuotaPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.kubelet-config.cpu-manager-policy" => Some(("nodePool.config.kubeletConfig.cpuManagerPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.config.kubelet-config.pod-pids-limit" => Some(("nodePool.config.kubeletConfig.podPidsLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.labels" => Some(("nodePool.config.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "node-pool.config.linux-node-config.cgroup-mode" => Some(("nodePool.config.linuxNodeConfig.cgroupMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.linux-node-config.sysctls" => Some(("nodePool.config.linuxNodeConfig.sysctls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "node-pool.config.local-nvme-ssd-block-config.local-ssd-count" => Some(("nodePool.config.localNvmeSsdBlockConfig.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.config.local-ssd-count" => Some(("nodePool.config.localSsdCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.config.logging-config.variant-config.variant" => Some(("nodePool.config.loggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.machine-type" => Some(("nodePool.config.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.metadata" => Some(("nodePool.config.metadata", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "node-pool.config.min-cpu-platform" => Some(("nodePool.config.minCpuPlatform", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4149,13 +4366,16 @@ where 
"node-pool.config.reservation-affinity.consume-reservation-type" => Some(("nodePool.config.reservationAffinity.consumeReservationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.reservation-affinity.key" => Some(("nodePool.config.reservationAffinity.key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.reservation-affinity.values" => Some(("nodePool.config.reservationAffinity.values", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-pool.config.resource-labels" => Some(("nodePool.config.resourceLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "node-pool.config.sandbox-config.type" => Some(("nodePool.config.sandboxConfig.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.service-account" => Some(("nodePool.config.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.shielded-instance-config.enable-integrity-monitoring" => Some(("nodePool.config.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.shielded-instance-config.enable-secure-boot" => Some(("nodePool.config.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.spot" => Some(("nodePool.config.spot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "node-pool.config.tags" => Some(("nodePool.config.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-pool.config.windows-node-config.os-version" => Some(("nodePool.config.windowsNodeConfig.osVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.config.workload-metadata-config.mode" => Some(("nodePool.config.workloadMetadataConfig.mode", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "node-pool.etag" => Some(("nodePool.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.initial-node-count" => Some(("nodePool.initialNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.instance-group-urls" => Some(("nodePool.instanceGroupUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "node-pool.locations" => Some(("nodePool.locations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -4166,20 +4386,33 @@ where "node-pool.max-pods-constraint.max-pods-per-node" => Some(("nodePool.maxPodsConstraint.maxPodsPerNode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.name" => Some(("nodePool.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.network-config.create-pod-range" => Some(("nodePool.networkConfig.createPodRange", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-pool.network-config.enable-private-nodes" => Some(("nodePool.networkConfig.enablePrivateNodes", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-pool.network-config.network-performance-config.total-egress-bandwidth-tier" => Some(("nodePool.networkConfig.networkPerformanceConfig.totalEgressBandwidthTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.network-config.pod-ipv4-cidr-block" => Some(("nodePool.networkConfig.podIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.network-config.pod-range" => Some(("nodePool.networkConfig.podRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.placement-policy.type" => Some(("nodePool.placementPolicy.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.pod-ipv4-cidr-size" => Some(("nodePool.podIpv4CidrSize", JsonTypeInfo { jtype: JsonType::Int, 
ctype: ComplexType::Pod })), "node-pool.self-link" => Some(("nodePool.selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.status" => Some(("nodePool.status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.status-message" => Some(("nodePool.statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.update-info.blue-green-info.blue-instance-group-urls" => Some(("nodePool.updateInfo.blueGreenInfo.blueInstanceGroupUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-pool.update-info.blue-green-info.blue-pool-deletion-start-time" => Some(("nodePool.updateInfo.blueGreenInfo.bluePoolDeletionStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.update-info.blue-green-info.green-instance-group-urls" => Some(("nodePool.updateInfo.blueGreenInfo.greenInstanceGroupUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-pool.update-info.blue-green-info.green-pool-version" => Some(("nodePool.updateInfo.blueGreenInfo.greenPoolVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.update-info.blue-green-info.phase" => Some(("nodePool.updateInfo.blueGreenInfo.phase", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.blue-green-settings.node-pool-soak-duration" => Some(("nodePool.upgradeSettings.blueGreenSettings.nodePoolSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-node-count" => Some(("nodePool.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-percentage" => 
Some(("nodePool.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-soak-duration" => Some(("nodePool.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.upgrade-settings.max-surge" => Some(("nodePool.upgradeSettings.maxSurge", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "node-pool.upgrade-settings.max-unavailable" => Some(("nodePool.upgradeSettings.maxUnavailable", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-pool.upgrade-settings.strategy" => Some(("nodePool.upgradeSettings.strategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool.version" => Some(("nodePool.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autoprovisioned", "autoscaling", "boot-disk-kms-key", "cluster-id", "config", "consume-reservation-type", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-pod-range", "description", "disk-size-gb", "disk-type", "enable-integrity-monitoring", "enable-secure-boot", "enabled", "gcfs-config", "gvnic", "image-type", "initial-node-count", "instance-group-urls", "key", "kubelet-config", "labels", "linux-node-config", "local-ssd-count", "locations", "machine-type", "management", "max-node-count", "max-pods-constraint", 
"max-pods-per-node", "max-surge", "max-unavailable", "metadata", "min-cpu-platform", "min-node-count", "mode", "name", "network-config", "node-group", "node-pool", "oauth-scopes", "parent", "pod-ipv4-cidr-block", "pod-ipv4-cidr-size", "pod-range", "preemptible", "project-id", "reservation-affinity", "sandbox-config", "self-link", "service-account", "shielded-instance-config", "spot", "status", "status-message", "sysctls", "tags", "threads-per-core", "type", "upgrade-options", "upgrade-settings", "values", "version", "workload-metadata-config", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-machine-features", "auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autoprovisioned", "autoscaling", "batch-node-count", "batch-percentage", "batch-soak-duration", "blue-green-info", "blue-green-settings", "blue-instance-group-urls", "blue-pool-deletion-start-time", "boot-disk-kms-key", "cgroup-mode", "cluster-id", "confidential-nodes", "config", "consume-reservation-type", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-pod-range", "description", "disk-size-gb", "disk-type", "enable-integrity-monitoring", "enable-private-nodes", "enable-secure-boot", "enabled", "ephemeral-storage-local-ssd-config", "etag", "fast-socket", "gcfs-config", "green-instance-group-urls", "green-pool-version", "gvnic", "image-type", "initial-node-count", "instance-group-urls", "key", "kubelet-config", "labels", "linux-node-config", "local-nvme-ssd-block-config", "local-ssd-count", "location-policy", "locations", "logging-config", "machine-type", "management", "max-node-count", "max-pods-constraint", "max-pods-per-node", "max-surge", "max-unavailable", "metadata", "min-cpu-platform", "min-node-count", "mode", "name", "network-config", "network-performance-config", "node-group", "node-pool", "node-pool-soak-duration", "oauth-scopes", "os-version", "parent", "phase", "placement-policy", "pod-ipv4-cidr-block", "pod-ipv4-cidr-size", 
"pod-pids-limit", "pod-range", "preemptible", "project-id", "reservation-affinity", "resource-labels", "sandbox-config", "self-link", "service-account", "shielded-instance-config", "spot", "standard-rollout-policy", "status", "status-message", "strategy", "sysctls", "tags", "threads-per-core", "total-egress-bandwidth-tier", "total-max-node-count", "total-min-node-count", "type", "update-info", "upgrade-options", "upgrade-settings", "values", "variant", "variant-config", "version", "windows-node-config", "workload-metadata-config", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4434,9 +4667,10 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool-id" => Some(("nodePoolId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "respect-pdb" => Some(("respectPdb", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cluster-id", "name", "node-pool-id", "project-id", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cluster-id", "name", "node-pool-id", "project-id", "respect-pdb", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4703,24 +4937,44 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "cluster-id" => Some(("clusterId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "confidential-nodes.enabled" => Some(("confidentialNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fast-socket.enabled" => Some(("fastSocket.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "gcfs-config.enabled" => Some(("gcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "gvnic.enabled" => Some(("gvnic.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "image-type" => Some(("imageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kubelet-config.cpu-cfs-quota" => Some(("kubeletConfig.cpuCfsQuota", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "kubelet-config.cpu-cfs-quota-period" => Some(("kubeletConfig.cpuCfsQuotaPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kubelet-config.cpu-manager-policy" => Some(("kubeletConfig.cpuManagerPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kubelet-config.pod-pids-limit" => Some(("kubeletConfig.podPidsLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels.labels" => Some(("labels.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "linux-node-config.cgroup-mode" => Some(("linuxNodeConfig.cgroupMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "linux-node-config.sysctls" => Some(("linuxNodeConfig.sysctls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "locations" => Some(("locations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "logging-config.variant-config.variant" => Some(("loggingConfig.variantConfig.variant", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-network-config.create-pod-range" => Some(("nodeNetworkConfig.createPodRange", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-network-config.enable-private-nodes" => Some(("nodeNetworkConfig.enablePrivateNodes", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-network-config.network-performance-config.total-egress-bandwidth-tier" => Some(("nodeNetworkConfig.networkPerformanceConfig.totalEgressBandwidthTier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-network-config.pod-ipv4-cidr-block" => Some(("nodeNetworkConfig.podIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-network-config.pod-range" => Some(("nodeNetworkConfig.podRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-pool-id" => Some(("nodePoolId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "node-version" => Some(("nodeVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-labels.labels" => Some(("resourceLabels.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "tags.tags" => Some(("tags.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "upgrade-settings.blue-green-settings.node-pool-soak-duration" => Some(("upgradeSettings.blueGreenSettings.nodePoolSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "upgrade-settings.blue-green-settings.standard-rollout-policy.batch-node-count" => Some(("upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + 
"upgrade-settings.blue-green-settings.standard-rollout-policy.batch-percentage" => Some(("upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "upgrade-settings.blue-green-settings.standard-rollout-policy.batch-soak-duration" => Some(("upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "upgrade-settings.max-surge" => Some(("upgradeSettings.maxSurge", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "upgrade-settings.max-unavailable" => Some(("upgradeSettings.maxUnavailable", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "upgrade-settings.strategy" => Some(("upgradeSettings.strategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "windows-node-config.os-version" => Some(("windowsNodeConfig.osVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "workload-metadata-config.mode" => Some(("workloadMetadataConfig.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cluster-id", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "enabled", "gcfs-config", "gvnic", "image-type", "kubelet-config", "linux-node-config", "locations", "max-surge", "max-unavailable", "mode", "name", "node-pool-id", "node-version", "project-id", "sysctls", "upgrade-settings", "workload-metadata-config", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["batch-node-count", "batch-percentage", "batch-soak-duration", "blue-green-settings", "cgroup-mode", "cluster-id", "confidential-nodes", "cpu-cfs-quota", "cpu-cfs-quota-period", "cpu-manager-policy", "create-pod-range", "enable-private-nodes", "enabled", "etag", "fast-socket", 
"gcfs-config", "gvnic", "image-type", "kubelet-config", "labels", "linux-node-config", "locations", "logging-config", "max-surge", "max-unavailable", "mode", "name", "network-performance-config", "node-network-config", "node-pool-id", "node-pool-soak-duration", "node-version", "os-version", "pod-ipv4-cidr-block", "pod-pids-limit", "pod-range", "project-id", "resource-labels", "standard-rollout-policy", "strategy", "sysctls", "tags", "total-egress-bandwidth-tier", "upgrade-settings", "variant", "variant-config", "windows-node-config", "workload-metadata-config", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5271,6 +5525,7 @@ where "update.desired-addons-config.dns-cache-config.enabled" => Some(("update.desiredAddonsConfig.dnsCacheConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.gce-persistent-disk-csi-driver-config.enabled" => Some(("update.desiredAddonsConfig.gcePersistentDiskCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.gcp-filestore-csi-driver-config.enabled" => Some(("update.desiredAddonsConfig.gcpFilestoreCsiDriverConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-addons-config.gke-backup-agent-config.enabled" => Some(("update.desiredAddonsConfig.gkeBackupAgentConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.horizontal-pod-autoscaling.disabled" => Some(("update.desiredAddonsConfig.horizontalPodAutoscaling.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-addons-config.http-load-balancing.disabled" => Some(("update.desiredAddonsConfig.httpLoadBalancing.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"update.desired-addons-config.kubernetes-dashboard.disabled" => Some(("update.desiredAddonsConfig.kubernetesDashboard.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -5278,6 +5533,7 @@ where "update.desired-authenticator-groups-config.enabled" => Some(("update.desiredAuthenticatorGroupsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-authenticator-groups-config.security-group" => Some(("update.desiredAuthenticatorGroupsConfig.securityGroup", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-binary-authorization.enabled" => Some(("update.desiredBinaryAuthorization.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-binary-authorization.evaluation-mode" => Some(("update.desiredBinaryAuthorization.evaluationMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-locations" => Some(("update.desiredClusterAutoscaling.autoprovisioningLocations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.boot-disk-kms-key" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.bootDiskKmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.disk-size-gb" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.diskSizeGb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -5292,10 +5548,16 @@ where "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.service-account" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.shielded-instance-config.enable-integrity-monitoring" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.shielded-instance-config.enable-secure-boot" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.node-pool-soak-duration" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.nodePoolSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-node-count" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-percentage" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.blue-green-settings.standard-rollout-policy.batch-soak-duration" => 
Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.blueGreenSettings.standardRolloutPolicy.batchSoakDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.max-surge" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.maxSurge", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.max-unavailable" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.maxUnavailable", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update.desired-cluster-autoscaling.autoprovisioning-node-pool-defaults.upgrade-settings.strategy" => Some(("update.desiredClusterAutoscaling.autoprovisioningNodePoolDefaults.upgradeSettings.strategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.autoscaling-profile" => Some(("update.desiredClusterAutoscaling.autoscalingProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-cluster-autoscaling.enable-node-autoprovisioning" => Some(("update.desiredClusterAutoscaling.enableNodeAutoprovisioning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-cost-management-config.enabled" => Some(("update.desiredCostManagementConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-database-encryption.key-name" => Some(("update.desiredDatabaseEncryption.keyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-database-encryption.state" => Some(("update.desiredDatabaseEncryption.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-datapath-provider" => 
Some(("update.desiredDatapathProvider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5303,6 +5565,8 @@ where "update.desired-dns-config.cluster-dns" => Some(("update.desiredDnsConfig.clusterDns", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-dns-config.cluster-dns-domain" => Some(("update.desiredDnsConfig.clusterDnsDomain", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-dns-config.cluster-dns-scope" => Some(("update.desiredDnsConfig.clusterDnsScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.desired-enable-private-endpoint" => Some(("update.desiredEnablePrivateEndpoint", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-gateway-api-config.channel" => Some(("update.desiredGatewayApiConfig.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-gcfs-config.enabled" => Some(("update.desiredGcfsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-identity-service-config.enabled" => Some(("update.desiredIdentityServiceConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-image-type" => Some(("update.desiredImageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5312,16 +5576,22 @@ where "update.desired-logging-config.component-config.enable-components" => Some(("update.desiredLoggingConfig.componentConfig.enableComponents", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "update.desired-logging-service" => Some(("update.desiredLoggingService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-master-authorized-networks-config.enabled" => Some(("update.desiredMasterAuthorizedNetworksConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + 
"update.desired-master-authorized-networks-config.gcp-public-cidrs-access-enabled" => Some(("update.desiredMasterAuthorizedNetworksConfig.gcpPublicCidrsAccessEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-master-version" => Some(("update.desiredMasterVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-mesh-certificates.enable-certificates" => Some(("update.desiredMeshCertificates.enableCertificates", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-monitoring-config.component-config.enable-components" => Some(("update.desiredMonitoringConfig.componentConfig.enableComponents", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "update.desired-monitoring-config.managed-prometheus-config.enabled" => Some(("update.desiredMonitoringConfig.managedPrometheusConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-monitoring-service" => Some(("update.desiredMonitoringService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-node-pool-auto-config-network-tags.tags" => Some(("update.desiredNodePoolAutoConfigNetworkTags.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "update.desired-node-pool-autoscaling.autoprovisioned" => Some(("update.desiredNodePoolAutoscaling.autoprovisioned", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-node-pool-autoscaling.enabled" => Some(("update.desiredNodePoolAutoscaling.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-node-pool-autoscaling.location-policy" => Some(("update.desiredNodePoolAutoscaling.locationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-node-pool-autoscaling.max-node-count" => Some(("update.desiredNodePoolAutoscaling.maxNodeCount", 
JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update.desired-node-pool-autoscaling.min-node-count" => Some(("update.desiredNodePoolAutoscaling.minNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update.desired-node-pool-autoscaling.total-max-node-count" => Some(("update.desiredNodePoolAutoscaling.totalMaxNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update.desired-node-pool-autoscaling.total-min-node-count" => Some(("update.desiredNodePoolAutoscaling.totalMinNodeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update.desired-node-pool-id" => Some(("update.desiredNodePoolId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.desired-node-pool-logging-config.variant-config.variant" => Some(("update.desiredNodePoolLoggingConfig.variantConfig.variant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-node-version" => Some(("update.desiredNodeVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-notification-config.pubsub.enabled" => Some(("update.desiredNotificationConfig.pubsub.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-notification-config.pubsub.filter.event-type" => Some(("update.desiredNotificationConfig.pubsub.filter.eventType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -5332,6 +5602,7 @@ where "update.desired-private-cluster-config.master-ipv4-cidr-block" => Some(("update.desiredPrivateClusterConfig.masterIpv4CidrBlock", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-private-cluster-config.peering-name" => Some(("update.desiredPrivateClusterConfig.peeringName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-private-cluster-config.private-endpoint" => 
Some(("update.desiredPrivateClusterConfig.privateEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.desired-private-cluster-config.private-endpoint-subnetwork" => Some(("update.desiredPrivateClusterConfig.privateEndpointSubnetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-private-cluster-config.public-endpoint" => Some(("update.desiredPrivateClusterConfig.publicEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-private-ipv6-google-access" => Some(("update.desiredPrivateIpv6GoogleAccess", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-release-channel.channel" => Some(("update.desiredReleaseChannel.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5340,11 +5611,13 @@ where "update.desired-resource-usage-export-config.enable-network-egress-metering" => Some(("update.desiredResourceUsageExportConfig.enableNetworkEgressMetering", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-service-external-ips-config.enabled" => Some(("update.desiredServiceExternalIpsConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-shielded-nodes.enabled" => Some(("update.desiredShieldedNodes.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update.desired-stack-type" => Some(("update.desiredStackType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update.desired-vertical-pod-autoscaling.enabled" => Some(("update.desiredVerticalPodAutoscaling.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update.desired-workload-identity-config.workload-pool" => Some(("update.desiredWorkloadIdentityConfig.workloadPool", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update.etag" => Some(("update.etag", JsonTypeInfo 
{ jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autoprovisioned", "autoprovisioning-locations", "autoprovisioning-node-pool-defaults", "autoscaling-profile", "bigquery-destination", "boot-disk-kms-key", "channel", "cloud-run-config", "cluster-dns", "cluster-dns-domain", "cluster-dns-scope", "cluster-id", "component-config", "config-connector-config", "consumption-metering-config", "dataset-id", "description", "desired-addons-config", "desired-authenticator-groups-config", "desired-binary-authorization", "desired-cluster-autoscaling", "desired-database-encryption", "desired-datapath-provider", "desired-default-snat-status", "desired-dns-config", "desired-gcfs-config", "desired-identity-service-config", "desired-image-type", "desired-intra-node-visibility-config", "desired-l4ilb-subsetting-config", "desired-locations", "desired-logging-config", "desired-logging-service", "desired-master-authorized-networks-config", "desired-master-version", "desired-mesh-certificates", "desired-monitoring-config", "desired-monitoring-service", "desired-node-pool-auto-config-network-tags", "desired-node-pool-autoscaling", "desired-node-pool-id", "desired-node-version", "desired-notification-config", "desired-private-cluster-config", "desired-private-ipv6-google-access", "desired-release-channel", "desired-resource-usage-export-config", "desired-service-external-ips-config", "desired-shielded-nodes", "desired-vertical-pod-autoscaling", "desired-workload-identity-config", "disabled", "disk-size-gb", "disk-type", "dns-cache-config", "enable-certificates", "enable-components", "enable-integrity-monitoring", "enable-network-egress-metering", "enable-node-autoprovisioning", "enable-private-endpoint", "enable-private-nodes", "enable-secure-boot", "enabled", "event-type", 
"filter", "gce-persistent-disk-csi-driver-config", "gcp-filestore-csi-driver-config", "horizontal-pod-autoscaling", "http-load-balancing", "image-type", "key-name", "kubernetes-dashboard", "load-balancer-type", "management", "master-global-access-config", "master-ipv4-cidr-block", "max-node-count", "max-surge", "max-unavailable", "min-cpu-platform", "min-node-count", "name", "network-policy-config", "oauth-scopes", "peering-name", "private-endpoint", "project-id", "public-endpoint", "pubsub", "security-group", "service-account", "shielded-instance-config", "state", "tags", "topic", "update", "upgrade-options", "upgrade-settings", "workload-pool", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-repair", "auto-upgrade", "auto-upgrade-start-time", "autoprovisioned", "autoprovisioning-locations", "autoprovisioning-node-pool-defaults", "autoscaling-profile", "batch-node-count", "batch-percentage", "batch-soak-duration", "bigquery-destination", "blue-green-settings", "boot-disk-kms-key", "channel", "cloud-run-config", "cluster-dns", "cluster-dns-domain", "cluster-dns-scope", "cluster-id", "component-config", "config-connector-config", "consumption-metering-config", "dataset-id", "description", "desired-addons-config", "desired-authenticator-groups-config", "desired-binary-authorization", "desired-cluster-autoscaling", "desired-cost-management-config", "desired-database-encryption", "desired-datapath-provider", "desired-default-snat-status", "desired-dns-config", "desired-enable-private-endpoint", "desired-gateway-api-config", "desired-gcfs-config", "desired-identity-service-config", "desired-image-type", "desired-intra-node-visibility-config", "desired-l4ilb-subsetting-config", "desired-locations", "desired-logging-config", "desired-logging-service", "desired-master-authorized-networks-config", "desired-master-version", "desired-mesh-certificates", "desired-monitoring-config", "desired-monitoring-service", 
"desired-node-pool-auto-config-network-tags", "desired-node-pool-autoscaling", "desired-node-pool-id", "desired-node-pool-logging-config", "desired-node-version", "desired-notification-config", "desired-private-cluster-config", "desired-private-ipv6-google-access", "desired-release-channel", "desired-resource-usage-export-config", "desired-service-external-ips-config", "desired-shielded-nodes", "desired-stack-type", "desired-vertical-pod-autoscaling", "desired-workload-identity-config", "disabled", "disk-size-gb", "disk-type", "dns-cache-config", "enable-certificates", "enable-components", "enable-integrity-monitoring", "enable-network-egress-metering", "enable-node-autoprovisioning", "enable-private-endpoint", "enable-private-nodes", "enable-secure-boot", "enabled", "etag", "evaluation-mode", "event-type", "filter", "gce-persistent-disk-csi-driver-config", "gcp-filestore-csi-driver-config", "gcp-public-cidrs-access-enabled", "gke-backup-agent-config", "horizontal-pod-autoscaling", "http-load-balancing", "image-type", "key-name", "kubernetes-dashboard", "load-balancer-type", "location-policy", "managed-prometheus-config", "management", "master-global-access-config", "master-ipv4-cidr-block", "max-node-count", "max-surge", "max-unavailable", "min-cpu-platform", "min-node-count", "name", "network-policy-config", "node-pool-soak-duration", "oauth-scopes", "peering-name", "private-endpoint", "private-endpoint-subnetwork", "project-id", "public-endpoint", "pubsub", "security-group", "service-account", "shielded-instance-config", "standard-rollout-policy", "state", "strategy", "tags", "topic", "total-max-node-count", "total-min-node-count", "update", "upgrade-options", "upgrade-settings", "variant", "variant-config", "workload-pool", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5688,6 +5961,9 @@ where ("locations-clusters-list", Some(opt)) => { call_result = 
self._projects_locations_clusters_list(opt, dry_run, &mut err).await; }, + ("locations-clusters-node-pools-complete-upgrade", Some(opt)) => { + call_result = self._projects_locations_clusters_node_pools_complete_upgrade(opt, dry_run, &mut err).await; + }, ("locations-clusters-node-pools-create", Some(opt)) => { call_result = self._projects_locations_clusters_node_pools_create(opt, dry_run, &mut err).await; }, @@ -5935,7 +6211,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'aggregated-usable-subnetworks-list', 'locations-clusters-complete-ip-rotation', 'locations-clusters-create', 'locations-clusters-delete', 'locations-clusters-get', 'locations-clusters-get-jwks', 'locations-clusters-list', 'locations-clusters-node-pools-create', 'locations-clusters-node-pools-delete', 'locations-clusters-node-pools-get', 'locations-clusters-node-pools-list', 'locations-clusters-node-pools-rollback', 'locations-clusters-node-pools-set-autoscaling', 'locations-clusters-node-pools-set-management', 'locations-clusters-node-pools-set-size', 'locations-clusters-node-pools-update', 'locations-clusters-set-addons', 'locations-clusters-set-legacy-abac', 'locations-clusters-set-locations', 'locations-clusters-set-logging', 'locations-clusters-set-maintenance-policy', 'locations-clusters-set-master-auth', 'locations-clusters-set-monitoring', 'locations-clusters-set-network-policy', 'locations-clusters-set-resource-labels', 'locations-clusters-start-ip-rotation', 'locations-clusters-update', 'locations-clusters-update-master', 'locations-clusters-well-known-get-openid-configuration', 'locations-get-server-config', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'zones-clusters-addons', 'zones-clusters-complete-ip-rotation', 'zones-clusters-create', 'zones-clusters-delete', 'zones-clusters-get', 'zones-clusters-legacy-abac', 'zones-clusters-list', 'zones-clusters-locations', 'zones-clusters-logging', 
'zones-clusters-master', 'zones-clusters-monitoring', 'zones-clusters-node-pools-autoscaling', 'zones-clusters-node-pools-create', 'zones-clusters-node-pools-delete', 'zones-clusters-node-pools-get', 'zones-clusters-node-pools-list', 'zones-clusters-node-pools-rollback', 'zones-clusters-node-pools-set-management', 'zones-clusters-node-pools-set-size', 'zones-clusters-node-pools-update', 'zones-clusters-resource-labels', 'zones-clusters-set-maintenance-policy', 'zones-clusters-set-master-auth', 'zones-clusters-set-network-policy', 'zones-clusters-start-ip-rotation', 'zones-clusters-update', 'zones-get-serverconfig', 'zones-operations-cancel', 'zones-operations-get' and 'zones-operations-list'", vec![ + ("projects", "methods: 'aggregated-usable-subnetworks-list', 'locations-clusters-complete-ip-rotation', 'locations-clusters-create', 'locations-clusters-delete', 'locations-clusters-get', 'locations-clusters-get-jwks', 'locations-clusters-list', 'locations-clusters-node-pools-complete-upgrade', 'locations-clusters-node-pools-create', 'locations-clusters-node-pools-delete', 'locations-clusters-node-pools-get', 'locations-clusters-node-pools-list', 'locations-clusters-node-pools-rollback', 'locations-clusters-node-pools-set-autoscaling', 'locations-clusters-node-pools-set-management', 'locations-clusters-node-pools-set-size', 'locations-clusters-node-pools-update', 'locations-clusters-set-addons', 'locations-clusters-set-legacy-abac', 'locations-clusters-set-locations', 'locations-clusters-set-logging', 'locations-clusters-set-maintenance-policy', 'locations-clusters-set-master-auth', 'locations-clusters-set-monitoring', 'locations-clusters-set-network-policy', 'locations-clusters-set-resource-labels', 'locations-clusters-start-ip-rotation', 'locations-clusters-update', 'locations-clusters-update-master', 'locations-clusters-well-known-get-openid-configuration', 'locations-get-server-config', 'locations-operations-cancel', 'locations-operations-get', 
'locations-operations-list', 'zones-clusters-addons', 'zones-clusters-complete-ip-rotation', 'zones-clusters-create', 'zones-clusters-delete', 'zones-clusters-get', 'zones-clusters-legacy-abac', 'zones-clusters-list', 'zones-clusters-locations', 'zones-clusters-logging', 'zones-clusters-master', 'zones-clusters-monitoring', 'zones-clusters-node-pools-autoscaling', 'zones-clusters-node-pools-create', 'zones-clusters-node-pools-delete', 'zones-clusters-node-pools-get', 'zones-clusters-node-pools-list', 'zones-clusters-node-pools-rollback', 'zones-clusters-node-pools-set-management', 'zones-clusters-node-pools-set-size', 'zones-clusters-node-pools-update', 'zones-clusters-resource-labels', 'zones-clusters-set-maintenance-policy', 'zones-clusters-set-master-auth', 'zones-clusters-set-network-policy', 'zones-clusters-start-ip-rotation', 'zones-clusters-update', 'zones-get-serverconfig', 'zones-operations-cancel', 'zones-operations-get' and 'zones-operations-list'", vec![ ("aggregated-usable-subnetworks-list", Some(r##"Lists subnetworks that are usable for creating clusters in a project."##), "Details at http://byron.github.io/google-apis-rs/google_container1_cli/projects_aggregated-usable-subnetworks-list", @@ -6096,6 +6372,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-clusters-node-pools-complete-upgrade", + Some(r##"CompleteNodePoolUpgrade will signal an on-going node pool upgrade to complete."##), + "Details at http://byron.github.io/google-apis-rs/google_container1_cli/projects_locations-clusters-node-pools-complete-upgrade", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name (project, location, cluster, node pool id) of the node pool to complete upgrade. 
Specified in the format `projects/*/locations/*/clusters/*/nodePools/*`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -6794,7 +7098,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -6834,7 +7138,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -6874,7 +7178,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the parent field."##), + Some(r##"Deprecated. 
The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the parent field."##), Some(true), Some(false)), @@ -6908,7 +7212,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -6942,7 +7246,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -6976,7 +7280,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7016,7 +7320,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. 
The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the parent field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the parent field."##), Some(true), Some(false)), @@ -7044,7 +7348,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7084,7 +7388,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7124,7 +7428,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). 
This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7164,7 +7468,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7204,7 +7508,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7250,7 +7554,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). This field has been deprecated and replaced by the parent field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the parent field."##), Some(true), Some(false)), @@ -7290,7 +7594,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). 
This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7330,7 +7634,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7370,7 +7674,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). This field has been deprecated and replaced by the parent field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the parent field."##), Some(true), Some(false)), @@ -7404,7 +7708,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). 
This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7450,7 +7754,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7496,7 +7800,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7542,7 +7846,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7588,7 +7892,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). 
This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7628,7 +7932,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Required. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840)."##), + Some(r##"Required. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects)."##), Some(true), Some(false)), @@ -7668,7 +7972,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7708,7 +8012,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7748,7 +8052,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. 
The Google Developers Console [project ID or project number](https://developers.google.com/console/help/new/#projectnumber). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7788,7 +8092,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7828,7 +8132,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7856,7 +8160,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). 
This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7896,7 +8200,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the name field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). This field has been deprecated and replaced by the name field."##), Some(true), Some(false)), @@ -7930,7 +8234,7 @@ async fn main() { vec![ (Some(r##"project-id"##), None, - Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://support.google.com/cloud/answer/6158840). This field has been deprecated and replaced by the parent field."##), + Some(r##"Deprecated. The Google Developers Console [project ID or project number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). 
This field has been deprecated and replaced by the parent field."##), Some(true), Some(false)), @@ -7958,7 +8262,7 @@ async fn main() { let mut app = App::new("container1") .author("Sebastian Thiel ") - .version("4.0.1+20220215") + .version("5.0.2+20230104") .about("Builds and manages container-based applications, powered by the open source Kubernetes technology.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_container1_cli") .arg(Arg::with_name("url") diff --git a/gen/container1/Cargo.toml b/gen/container1/Cargo.toml index 10ea1a0596..cad0ea692d 100644 --- a/gen/container1/Cargo.toml +++ b/gen/container1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-container1" -version = "5.0.2-beta-1+20230104" +version = "5.0.2+20230104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Container (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/container1" homepage = "https://cloud.google.com/container-engine/" -documentation = "https://docs.rs/google-container1/5.0.2-beta-1+20230104" +documentation = "https://docs.rs/google-container1/5.0.2+20230104" license = "MIT" keywords = ["container", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/container1/README.md b/gen/container1/README.md index b99728ca35..fe13eab523 100644 --- a/gen/container1/README.md +++ b/gen/container1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-container1` library allows access to all features of the *Google Container* service. -This documentation was generated from *Container* crate version *5.0.2-beta-1+20230104*, where *20230104* is the exact revision of the *container:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Container* crate version *5.0.2+20230104*, where *20230104* is the exact revision of the *container:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Container* *v1* API can be found at the [official documentation site](https://cloud.google.com/container-engine/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/Container) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-container1/5.0.2+20230104/google_container1/Container) ... * projects - * [*aggregated usable subnetworks list*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectAggregatedUsableSubnetworkListCall), [*locations clusters complete ip rotation*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterCompleteIpRotationCall), [*locations clusters create*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterCreateCall), [*locations clusters delete*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterDeleteCall), [*locations clusters get*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterGetCall), [*locations clusters get jwks*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterGetJwkCall), [*locations clusters list*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterListCall), [*locations clusters node pools complete upgrade*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolCompleteUpgradeCall), [*locations clusters node pools 
create*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolCreateCall), [*locations clusters node pools delete*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolDeleteCall), [*locations clusters node pools get*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolGetCall), [*locations clusters node pools list*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolListCall), [*locations clusters node pools rollback*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolRollbackCall), [*locations clusters node pools set autoscaling*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolSetAutoscalingCall), [*locations clusters node pools set management*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolSetManagementCall), [*locations clusters node pools set size*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolSetSizeCall), [*locations clusters node pools update*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterNodePoolUpdateCall), [*locations clusters set addons*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetAddonCall), [*locations clusters set legacy abac*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetLegacyAbacCall), [*locations clusters set locations*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetLocationCall), [*locations clusters set 
logging*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetLoggingCall), [*locations clusters set maintenance policy*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetMaintenancePolicyCall), [*locations clusters set master auth*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetMasterAuthCall), [*locations clusters set monitoring*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetMonitoringCall), [*locations clusters set network policy*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetNetworkPolicyCall), [*locations clusters set resource labels*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterSetResourceLabelCall), [*locations clusters start ip rotation*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterStartIpRotationCall), [*locations clusters update*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterUpdateCall), [*locations clusters update master*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterUpdateMasterCall), [*locations clusters well-known get openid-configuration*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationClusterWellKnownGetOpenidConfigurationCall), [*locations get server config*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationGetServerConfigCall), [*locations operations cancel*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationOperationCancelCall), [*locations operations 
get*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectLocationOperationListCall), [*zones clusters addons*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterAddonCall), [*zones clusters complete ip rotation*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterCompleteIpRotationCall), [*zones clusters create*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterCreateCall), [*zones clusters delete*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterDeleteCall), [*zones clusters get*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterGetCall), [*zones clusters legacy abac*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterLegacyAbacCall), [*zones clusters list*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterListCall), [*zones clusters locations*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterLocationCall), [*zones clusters logging*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterLoggingCall), [*zones clusters master*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterMasterCall), [*zones clusters monitoring*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterMonitoringCall), [*zones clusters node pools autoscaling*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolAutoscalingCall), [*zones clusters node pools 
create*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolCreateCall), [*zones clusters node pools delete*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolDeleteCall), [*zones clusters node pools get*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolGetCall), [*zones clusters node pools list*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolListCall), [*zones clusters node pools rollback*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolRollbackCall), [*zones clusters node pools set management*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolSetManagementCall), [*zones clusters node pools set size*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolSetSizeCall), [*zones clusters node pools update*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterNodePoolUpdateCall), [*zones clusters resource labels*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterResourceLabelCall), [*zones clusters set maintenance policy*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterSetMaintenancePolicyCall), [*zones clusters set master auth*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterSetMasterAuthCall), [*zones clusters set network policy*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterSetNetworkPolicyCall), [*zones clusters start ip 
rotation*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterStartIpRotationCall), [*zones clusters update*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneClusterUpdateCall), [*zones get serverconfig*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneGetServerconfigCall), [*zones operations cancel*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneOperationCancelCall), [*zones operations get*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneOperationGetCall) and [*zones operations list*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/api::ProjectZoneOperationListCall) + * [*aggregated usable subnetworks list*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectAggregatedUsableSubnetworkListCall), [*locations clusters complete ip rotation*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterCompleteIpRotationCall), [*locations clusters create*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterCreateCall), [*locations clusters delete*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterDeleteCall), [*locations clusters get*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterGetCall), [*locations clusters get jwks*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterGetJwkCall), [*locations clusters list*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterListCall), [*locations clusters node pools complete upgrade*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolCompleteUpgradeCall), 
[*locations clusters node pools create*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolCreateCall), [*locations clusters node pools delete*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolDeleteCall), [*locations clusters node pools get*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolGetCall), [*locations clusters node pools list*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolListCall), [*locations clusters node pools rollback*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolRollbackCall), [*locations clusters node pools set autoscaling*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolSetAutoscalingCall), [*locations clusters node pools set management*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolSetManagementCall), [*locations clusters node pools set size*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolSetSizeCall), [*locations clusters node pools update*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterNodePoolUpdateCall), [*locations clusters set addons*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetAddonCall), [*locations clusters set legacy abac*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetLegacyAbacCall), [*locations clusters set locations*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetLocationCall), [*locations clusters set 
logging*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetLoggingCall), [*locations clusters set maintenance policy*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetMaintenancePolicyCall), [*locations clusters set master auth*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetMasterAuthCall), [*locations clusters set monitoring*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetMonitoringCall), [*locations clusters set network policy*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetNetworkPolicyCall), [*locations clusters set resource labels*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterSetResourceLabelCall), [*locations clusters start ip rotation*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterStartIpRotationCall), [*locations clusters update*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterUpdateCall), [*locations clusters update master*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterUpdateMasterCall), [*locations clusters well-known get openid-configuration*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationClusterWellKnownGetOpenidConfigurationCall), [*locations get server config*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationGetServerConfigCall), [*locations operations cancel*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationOperationGetCall), [*locations operations 
list*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectLocationOperationListCall), [*zones clusters addons*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterAddonCall), [*zones clusters complete ip rotation*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterCompleteIpRotationCall), [*zones clusters create*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterCreateCall), [*zones clusters delete*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterDeleteCall), [*zones clusters get*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterGetCall), [*zones clusters legacy abac*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterLegacyAbacCall), [*zones clusters list*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterListCall), [*zones clusters locations*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterLocationCall), [*zones clusters logging*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterLoggingCall), [*zones clusters master*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterMasterCall), [*zones clusters monitoring*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterMonitoringCall), [*zones clusters node pools autoscaling*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolAutoscalingCall), [*zones clusters node pools create*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolCreateCall), [*zones clusters node pools 
delete*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolDeleteCall), [*zones clusters node pools get*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolGetCall), [*zones clusters node pools list*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolListCall), [*zones clusters node pools rollback*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolRollbackCall), [*zones clusters node pools set management*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolSetManagementCall), [*zones clusters node pools set size*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolSetSizeCall), [*zones clusters node pools update*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterNodePoolUpdateCall), [*zones clusters resource labels*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterResourceLabelCall), [*zones clusters set maintenance policy*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterSetMaintenancePolicyCall), [*zones clusters set master auth*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterSetMasterAuthCall), [*zones clusters set network policy*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterSetNetworkPolicyCall), [*zones clusters start ip rotation*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterStartIpRotationCall), [*zones clusters update*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneClusterUpdateCall), [*zones get 
serverconfig*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneGetServerconfigCall), [*zones operations cancel*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneOperationCancelCall), [*zones operations get*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneOperationGetCall) and [*zones operations list*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/api::ProjectZoneOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/Container)** +* **[Hub](https://docs.rs/google-container1/5.0.2+20230104/google_container1/Container)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::CallBuilder) -* **[Resources](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::CallBuilder) +* **[Resources](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::Part)** + * **[Parts](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::Part)** * 
a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -167,17 +167,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -187,29 +187,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::Delegate) to the -[Method Builder](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::Delegate) to the +[Method Builder](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::RequestValue) and -[decodable](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::RequestValue) and +[decodable](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-container1/5.0.2-beta-1+20230104/google_container1/client::RequestValue) are moved +* [request values](https://docs.rs/google-container1/5.0.2+20230104/google_container1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/container1/src/api.rs b/gen/container1/src/api.rs index f1330a3123..33e8bf6d0d 100644 --- a/gen/container1/src/api.rs +++ b/gen/container1/src/api.rs @@ -123,7 +123,7 @@ impl<'a, S> Container { Container { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://container.googleapis.com/".to_string(), _root_url: "https://container.googleapis.com/".to_string(), } @@ -134,7 +134,7 @@ impl<'a, S> Container { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/container1/src/client.rs b/gen/container1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/container1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/container1/src/lib.rs b/gen/container1/src/lib.rs index 06bbaec653..3213af84f7 100644 --- a/gen/container1/src/lib.rs +++ b/gen/container1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Container* crate version *5.0.2-beta-1+20230104*, where *20230104* is the exact revision of the *container:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Container* crate version *5.0.2+20230104*, where *20230104* is the exact revision of the *container:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Container* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/container-engine/). diff --git a/gen/containeranalysis1-cli/Cargo.toml b/gen/containeranalysis1-cli/Cargo.toml index 696d07b1bd..114fe357f7 100644 --- a/gen/containeranalysis1-cli/Cargo.toml +++ b/gen/containeranalysis1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-containeranalysis1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Container Analysis (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/containeranalysis1-cli" @@ -20,13 +20,13 @@ name = "containeranalysis1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-containeranalysis1] path = "../containeranalysis1" -version = "4.0.1+20220225" +version = "5.0.2+20230106" + diff --git a/gen/containeranalysis1-cli/README.md b/gen/containeranalysis1-cli/README.md index b6bd09a5e5..2645603f29 100644 --- a/gen/containeranalysis1-cli/README.md +++ b/gen/containeranalysis1-cli/README.md @@ 
-25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Container Analysis* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Container Analysis* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash containeranalysis1 [options] diff --git a/gen/containeranalysis1-cli/mkdocs.yml b/gen/containeranalysis1-cli/mkdocs.yml index fc1becbf5a..ba04c32538 100644 --- a/gen/containeranalysis1-cli/mkdocs.yml +++ b/gen/containeranalysis1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Container Analysis v4.0.1+20220225 +site_name: Container Analysis v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-containeranalysis1-cli site_description: A complete library to interact with Container Analysis (protocol v1) @@ -7,29 +7,30 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/containeranalysi docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_notes-batch-create.md', 'Projects', 'Notes Batch Create'] -- ['projects_notes-create.md', 'Projects', 'Notes Create'] -- ['projects_notes-delete.md', 'Projects', 'Notes Delete'] -- ['projects_notes-get.md', 'Projects', 'Notes Get'] -- ['projects_notes-get-iam-policy.md', 'Projects', 'Notes Get Iam Policy'] -- ['projects_notes-list.md', 'Projects', 'Notes List'] -- ['projects_notes-occurrences-list.md', 'Projects', 'Notes Occurrences List'] -- ['projects_notes-patch.md', 'Projects', 'Notes Patch'] -- ['projects_notes-set-iam-policy.md', 'Projects', 'Notes Set Iam Policy'] -- ['projects_notes-test-iam-permissions.md', 'Projects', 'Notes Test Iam Permissions'] -- ['projects_occurrences-batch-create.md', 'Projects', 'Occurrences Batch Create'] -- ['projects_occurrences-create.md', 'Projects', 'Occurrences Create'] -- ['projects_occurrences-delete.md', 'Projects', 'Occurrences Delete'] -- ['projects_occurrences-get.md', 'Projects', 
'Occurrences Get'] -- ['projects_occurrences-get-iam-policy.md', 'Projects', 'Occurrences Get Iam Policy'] -- ['projects_occurrences-get-notes.md', 'Projects', 'Occurrences Get Notes'] -- ['projects_occurrences-get-vulnerability-summary.md', 'Projects', 'Occurrences Get Vulnerability Summary'] -- ['projects_occurrences-list.md', 'Projects', 'Occurrences List'] -- ['projects_occurrences-patch.md', 'Projects', 'Occurrences Patch'] -- ['projects_occurrences-set-iam-policy.md', 'Projects', 'Occurrences Set Iam Policy'] -- ['projects_occurrences-test-iam-permissions.md', 'Projects', 'Occurrences Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Notes Batch Create': 'projects_notes-batch-create.md' + - 'Notes Create': 'projects_notes-create.md' + - 'Notes Delete': 'projects_notes-delete.md' + - 'Notes Get': 'projects_notes-get.md' + - 'Notes Get Iam Policy': 'projects_notes-get-iam-policy.md' + - 'Notes List': 'projects_notes-list.md' + - 'Notes Occurrences List': 'projects_notes-occurrences-list.md' + - 'Notes Patch': 'projects_notes-patch.md' + - 'Notes Set Iam Policy': 'projects_notes-set-iam-policy.md' + - 'Notes Test Iam Permissions': 'projects_notes-test-iam-permissions.md' + - 'Occurrences Batch Create': 'projects_occurrences-batch-create.md' + - 'Occurrences Create': 'projects_occurrences-create.md' + - 'Occurrences Delete': 'projects_occurrences-delete.md' + - 'Occurrences Get': 'projects_occurrences-get.md' + - 'Occurrences Get Iam Policy': 'projects_occurrences-get-iam-policy.md' + - 'Occurrences Get Notes': 'projects_occurrences-get-notes.md' + - 'Occurrences Get Vulnerability Summary': 'projects_occurrences-get-vulnerability-summary.md' + - 'Occurrences List': 'projects_occurrences-list.md' + - 'Occurrences Patch': 'projects_occurrences-patch.md' + - 'Occurrences Set Iam Policy': 'projects_occurrences-set-iam-policy.md' + - 'Occurrences Test Iam Permissions': 'projects_occurrences-test-iam-permissions.md' theme: readthedocs diff --git 
a/gen/containeranalysis1-cli/src/client.rs b/gen/containeranalysis1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/containeranalysis1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/containeranalysis1-cli/src/main.rs b/gen/containeranalysis1-cli/src/main.rs index 5475c4a376..e1a9a68591 100644 --- a/gen/containeranalysis1-cli/src/main.rs +++ b/gen/containeranalysis1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_containeranalysis1::{api, Error, oauth2}; +use google_containeranalysis1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -179,7 +178,21 @@ where "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "long-description" => Some(("longDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.architecture" => Some(("package.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.cpe-uri" => Some(("package.cpeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.description" => Some(("package.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.comments" => Some(("package.license.comments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.expression" => Some(("package.license.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.maintainer" => Some(("package.maintainer", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "package.name" => Some(("package.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.package-type" => Some(("package.packageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.url" => Some(("package.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.epoch" => Some(("package.version.epoch", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "package.version.full-name" => Some(("package.version.fullName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.inclusive" => Some(("package.version.inclusive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package.version.kind" => Some(("package.version.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.name" => Some(("package.version.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.revision" => Some(("package.version.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "related-note-names" => Some(("relatedNoteNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "short-description" => Some(("shortDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -209,10 +222,11 @@ where "vulnerability.cvss-v3.privileges-required" => Some(("vulnerability.cvssV3.privilegesRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.scope" => Some(("vulnerability.cvssV3.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.user-interaction" => Some(("vulnerability.cvssV3.userInteraction", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "vulnerability.cvss-version" => Some(("vulnerability.cvssVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.severity" => Some(("vulnerability.severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.source-update-time" => Some(("vulnerability.sourceUpdateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-kind", "attack-complexity", "attack-vector", "attestation", "availability-impact", "base-score", "build", "builder-version", "cis-benchmark", "compliance", "confidentiality-impact", "create-time", "cvss-score", "cvss-v3", "deployment", "description", "discovery", "dsse-attestation", "epoch", "expiration-time", "exploitability-score", "fingerprint", "full-name", "hint", "human-readable-name", "identity", "image", "impact-score", "inclusive", "integrity-impact", "kb-article-ids", "kind", "last-published-timestamp", "long-description", "name", "package", "privileges-required", "profile-level", "rationale", "related-note-names", "remediation", "resource-uri", "resource-url", "revision", "scan-instructions", "scope", "severity", "short-description", "source-update-time", "support-url", "title", "update-id", "update-time", "upgrade", "user-interaction", "v1-name", "v2-blob", "v2-name", "version", "vulnerability", "windows-update"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-kind", "architecture", "attack-complexity", "attack-vector", "attestation", "availability-impact", "base-score", "build", "builder-version", "cis-benchmark", "comments", "compliance", "confidentiality-impact", "cpe-uri", "create-time", "cvss-score", "cvss-v3", "cvss-version", "deployment", "description", "discovery", "dsse-attestation", "epoch", "expiration-time", "exploitability-score", "expression", "fingerprint", "full-name", "hint", "human-readable-name", "identity", 
"image", "impact-score", "inclusive", "integrity-impact", "kb-article-ids", "kind", "last-published-timestamp", "license", "long-description", "maintainer", "name", "package", "package-type", "privileges-required", "profile-level", "rationale", "related-note-names", "remediation", "resource-uri", "resource-url", "revision", "scan-instructions", "scope", "severity", "short-description", "source-update-time", "support-url", "title", "update-id", "update-time", "upgrade", "url", "user-interaction", "v1-name", "v2-blob", "v2-name", "version", "vulnerability", "windows-update"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -475,7 +489,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -537,7 +551,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -633,7 +647,21 @@ where "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "long-description" => Some(("longDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.architecture" => Some(("package.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.cpe-uri" => Some(("package.cpeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.description" => 
Some(("package.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.comments" => Some(("package.license.comments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.expression" => Some(("package.license.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.maintainer" => Some(("package.maintainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "package.name" => Some(("package.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.package-type" => Some(("package.packageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.url" => Some(("package.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.epoch" => Some(("package.version.epoch", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "package.version.full-name" => Some(("package.version.fullName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.inclusive" => Some(("package.version.inclusive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package.version.kind" => Some(("package.version.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.name" => Some(("package.version.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.revision" => Some(("package.version.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "related-note-names" => Some(("relatedNoteNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "short-description" => Some(("shortDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -663,10 +691,11 @@ where 
"vulnerability.cvss-v3.privileges-required" => Some(("vulnerability.cvssV3.privilegesRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.scope" => Some(("vulnerability.cvssV3.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.user-interaction" => Some(("vulnerability.cvssV3.userInteraction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-version" => Some(("vulnerability.cvssVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.severity" => Some(("vulnerability.severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.source-update-time" => Some(("vulnerability.sourceUpdateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-kind", "attack-complexity", "attack-vector", "attestation", "availability-impact", "base-score", "build", "builder-version", "cis-benchmark", "compliance", "confidentiality-impact", "create-time", "cvss-score", "cvss-v3", "deployment", "description", "discovery", "dsse-attestation", "epoch", "expiration-time", "exploitability-score", "fingerprint", "full-name", "hint", "human-readable-name", "identity", "image", "impact-score", "inclusive", "integrity-impact", "kb-article-ids", "kind", "last-published-timestamp", "long-description", "name", "package", "privileges-required", "profile-level", "rationale", "related-note-names", "remediation", "resource-uri", "resource-url", "revision", "scan-instructions", "scope", "severity", "short-description", "source-update-time", "support-url", "title", "update-id", "update-time", "upgrade", "user-interaction", "v1-name", "v2-blob", "v2-name", "version", "vulnerability", "windows-update"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-kind", "architecture", "attack-complexity", 
"attack-vector", "attestation", "availability-impact", "base-score", "build", "builder-version", "cis-benchmark", "comments", "compliance", "confidentiality-impact", "cpe-uri", "create-time", "cvss-score", "cvss-v3", "cvss-version", "deployment", "description", "discovery", "dsse-attestation", "epoch", "expiration-time", "exploitability-score", "expression", "fingerprint", "full-name", "hint", "human-readable-name", "identity", "image", "impact-score", "inclusive", "integrity-impact", "kb-article-ids", "kind", "last-published-timestamp", "license", "long-description", "maintainer", "name", "package", "package-type", "privileges-required", "profile-level", "rationale", "related-note-names", "remediation", "resource-uri", "resource-url", "revision", "scan-instructions", "scope", "severity", "short-description", "source-update-time", "support-url", "title", "update-id", "update-time", "upgrade", "url", "user-interaction", "v1-name", "v2-blob", "v2-name", "version", "vulnerability", "windows-update"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -681,7 +710,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1046,6 +1075,18 @@ where "build.intoto-statement.slsa-provenance.recipe.defined-in-material" => Some(("build.intotoStatement.slsaProvenance.recipe.definedInMaterial", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "build.intoto-statement.slsa-provenance.recipe.entry-point" => Some(("build.intotoStatement.slsaProvenance.recipe.entryPoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "build.intoto-statement.slsa-provenance.recipe.type" => 
Some(("build.intotoStatement.slsaProvenance.recipe.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.build-type" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.buildType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.builder.id" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.builder.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.invocation.config-source.digest" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.invocation.configSource.digest", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "build.intoto-statement.slsa-provenance-zero-two.invocation.config-source.entry-point" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.invocation.configSource.entryPoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.invocation.config-source.uri" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.invocation.configSource.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.build-finished-on" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.buildFinishedOn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.build-invocation-id" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.buildInvocationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.build-started-on" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.buildStartedOn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"build.intoto-statement.slsa-provenance-zero-two.metadata.completeness.environment" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.completeness.environment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.completeness.materials" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.completeness.materials", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.completeness.parameters" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.completeness.parameters", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.reproducible" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.reproducible", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "build.provenance.build-options" => Some(("build.provenance.buildOptions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "build.provenance.builder-version" => Some(("build.provenance.builderVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "build.provenance.create-time" => Some(("build.provenance.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1081,6 +1122,7 @@ where "deployment.resource-uri" => Some(("deployment.resourceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "deployment.undeploy-time" => Some(("deployment.undeployTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deployment.user-email" => Some(("deployment.userEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "discovery.analysis-completed.analysis-type" => Some(("discovery.analysisCompleted.analysisType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "discovery.analysis-status" => 
Some(("discovery.analysisStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "discovery.analysis-status-error.code" => Some(("discovery.analysisStatusError.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "discovery.analysis-status-error.message" => Some(("discovery.analysisStatusError.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1115,6 +1157,18 @@ where "dsse-attestation.statement.slsa-provenance.recipe.defined-in-material" => Some(("dsseAttestation.statement.slsaProvenance.recipe.definedInMaterial", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "dsse-attestation.statement.slsa-provenance.recipe.entry-point" => Some(("dsseAttestation.statement.slsaProvenance.recipe.entryPoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "dsse-attestation.statement.slsa-provenance.recipe.type" => Some(("dsseAttestation.statement.slsaProvenance.recipe.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.build-type" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.buildType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.builder.id" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.builder.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.invocation.config-source.digest" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.invocation.configSource.digest", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "dsse-attestation.statement.slsa-provenance-zero-two.invocation.config-source.entry-point" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.invocation.configSource.entryPoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"dsse-attestation.statement.slsa-provenance-zero-two.invocation.config-source.uri" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.invocation.configSource.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.build-finished-on" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.buildFinishedOn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.build-invocation-id" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.buildInvocationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.build-started-on" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.buildStartedOn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.completeness.environment" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.completeness.environment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.completeness.materials" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.completeness.materials", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.completeness.parameters" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.completeness.parameters", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.reproducible" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.reproducible", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "envelope.payload" => Some(("envelope.payload", JsonTypeInfo 
{ jtype: JsonType::String, ctype: ComplexType::Pod })), "envelope.payload-type" => Some(("envelope.payloadType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "image.base-resource-url" => Some(("image.baseResourceUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1125,7 +1179,18 @@ where "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note-name" => Some(("noteName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.architecture" => Some(("package.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.cpe-uri" => Some(("package.cpeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.comments" => Some(("package.license.comments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.expression" => Some(("package.license.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "package.name" => Some(("package.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.package-type" => Some(("package.packageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.epoch" => Some(("package.version.epoch", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "package.version.full-name" => Some(("package.version.fullName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.inclusive" => Some(("package.version.inclusive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package.version.kind" => Some(("package.version.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.name" => Some(("package.version.name", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "package.version.revision" => Some(("package.version.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "remediation" => Some(("remediation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-uri" => Some(("resourceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1148,6 +1213,7 @@ where "upgrade.windows-update.support-url" => Some(("upgrade.windowsUpdate.supportUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "upgrade.windows-update.title" => Some(("upgrade.windowsUpdate.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-score" => Some(("vulnerability.cvssScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-version" => Some(("vulnerability.cvssVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvssv3.attack-complexity" => Some(("vulnerability.cvssv3.attackComplexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvssv3.attack-vector" => Some(("vulnerability.cvssv3.attackVector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvssv3.authentication" => Some(("vulnerability.cvssv3.authentication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1167,7 +1233,7 @@ where "vulnerability.short-description" => Some(("vulnerability.shortDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.type" => Some(("vulnerability.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["-type", "address", "alias-context", "analysis-status", "analysis-status-error", 
"archive-time", "arguments", "artifact-storage-source-uri", "attack-complexity", "attack-vector", "attestation", "authentication", "availability-impact", "base-resource-url", "base-score", "build", "build-finished-on", "build-invocation-id", "build-options", "build-started-on", "builder", "builder-config", "builder-version", "classification", "cloud-repo", "code", "completeness", "compliance", "confidentiality-impact", "config", "context", "continuous-analysis", "cpe", "cpe-uri", "create-time", "creator", "cve", "cvss-score", "cvssv3", "defined-in-material", "deploy-time", "deployment", "description", "discovery", "distance", "distribution", "dsse-attestation", "effective-severity", "end-time", "entry-point", "envelope", "environment", "epoch", "exploitability-score", "fingerprint", "fix-available", "full-name", "gerrit", "gerrit-project", "git", "host-uri", "id", "identity", "image", "impact-score", "inclusive", "integrity-impact", "intoto-provenance", "intoto-statement", "kb-article-ids", "kind", "labels", "last-published-timestamp", "last-scan-time", "logs-uri", "long-description", "materials", "message", "metadata", "name", "non-compliance-reason", "note-name", "package", "parsed-version", "payload", "payload-type", "platform", "predicate-type", "privileges-required", "project-id", "project-repo-id", "provenance", "provenance-bytes", "recipe", "remediation", "repo-id", "repo-name", "reproducible", "resource-uri", "revision", "revision-id", "scope", "serialized-payload", "severity", "short-description", "slsa-provenance", "source-provenance", "start-time", "statement", "support-url", "title", "trigger-id", "type", "uid", "undeploy-time", "update-id", "update-time", "upgrade", "url", "user-email", "user-interaction", "v1-name", "v2-blob", "v2-name", "vulnerability", "windows-update"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["-type", "address", "alias-context", "analysis-completed", "analysis-status", "analysis-status-error", "analysis-type", 
"architecture", "archive-time", "arguments", "artifact-storage-source-uri", "attack-complexity", "attack-vector", "attestation", "authentication", "availability-impact", "base-resource-url", "base-score", "build", "build-finished-on", "build-invocation-id", "build-options", "build-started-on", "build-type", "builder", "builder-config", "builder-version", "classification", "cloud-repo", "code", "comments", "completeness", "compliance", "confidentiality-impact", "config", "config-source", "context", "continuous-analysis", "cpe", "cpe-uri", "create-time", "creator", "cve", "cvss-score", "cvss-version", "cvssv3", "defined-in-material", "deploy-time", "deployment", "description", "digest", "discovery", "distance", "distribution", "dsse-attestation", "effective-severity", "end-time", "entry-point", "envelope", "environment", "epoch", "exploitability-score", "expression", "fingerprint", "fix-available", "full-name", "gerrit", "gerrit-project", "git", "host-uri", "id", "identity", "image", "impact-score", "inclusive", "integrity-impact", "intoto-provenance", "intoto-statement", "invocation", "kb-article-ids", "kind", "labels", "last-published-timestamp", "last-scan-time", "license", "logs-uri", "long-description", "materials", "message", "metadata", "name", "non-compliance-reason", "note-name", "package", "package-type", "parameters", "parsed-version", "payload", "payload-type", "platform", "predicate-type", "privileges-required", "project-id", "project-repo-id", "provenance", "provenance-bytes", "recipe", "remediation", "repo-id", "repo-name", "reproducible", "resource-uri", "revision", "revision-id", "scope", "serialized-payload", "severity", "short-description", "slsa-provenance", "slsa-provenance-zero-two", "source-provenance", "start-time", "statement", "support-url", "title", "trigger-id", "type", "uid", "undeploy-time", "update-id", "update-time", "upgrade", "uri", "url", "user-email", "user-interaction", "v1-name", "v2-blob", "v2-name", "version", "vulnerability", 
"windows-update"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1534,7 +1600,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1647,6 +1713,18 @@ where "build.intoto-statement.slsa-provenance.recipe.defined-in-material" => Some(("build.intotoStatement.slsaProvenance.recipe.definedInMaterial", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "build.intoto-statement.slsa-provenance.recipe.entry-point" => Some(("build.intotoStatement.slsaProvenance.recipe.entryPoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "build.intoto-statement.slsa-provenance.recipe.type" => Some(("build.intotoStatement.slsaProvenance.recipe.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.build-type" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.buildType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.builder.id" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.builder.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.invocation.config-source.digest" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.invocation.configSource.digest", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "build.intoto-statement.slsa-provenance-zero-two.invocation.config-source.entry-point" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.invocation.configSource.entryPoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"build.intoto-statement.slsa-provenance-zero-two.invocation.config-source.uri" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.invocation.configSource.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.build-finished-on" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.buildFinishedOn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.build-invocation-id" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.buildInvocationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.build-started-on" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.buildStartedOn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.completeness.environment" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.completeness.environment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.completeness.materials" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.completeness.materials", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.completeness.parameters" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.completeness.parameters", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "build.intoto-statement.slsa-provenance-zero-two.metadata.reproducible" => Some(("build.intotoStatement.slsaProvenanceZeroTwo.metadata.reproducible", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "build.provenance.build-options" => Some(("build.provenance.buildOptions", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Map })), "build.provenance.builder-version" => Some(("build.provenance.builderVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "build.provenance.create-time" => Some(("build.provenance.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1682,6 +1760,7 @@ where "deployment.resource-uri" => Some(("deployment.resourceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "deployment.undeploy-time" => Some(("deployment.undeployTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deployment.user-email" => Some(("deployment.userEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "discovery.analysis-completed.analysis-type" => Some(("discovery.analysisCompleted.analysisType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "discovery.analysis-status" => Some(("discovery.analysisStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "discovery.analysis-status-error.code" => Some(("discovery.analysisStatusError.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "discovery.analysis-status-error.message" => Some(("discovery.analysisStatusError.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1716,6 +1795,18 @@ where "dsse-attestation.statement.slsa-provenance.recipe.defined-in-material" => Some(("dsseAttestation.statement.slsaProvenance.recipe.definedInMaterial", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "dsse-attestation.statement.slsa-provenance.recipe.entry-point" => Some(("dsseAttestation.statement.slsaProvenance.recipe.entryPoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "dsse-attestation.statement.slsa-provenance.recipe.type" => Some(("dsseAttestation.statement.slsaProvenance.recipe.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"dsse-attestation.statement.slsa-provenance-zero-two.build-type" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.buildType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.builder.id" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.builder.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.invocation.config-source.digest" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.invocation.configSource.digest", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "dsse-attestation.statement.slsa-provenance-zero-two.invocation.config-source.entry-point" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.invocation.configSource.entryPoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.invocation.config-source.uri" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.invocation.configSource.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.build-finished-on" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.buildFinishedOn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.build-invocation-id" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.buildInvocationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.build-started-on" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.buildStartedOn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.completeness.environment" => 
Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.completeness.environment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.completeness.materials" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.completeness.materials", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.completeness.parameters" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.completeness.parameters", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dsse-attestation.statement.slsa-provenance-zero-two.metadata.reproducible" => Some(("dsseAttestation.statement.slsaProvenanceZeroTwo.metadata.reproducible", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "envelope.payload" => Some(("envelope.payload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "envelope.payload-type" => Some(("envelope.payloadType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "image.base-resource-url" => Some(("image.baseResourceUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1726,7 +1817,18 @@ where "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note-name" => Some(("noteName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.architecture" => Some(("package.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.cpe-uri" => Some(("package.cpeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.comments" => Some(("package.license.comments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.expression" => 
Some(("package.license.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "package.name" => Some(("package.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.package-type" => Some(("package.packageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.epoch" => Some(("package.version.epoch", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "package.version.full-name" => Some(("package.version.fullName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.inclusive" => Some(("package.version.inclusive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package.version.kind" => Some(("package.version.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.name" => Some(("package.version.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.revision" => Some(("package.version.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "remediation" => Some(("remediation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-uri" => Some(("resourceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1749,6 +1851,7 @@ where "upgrade.windows-update.support-url" => Some(("upgrade.windowsUpdate.supportUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "upgrade.windows-update.title" => Some(("upgrade.windowsUpdate.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-score" => Some(("vulnerability.cvssScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-version" => Some(("vulnerability.cvssVersion", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvssv3.attack-complexity" => Some(("vulnerability.cvssv3.attackComplexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvssv3.attack-vector" => Some(("vulnerability.cvssv3.attackVector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvssv3.authentication" => Some(("vulnerability.cvssv3.authentication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1768,7 +1871,7 @@ where "vulnerability.short-description" => Some(("vulnerability.shortDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.type" => Some(("vulnerability.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["-type", "address", "alias-context", "analysis-status", "analysis-status-error", "archive-time", "arguments", "artifact-storage-source-uri", "attack-complexity", "attack-vector", "attestation", "authentication", "availability-impact", "base-resource-url", "base-score", "build", "build-finished-on", "build-invocation-id", "build-options", "build-started-on", "builder", "builder-config", "builder-version", "classification", "cloud-repo", "code", "completeness", "compliance", "confidentiality-impact", "config", "context", "continuous-analysis", "cpe", "cpe-uri", "create-time", "creator", "cve", "cvss-score", "cvssv3", "defined-in-material", "deploy-time", "deployment", "description", "discovery", "distance", "distribution", "dsse-attestation", "effective-severity", "end-time", "entry-point", "envelope", "environment", "epoch", "exploitability-score", "fingerprint", "fix-available", "full-name", "gerrit", "gerrit-project", "git", "host-uri", "id", "identity", "image", "impact-score", "inclusive", "integrity-impact", "intoto-provenance", "intoto-statement", "kb-article-ids", "kind", "labels", 
"last-published-timestamp", "last-scan-time", "logs-uri", "long-description", "materials", "message", "metadata", "name", "non-compliance-reason", "note-name", "package", "parsed-version", "payload", "payload-type", "platform", "predicate-type", "privileges-required", "project-id", "project-repo-id", "provenance", "provenance-bytes", "recipe", "remediation", "repo-id", "repo-name", "reproducible", "resource-uri", "revision", "revision-id", "scope", "serialized-payload", "severity", "short-description", "slsa-provenance", "source-provenance", "start-time", "statement", "support-url", "title", "trigger-id", "type", "uid", "undeploy-time", "update-id", "update-time", "upgrade", "url", "user-email", "user-interaction", "v1-name", "v2-blob", "v2-name", "vulnerability", "windows-update"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["-type", "address", "alias-context", "analysis-completed", "analysis-status", "analysis-status-error", "analysis-type", "architecture", "archive-time", "arguments", "artifact-storage-source-uri", "attack-complexity", "attack-vector", "attestation", "authentication", "availability-impact", "base-resource-url", "base-score", "build", "build-finished-on", "build-invocation-id", "build-options", "build-started-on", "build-type", "builder", "builder-config", "builder-version", "classification", "cloud-repo", "code", "comments", "completeness", "compliance", "confidentiality-impact", "config", "config-source", "context", "continuous-analysis", "cpe", "cpe-uri", "create-time", "creator", "cve", "cvss-score", "cvss-version", "cvssv3", "defined-in-material", "deploy-time", "deployment", "description", "digest", "discovery", "distance", "distribution", "dsse-attestation", "effective-severity", "end-time", "entry-point", "envelope", "environment", "epoch", "exploitability-score", "expression", "fingerprint", "fix-available", "full-name", "gerrit", "gerrit-project", "git", "host-uri", "id", "identity", "image", "impact-score", "inclusive", 
"integrity-impact", "intoto-provenance", "intoto-statement", "invocation", "kb-article-ids", "kind", "labels", "last-published-timestamp", "last-scan-time", "license", "logs-uri", "long-description", "materials", "message", "metadata", "name", "non-compliance-reason", "note-name", "package", "package-type", "parameters", "parsed-version", "payload", "payload-type", "platform", "predicate-type", "privileges-required", "project-id", "project-repo-id", "provenance", "provenance-bytes", "recipe", "remediation", "repo-id", "repo-name", "reproducible", "resource-uri", "revision", "revision-id", "scope", "serialized-payload", "severity", "short-description", "slsa-provenance", "slsa-provenance-zero-two", "source-provenance", "start-time", "statement", "support-url", "title", "trigger-id", "type", "uid", "undeploy-time", "update-id", "update-time", "upgrade", "uri", "url", "user-email", "user-interaction", "v1-name", "v2-blob", "v2-name", "version", "vulnerability", "windows-update"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1783,7 +1886,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2259,7 +2362,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2359,7 +2462,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2387,7 +2490,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2515,7 +2618,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2637,7 +2740,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2665,7 +2768,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2693,7 +2796,7 @@ async fn main() { let mut app = App::new("containeranalysis1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230106") .about("An implementation of the Grafeas API, which stores, and enables querying and retrieval of critical metadata about all of your software artifacts.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_containeranalysis1_cli") .arg(Arg::with_name("url") diff --git a/gen/containeranalysis1/Cargo.toml b/gen/containeranalysis1/Cargo.toml index 1f4c0acb4c..285d7f24aa 100644 --- a/gen/containeranalysis1/Cargo.toml +++ b/gen/containeranalysis1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-containeranalysis1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Container Analysis (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/containeranalysis1" homepage = "https://cloud.google.com/container-analysis/api/reference/rest/" -documentation = "https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-containeranalysis1/5.0.2+20230106" license = "MIT" keywords = ["containeranalysis", "google", "protocol", "web", "api"] autobins = false diff --git 
a/gen/containeranalysis1/README.md b/gen/containeranalysis1/README.md index 2724f1f41d..affe14978b 100644 --- a/gen/containeranalysis1/README.md +++ b/gen/containeranalysis1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-containeranalysis1` library allows access to all features of the *Google Container Analysis* service. -This documentation was generated from *Container Analysis* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *containeranalysis:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Container Analysis* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *containeranalysis:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Container Analysis* *v1* API can be found at the [official documentation site](https://cloud.google.com/container-analysis/api/reference/rest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/ContainerAnalysis) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/ContainerAnalysis) ... 
* projects - * [*notes batch create*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteBatchCreateCall), [*notes create*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteCreateCall), [*notes delete*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteDeleteCall), [*notes get*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteGetCall), [*notes get iam policy*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteGetIamPolicyCall), [*notes list*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteListCall), [*notes occurrences list*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteOccurrenceListCall), [*notes patch*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNotePatchCall), [*notes set iam policy*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteSetIamPolicyCall), [*notes test iam permissions*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectNoteTestIamPermissionCall), [*occurrences batch create*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceBatchCreateCall), [*occurrences create*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceCreateCall), [*occurrences delete*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceDeleteCall), [*occurrences 
get*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceGetCall), [*occurrences get iam policy*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceGetIamPolicyCall), [*occurrences get notes*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceGetNoteCall), [*occurrences get vulnerability summary*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceGetVulnerabilitySummaryCall), [*occurrences list*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceListCall), [*occurrences patch*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrencePatchCall), [*occurrences set iam policy*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceSetIamPolicyCall) and [*occurrences test iam permissions*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/api::ProjectOccurrenceTestIamPermissionCall) + * [*notes batch create*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteBatchCreateCall), [*notes create*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteCreateCall), [*notes delete*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteDeleteCall), [*notes get*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteGetCall), [*notes get iam policy*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteGetIamPolicyCall), [*notes 
list*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteListCall), [*notes occurrences list*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteOccurrenceListCall), [*notes patch*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNotePatchCall), [*notes set iam policy*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteSetIamPolicyCall), [*notes test iam permissions*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectNoteTestIamPermissionCall), [*occurrences batch create*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceBatchCreateCall), [*occurrences create*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceCreateCall), [*occurrences delete*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceDeleteCall), [*occurrences get*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceGetCall), [*occurrences get iam policy*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceGetIamPolicyCall), [*occurrences get notes*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceGetNoteCall), [*occurrences get vulnerability summary*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceGetVulnerabilitySummaryCall), [*occurrences list*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceListCall), [*occurrences 
patch*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrencePatchCall), [*occurrences set iam policy*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceSetIamPolicyCall) and [*occurrences test iam permissions*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/api::ProjectOccurrenceTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/ContainerAnalysis)** +* **[Hub](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/ContainerAnalysis)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::CallBuilder) -* **[Resources](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::CallBuilder) +* **[Resources](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::Part)** + 
* **[Parts](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::Delegate) to the -[Method Builder](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::Delegate) to the +[Method Builder](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::RequestValue) and -[decodable](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::RequestValue) and +[decodable](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-containeranalysis1/5.0.2-beta-1+20230106/google_containeranalysis1/client::RequestValue) are moved +* [request values](https://docs.rs/google-containeranalysis1/5.0.2+20230106/google_containeranalysis1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/containeranalysis1/src/api.rs b/gen/containeranalysis1/src/api.rs index 637310aa4c..05eefa89b9 100644 --- a/gen/containeranalysis1/src/api.rs +++ b/gen/containeranalysis1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> ContainerAnalysis { ContainerAnalysis { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://containeranalysis.googleapis.com/".to_string(), _root_url: "https://containeranalysis.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> ContainerAnalysis { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/containeranalysis1/src/client.rs b/gen/containeranalysis1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/containeranalysis1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/containeranalysis1/src/lib.rs b/gen/containeranalysis1/src/lib.rs index 96fc6c2884..0ed02aefa0 100644 --- a/gen/containeranalysis1/src/lib.rs +++ b/gen/containeranalysis1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Container Analysis* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *containeranalysis:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Container Analysis* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *containeranalysis:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Container Analysis* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/container-analysis/api/reference/rest/). diff --git a/gen/containeranalysis1_beta1-cli/Cargo.toml b/gen/containeranalysis1_beta1-cli/Cargo.toml index 76529b6882..37fa494ed2 100644 --- a/gen/containeranalysis1_beta1-cli/Cargo.toml +++ b/gen/containeranalysis1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-containeranalysis1_beta1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Container Analysis (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/containeranalysis1_beta1-cli" @@ -20,13 +20,13 @@ name = "containeranalysis1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-containeranalysis1_beta1] path = "../containeranalysis1_beta1" -version = "4.0.1+20220225" +version = "5.0.2+20230106" + diff --git a/gen/containeranalysis1_beta1-cli/README.md 
b/gen/containeranalysis1_beta1-cli/README.md index 8944d6c2e5..981fff9b18 100644 --- a/gen/containeranalysis1_beta1-cli/README.md +++ b/gen/containeranalysis1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Container Analysis* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Container Analysis* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash containeranalysis1-beta1 [options] @@ -51,9 +51,6 @@ containeranalysis1-beta1 [options] occurrences-patch (-r )... [-p ]... [-o ] occurrences-set-iam-policy (-r )... [-p ]... [-o ] occurrences-test-iam-permissions (-r )... [-p ]... [-o ] - scan-configs-get [-p ]... [-o ] - scan-configs-list [-p ]... [-o ] - scan-configs-update (-r )... [-p ]... [-o ] containeranalysis1-beta1 --help Configuration: diff --git a/gen/containeranalysis1_beta1-cli/mkdocs.yml b/gen/containeranalysis1_beta1-cli/mkdocs.yml index 61ef63ca79..4144376469 100644 --- a/gen/containeranalysis1_beta1-cli/mkdocs.yml +++ b/gen/containeranalysis1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Container Analysis v4.0.1+20220225 +site_name: Container Analysis v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-containeranalysis1_beta1-cli site_description: A complete library to interact with Container Analysis (protocol v1beta1) @@ -7,32 +7,30 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/containeranalysi docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_notes-batch-create.md', 'Projects', 'Notes Batch Create'] -- ['projects_notes-create.md', 'Projects', 'Notes Create'] -- ['projects_notes-delete.md', 'Projects', 'Notes Delete'] -- ['projects_notes-get.md', 'Projects', 'Notes Get'] -- ['projects_notes-get-iam-policy.md', 'Projects', 'Notes Get Iam Policy'] -- ['projects_notes-list.md', 'Projects', 
'Notes List'] -- ['projects_notes-occurrences-list.md', 'Projects', 'Notes Occurrences List'] -- ['projects_notes-patch.md', 'Projects', 'Notes Patch'] -- ['projects_notes-set-iam-policy.md', 'Projects', 'Notes Set Iam Policy'] -- ['projects_notes-test-iam-permissions.md', 'Projects', 'Notes Test Iam Permissions'] -- ['projects_occurrences-batch-create.md', 'Projects', 'Occurrences Batch Create'] -- ['projects_occurrences-create.md', 'Projects', 'Occurrences Create'] -- ['projects_occurrences-delete.md', 'Projects', 'Occurrences Delete'] -- ['projects_occurrences-get.md', 'Projects', 'Occurrences Get'] -- ['projects_occurrences-get-iam-policy.md', 'Projects', 'Occurrences Get Iam Policy'] -- ['projects_occurrences-get-notes.md', 'Projects', 'Occurrences Get Notes'] -- ['projects_occurrences-get-vulnerability-summary.md', 'Projects', 'Occurrences Get Vulnerability Summary'] -- ['projects_occurrences-list.md', 'Projects', 'Occurrences List'] -- ['projects_occurrences-patch.md', 'Projects', 'Occurrences Patch'] -- ['projects_occurrences-set-iam-policy.md', 'Projects', 'Occurrences Set Iam Policy'] -- ['projects_occurrences-test-iam-permissions.md', 'Projects', 'Occurrences Test Iam Permissions'] -- ['projects_scan-configs-get.md', 'Projects', 'Scan Configs Get'] -- ['projects_scan-configs-list.md', 'Projects', 'Scan Configs List'] -- ['projects_scan-configs-update.md', 'Projects', 'Scan Configs Update'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Notes Batch Create': 'projects_notes-batch-create.md' + - 'Notes Create': 'projects_notes-create.md' + - 'Notes Delete': 'projects_notes-delete.md' + - 'Notes Get': 'projects_notes-get.md' + - 'Notes Get Iam Policy': 'projects_notes-get-iam-policy.md' + - 'Notes List': 'projects_notes-list.md' + - 'Notes Occurrences List': 'projects_notes-occurrences-list.md' + - 'Notes Patch': 'projects_notes-patch.md' + - 'Notes Set Iam Policy': 'projects_notes-set-iam-policy.md' + - 'Notes Test Iam Permissions': 
'projects_notes-test-iam-permissions.md' + - 'Occurrences Batch Create': 'projects_occurrences-batch-create.md' + - 'Occurrences Create': 'projects_occurrences-create.md' + - 'Occurrences Delete': 'projects_occurrences-delete.md' + - 'Occurrences Get': 'projects_occurrences-get.md' + - 'Occurrences Get Iam Policy': 'projects_occurrences-get-iam-policy.md' + - 'Occurrences Get Notes': 'projects_occurrences-get-notes.md' + - 'Occurrences Get Vulnerability Summary': 'projects_occurrences-get-vulnerability-summary.md' + - 'Occurrences List': 'projects_occurrences-list.md' + - 'Occurrences Patch': 'projects_occurrences-patch.md' + - 'Occurrences Set Iam Policy': 'projects_occurrences-set-iam-policy.md' + - 'Occurrences Test Iam Permissions': 'projects_occurrences-test-iam-permissions.md' theme: readthedocs diff --git a/gen/containeranalysis1_beta1-cli/src/client.rs b/gen/containeranalysis1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/containeranalysis1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/containeranalysis1_beta1-cli/src/main.rs b/gen/containeranalysis1_beta1-cli/src/main.rs index 058a1739f7..f229687293 100644 --- a/gen/containeranalysis1_beta1-cli/src/main.rs +++ b/gen/containeranalysis1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_containeranalysis1_beta1::{api, Error, oauth2}; +use google_containeranalysis1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -178,7 +177,20 @@ where "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "long-description" => Some(("longDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.architecture" => Some(("package.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.cpe-uri" => Some(("package.cpeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.description" => Some(("package.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.comments" => Some(("package.license.comments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.expression" => Some(("package.license.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.maintainer" => Some(("package.maintainer", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "package.name" => Some(("package.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.package-type" => Some(("package.packageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.url" => Some(("package.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.epoch" => Some(("package.version.epoch", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "package.version.inclusive" => Some(("package.version.inclusive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package.version.kind" => Some(("package.version.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.name" => Some(("package.version.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.revision" => Some(("package.version.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "related-note-names" => Some(("relatedNoteNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "sbom.data-licence" => Some(("sbom.dataLicence", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sbom.spdx-version" => Some(("sbom.spdxVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -206,6 +218,18 @@ where "spdx-relationship.type" => Some(("spdxRelationship.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-score" => Some(("vulnerability.cvssScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.attack-complexity" => Some(("vulnerability.cvssV2.attackComplexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"vulnerability.cvss-v2.attack-vector" => Some(("vulnerability.cvssV2.attackVector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.authentication" => Some(("vulnerability.cvssV2.authentication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.availability-impact" => Some(("vulnerability.cvssV2.availabilityImpact", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.base-score" => Some(("vulnerability.cvssV2.baseScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.confidentiality-impact" => Some(("vulnerability.cvssV2.confidentialityImpact", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.exploitability-score" => Some(("vulnerability.cvssV2.exploitabilityScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.impact-score" => Some(("vulnerability.cvssV2.impactScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.integrity-impact" => Some(("vulnerability.cvssV2.integrityImpact", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.privileges-required" => Some(("vulnerability.cvssV2.privilegesRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.scope" => Some(("vulnerability.cvssV2.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.user-interaction" => Some(("vulnerability.cvssV2.userInteraction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.attack-complexity" => Some(("vulnerability.cvssV3.attackComplexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.attack-vector" => Some(("vulnerability.cvssV3.attackVector", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.availability-impact" => Some(("vulnerability.cvssV3.availabilityImpact", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -217,10 +241,12 @@ where "vulnerability.cvss-v3.privileges-required" => Some(("vulnerability.cvssV3.privilegesRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.scope" => Some(("vulnerability.cvssV3.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.user-interaction" => Some(("vulnerability.cvssV3.userInteraction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-version" => Some(("vulnerability.cvssVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cwe" => Some(("vulnerability.cwe", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "vulnerability.severity" => Some(("vulnerability.severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.source-update-time" => Some(("vulnerability.sourceUpdateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-kind", "analyzed", "attack-complexity", "attack-vector", "attestation-authority", "attribution", "availability-impact", "base-image", "base-score", "build", "builder-version", "checksum", "comments", "confidentiality-impact", "copyright", "create-time", "cvss-score", "cvss-v3", "data-licence", "deployable", "detailed-description", "discovery", "download-location", "expected-command", "expiration-time", "exploitability-score", "expression", "file-type", "files-license-info", "fingerprint", "hint", "home-page", "human-readable-name", "impact-score", "integrity-impact", "intoto", "key-id", "key-type", "kind", "license-declared", "long-description", "name", 
"originator", "package", "package-type", "privileges-required", "public-key", "related-note-names", "resource-uri", "resource-url", "sbom", "scope", "severity", "short-description", "signature", "source-update-time", "spdx-file", "spdx-package", "spdx-relationship", "spdx-version", "step-name", "summary-description", "supplier", "threshold", "title", "type", "update-time", "user-interaction", "v1-name", "v2-blob", "v2-name", "verification-code", "version", "vulnerability"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-kind", "analyzed", "architecture", "attack-complexity", "attack-vector", "attestation-authority", "attribution", "authentication", "availability-impact", "base-image", "base-score", "build", "builder-version", "checksum", "comments", "confidentiality-impact", "copyright", "cpe-uri", "create-time", "cvss-score", "cvss-v2", "cvss-v3", "cvss-version", "cwe", "data-licence", "deployable", "description", "detailed-description", "discovery", "download-location", "epoch", "expected-command", "expiration-time", "exploitability-score", "expression", "file-type", "files-license-info", "fingerprint", "hint", "home-page", "human-readable-name", "impact-score", "inclusive", "integrity-impact", "intoto", "key-id", "key-type", "kind", "license", "license-declared", "long-description", "maintainer", "name", "originator", "package", "package-type", "privileges-required", "public-key", "related-note-names", "resource-uri", "resource-url", "revision", "sbom", "scope", "severity", "short-description", "signature", "source-update-time", "spdx-file", "spdx-package", "spdx-relationship", "spdx-version", "step-name", "summary-description", "supplier", "threshold", "title", "type", "update-time", "url", "user-interaction", "v1-name", "v2-blob", "v2-name", "verification-code", "version", "vulnerability"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -483,7 +509,7 @@ 
where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -545,7 +571,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -640,7 +666,20 @@ where "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "long-description" => Some(("longDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.architecture" => Some(("package.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.cpe-uri" => Some(("package.cpeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.description" => Some(("package.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.comments" => Some(("package.license.comments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.license.expression" => Some(("package.license.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.maintainer" => Some(("package.maintainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "package.name" => Some(("package.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.package-type" => Some(("package.packageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"package.url" => Some(("package.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.epoch" => Some(("package.version.epoch", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "package.version.inclusive" => Some(("package.version.inclusive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "package.version.kind" => Some(("package.version.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.name" => Some(("package.version.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "package.version.revision" => Some(("package.version.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "related-note-names" => Some(("relatedNoteNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "sbom.data-licence" => Some(("sbom.dataLicence", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sbom.spdx-version" => Some(("sbom.spdxVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -668,6 +707,18 @@ where "spdx-relationship.type" => Some(("spdxRelationship.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-score" => Some(("vulnerability.cvssScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.attack-complexity" => Some(("vulnerability.cvssV2.attackComplexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.attack-vector" => Some(("vulnerability.cvssV2.attackVector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.authentication" => Some(("vulnerability.cvssV2.authentication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"vulnerability.cvss-v2.availability-impact" => Some(("vulnerability.cvssV2.availabilityImpact", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.base-score" => Some(("vulnerability.cvssV2.baseScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.confidentiality-impact" => Some(("vulnerability.cvssV2.confidentialityImpact", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.exploitability-score" => Some(("vulnerability.cvssV2.exploitabilityScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.impact-score" => Some(("vulnerability.cvssV2.impactScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.integrity-impact" => Some(("vulnerability.cvssV2.integrityImpact", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.privileges-required" => Some(("vulnerability.cvssV2.privilegesRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.scope" => Some(("vulnerability.cvssV2.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-v2.user-interaction" => Some(("vulnerability.cvssV2.userInteraction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.attack-complexity" => Some(("vulnerability.cvssV3.attackComplexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.attack-vector" => Some(("vulnerability.cvssV3.attackVector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.availability-impact" => Some(("vulnerability.cvssV3.availabilityImpact", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -679,10 +730,12 @@ where "vulnerability.cvss-v3.privileges-required" => 
Some(("vulnerability.cvssV3.privilegesRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.scope" => Some(("vulnerability.cvssV3.scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-v3.user-interaction" => Some(("vulnerability.cvssV3.userInteraction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cvss-version" => Some(("vulnerability.cvssVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vulnerability.cwe" => Some(("vulnerability.cwe", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "vulnerability.severity" => Some(("vulnerability.severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.source-update-time" => Some(("vulnerability.sourceUpdateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-kind", "analyzed", "attack-complexity", "attack-vector", "attestation-authority", "attribution", "availability-impact", "base-image", "base-score", "build", "builder-version", "checksum", "comments", "confidentiality-impact", "copyright", "create-time", "cvss-score", "cvss-v3", "data-licence", "deployable", "detailed-description", "discovery", "download-location", "expected-command", "expiration-time", "exploitability-score", "expression", "file-type", "files-license-info", "fingerprint", "hint", "home-page", "human-readable-name", "impact-score", "integrity-impact", "intoto", "key-id", "key-type", "kind", "license-declared", "long-description", "name", "originator", "package", "package-type", "privileges-required", "public-key", "related-note-names", "resource-uri", "resource-url", "sbom", "scope", "severity", "short-description", "signature", "source-update-time", "spdx-file", "spdx-package", "spdx-relationship", "spdx-version", "step-name", 
"summary-description", "supplier", "threshold", "title", "type", "update-time", "user-interaction", "v1-name", "v2-blob", "v2-name", "verification-code", "version", "vulnerability"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analysis-kind", "analyzed", "architecture", "attack-complexity", "attack-vector", "attestation-authority", "attribution", "authentication", "availability-impact", "base-image", "base-score", "build", "builder-version", "checksum", "comments", "confidentiality-impact", "copyright", "cpe-uri", "create-time", "cvss-score", "cvss-v2", "cvss-v3", "cvss-version", "cwe", "data-licence", "deployable", "description", "detailed-description", "discovery", "download-location", "epoch", "expected-command", "expiration-time", "exploitability-score", "expression", "file-type", "files-license-info", "fingerprint", "hint", "home-page", "human-readable-name", "impact-score", "inclusive", "integrity-impact", "intoto", "key-id", "key-type", "kind", "license", "license-declared", "long-description", "maintainer", "name", "originator", "package", "package-type", "privileges-required", "public-key", "related-note-names", "resource-uri", "resource-url", "revision", "sbom", "scope", "severity", "short-description", "signature", "source-update-time", "spdx-file", "spdx-package", "spdx-relationship", "spdx-version", "step-name", "summary-description", "supplier", "threshold", "title", "type", "update-time", "url", "user-interaction", "v1-name", "v2-blob", "v2-name", "verification-code", "version", "vulnerability"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -697,7 +750,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = 
false; @@ -1068,12 +1121,25 @@ where "derived-image.derived-image.fingerprint.v1-name" => Some(("derivedImage.derivedImage.fingerprint.v1Name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "derived-image.derived-image.fingerprint.v2-blob" => Some(("derivedImage.derivedImage.fingerprint.v2Blob", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "derived-image.derived-image.fingerprint.v2-name" => Some(("derivedImage.derivedImage.fingerprint.v2Name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "discovered.discovered.analysis-completed.analysis-type" => Some(("discovered.discovered.analysisCompleted.analysisType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "discovered.discovered.analysis-status" => Some(("discovered.discovered.analysisStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "discovered.discovered.analysis-status-error.code" => Some(("discovered.discovered.analysisStatusError.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "discovered.discovered.analysis-status-error.message" => Some(("discovered.discovered.analysisStatusError.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "discovered.discovered.continuous-analysis" => Some(("discovered.discovered.continuousAnalysis", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "discovered.discovered.last-analysis-time" => Some(("discovered.discovered.lastAnalysisTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "envelope.payload" => Some(("envelope.payload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "envelope.payload-type" => Some(("envelope.payloadType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.architecture" => Some(("installation.installation.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "installation.installation.cpe-uri" => Some(("installation.installation.cpeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.license.comments" => Some(("installation.installation.license.comments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.license.expression" => Some(("installation.installation.license.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "installation.installation.name" => Some(("installation.installation.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.package-type" => Some(("installation.installation.packageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.version.epoch" => Some(("installation.installation.version.epoch", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "installation.installation.version.inclusive" => Some(("installation.installation.version.inclusive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "installation.installation.version.kind" => Some(("installation.installation.version.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.version.name" => Some(("installation.installation.version.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.version.revision" => Some(("installation.installation.version.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "intoto.signed.byproducts.custom-values" => Some(("intoto.signed.byproducts.customValues", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "intoto.signed.command" => Some(("intoto.signed.command", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "intoto.signed.environment.custom-values" => 
Some(("intoto.signed.environment.customValues", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -1120,13 +1186,14 @@ where "spdx-relationship.type" => Some(("spdxRelationship.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-score" => Some(("vulnerability.cvssScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-version" => Some(("vulnerability.cvssVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.effective-severity" => Some(("vulnerability.effectiveSeverity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.long-description" => Some(("vulnerability.longDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.severity" => Some(("vulnerability.severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.short-description" => Some(("vulnerability.shortDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.type" => Some(("vulnerability.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "alias-context", "analysis-status", "analysis-status-error", "artifact-storage-source-uri", "attestation", "attributions", "base-resource-url", "build", "build-options", "builder-version", "byproducts", "cloud-repo", "code", "command", "comment", "comments", "config", "content-hash", "content-type", "context", "continuous-analysis", "contributors", "copyright", "create-time", "creator", "creator-comment", "creators", "custom-values", "cvss-score", "deploy-time", "deployment", "derived-image", "discovered", "distance", "document-comment", "effective-severity", "end-time", 
"environment", "expression", "external-document-refs", "filename", "files-license-info", "fingerprint", "generic-signed-attestation", "gerrit", "gerrit-project", "git", "home-page", "host-uri", "id", "installation", "intoto", "kind", "labels", "last-analysis-time", "license-concluded", "license-list-version", "logs-uri", "long-description", "message", "name", "namespace", "note-name", "notice", "package-type", "pgp-key-id", "pgp-signed-attestation", "platform", "project-id", "project-repo-id", "provenance", "provenance-bytes", "remediation", "repo-id", "repo-name", "resource", "resource-uri", "revision-id", "sbom", "serialized-payload", "severity", "short-description", "signature", "signed", "source", "source-info", "source-provenance", "spdx-file", "spdx-package", "spdx-relationship", "start-time", "summary-description", "target", "title", "trigger-id", "type", "uid", "undeploy-time", "update-time", "uri", "url", "user-email", "v1-name", "v2-blob", "v2-name", "value", "version", "vulnerability"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "alias-context", "analysis-completed", "analysis-status", "analysis-status-error", "analysis-type", "architecture", "artifact-storage-source-uri", "attestation", "attributions", "base-resource-url", "build", "build-options", "builder-version", "byproducts", "cloud-repo", "code", "command", "comment", "comments", "config", "content-hash", "content-type", "context", "continuous-analysis", "contributors", "copyright", "cpe-uri", "create-time", "creator", "creator-comment", "creators", "custom-values", "cvss-score", "cvss-version", "deploy-time", "deployment", "derived-image", "discovered", "distance", "document-comment", "effective-severity", "end-time", "envelope", "environment", "epoch", "expression", "external-document-refs", "filename", "files-license-info", "fingerprint", "generic-signed-attestation", "gerrit", "gerrit-project", "git", "home-page", "host-uri", "id", "inclusive", "installation", 
"intoto", "kind", "labels", "last-analysis-time", "license", "license-concluded", "license-list-version", "logs-uri", "long-description", "message", "name", "namespace", "note-name", "notice", "package-type", "payload", "payload-type", "pgp-key-id", "pgp-signed-attestation", "platform", "project-id", "project-repo-id", "provenance", "provenance-bytes", "remediation", "repo-id", "repo-name", "resource", "resource-uri", "revision", "revision-id", "sbom", "serialized-payload", "severity", "short-description", "signature", "signed", "source", "source-info", "source-provenance", "spdx-file", "spdx-package", "spdx-relationship", "start-time", "summary-description", "target", "title", "trigger-id", "type", "uid", "undeploy-time", "update-time", "uri", "url", "user-email", "v1-name", "v2-blob", "v2-name", "value", "version", "vulnerability"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1493,7 +1560,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1612,12 +1679,25 @@ where "derived-image.derived-image.fingerprint.v1-name" => Some(("derivedImage.derivedImage.fingerprint.v1Name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "derived-image.derived-image.fingerprint.v2-blob" => Some(("derivedImage.derivedImage.fingerprint.v2Blob", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "derived-image.derived-image.fingerprint.v2-name" => Some(("derivedImage.derivedImage.fingerprint.v2Name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "discovered.discovered.analysis-completed.analysis-type" => Some(("discovered.discovered.analysisCompleted.analysisType", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "discovered.discovered.analysis-status" => Some(("discovered.discovered.analysisStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "discovered.discovered.analysis-status-error.code" => Some(("discovered.discovered.analysisStatusError.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "discovered.discovered.analysis-status-error.message" => Some(("discovered.discovered.analysisStatusError.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "discovered.discovered.continuous-analysis" => Some(("discovered.discovered.continuousAnalysis", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "discovered.discovered.last-analysis-time" => Some(("discovered.discovered.lastAnalysisTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "envelope.payload" => Some(("envelope.payload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "envelope.payload-type" => Some(("envelope.payloadType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.architecture" => Some(("installation.installation.architecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.cpe-uri" => Some(("installation.installation.cpeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.license.comments" => Some(("installation.installation.license.comments", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.license.expression" => Some(("installation.installation.license.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "installation.installation.name" => Some(("installation.installation.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"installation.installation.package-type" => Some(("installation.installation.packageType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.version.epoch" => Some(("installation.installation.version.epoch", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "installation.installation.version.inclusive" => Some(("installation.installation.version.inclusive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "installation.installation.version.kind" => Some(("installation.installation.version.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.version.name" => Some(("installation.installation.version.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "installation.installation.version.revision" => Some(("installation.installation.version.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "intoto.signed.byproducts.custom-values" => Some(("intoto.signed.byproducts.customValues", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "intoto.signed.command" => Some(("intoto.signed.command", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "intoto.signed.environment.custom-values" => Some(("intoto.signed.environment.customValues", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -1664,13 +1744,14 @@ where "spdx-relationship.type" => Some(("spdxRelationship.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cvss-score" => Some(("vulnerability.cvssScore", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "vulnerability.cvss-version" => Some(("vulnerability.cvssVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"vulnerability.effective-severity" => Some(("vulnerability.effectiveSeverity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.long-description" => Some(("vulnerability.longDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.severity" => Some(("vulnerability.severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.short-description" => Some(("vulnerability.shortDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.type" => Some(("vulnerability.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "alias-context", "analysis-status", "analysis-status-error", "artifact-storage-source-uri", "attestation", "attributions", "base-resource-url", "build", "build-options", "builder-version", "byproducts", "cloud-repo", "code", "command", "comment", "comments", "config", "content-hash", "content-type", "context", "continuous-analysis", "contributors", "copyright", "create-time", "creator", "creator-comment", "creators", "custom-values", "cvss-score", "deploy-time", "deployment", "derived-image", "discovered", "distance", "document-comment", "effective-severity", "end-time", "environment", "expression", "external-document-refs", "filename", "files-license-info", "fingerprint", "generic-signed-attestation", "gerrit", "gerrit-project", "git", "home-page", "host-uri", "id", "installation", "intoto", "kind", "labels", "last-analysis-time", "license-concluded", "license-list-version", "logs-uri", "long-description", "message", "name", "namespace", "note-name", "notice", "package-type", "pgp-key-id", "pgp-signed-attestation", "platform", "project-id", "project-repo-id", "provenance", "provenance-bytes", "remediation", "repo-id", "repo-name", "resource", "resource-uri", "revision-id", "sbom", "serialized-payload", "severity", 
"short-description", "signature", "signed", "source", "source-info", "source-provenance", "spdx-file", "spdx-package", "spdx-relationship", "start-time", "summary-description", "target", "title", "trigger-id", "type", "uid", "undeploy-time", "update-time", "uri", "url", "user-email", "v1-name", "v2-blob", "v2-name", "value", "version", "vulnerability"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "alias-context", "analysis-completed", "analysis-status", "analysis-status-error", "analysis-type", "architecture", "artifact-storage-source-uri", "attestation", "attributions", "base-resource-url", "build", "build-options", "builder-version", "byproducts", "cloud-repo", "code", "command", "comment", "comments", "config", "content-hash", "content-type", "context", "continuous-analysis", "contributors", "copyright", "cpe-uri", "create-time", "creator", "creator-comment", "creators", "custom-values", "cvss-score", "cvss-version", "deploy-time", "deployment", "derived-image", "discovered", "distance", "document-comment", "effective-severity", "end-time", "envelope", "environment", "epoch", "expression", "external-document-refs", "filename", "files-license-info", "fingerprint", "generic-signed-attestation", "gerrit", "gerrit-project", "git", "home-page", "host-uri", "id", "inclusive", "installation", "intoto", "kind", "labels", "last-analysis-time", "license", "license-concluded", "license-list-version", "logs-uri", "long-description", "message", "name", "namespace", "note-name", "notice", "package-type", "payload", "payload-type", "pgp-key-id", "pgp-signed-attestation", "platform", "project-id", "project-repo-id", "provenance", "provenance-bytes", "remediation", "repo-id", "repo-name", "resource", "resource-uri", "revision", "revision-id", "sbom", "serialized-payload", "severity", "short-description", "signature", "signed", "source", "source-info", "source-provenance", "spdx-file", "spdx-package", "spdx-relationship", "start-time", 
"summary-description", "target", "title", "trigger-id", "type", "uid", "undeploy-time", "update-time", "uri", "url", "user-email", "v1-name", "v2-blob", "v2-name", "value", "version", "vulnerability"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1685,7 +1766,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1905,209 +1986,6 @@ where } } - async fn _projects_scan_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().scan_configs_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - 
Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_scan_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().scan_configs_list(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => 
Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_scan_configs_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "enabled" => Some(("enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "enabled", "name", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::ScanConfig = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().scan_configs_update(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) 
{ - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -2178,15 +2056,6 @@ where ("occurrences-test-iam-permissions", Some(opt)) => { call_result = self._projects_occurrences_test_iam_permissions(opt, dry_run, &mut err).await; }, - ("scan-configs-get", Some(opt)) => { - call_result = self._projects_scan_configs_get(opt, dry_run, &mut err).await; - }, - ("scan-configs-list", Some(opt)) => { - call_result = self._projects_scan_configs_list(opt, dry_run, &mut err).await; - }, - ("scan-configs-update", Some(opt)) => { - call_result = self._projects_scan_configs_update(opt, dry_run, &mut err).await; - }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2266,7 +2135,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'notes-batch-create', 'notes-create', 'notes-delete', 'notes-get', 'notes-get-iam-policy', 'notes-list', 'notes-occurrences-list', 'notes-patch', 'notes-set-iam-policy', 'notes-test-iam-permissions', 'occurrences-batch-create', 'occurrences-create', 'occurrences-delete', 'occurrences-get', 'occurrences-get-iam-policy', 'occurrences-get-notes', 'occurrences-get-vulnerability-summary', 'occurrences-list', 'occurrences-patch', 'occurrences-set-iam-policy', 'occurrences-test-iam-permissions', 
'scan-configs-get', 'scan-configs-list' and 'scan-configs-update'", vec![ + ("projects", "methods: 'notes-batch-create', 'notes-create', 'notes-delete', 'notes-get', 'notes-get-iam-policy', 'notes-list', 'notes-occurrences-list', 'notes-patch', 'notes-set-iam-policy', 'notes-test-iam-permissions', 'occurrences-batch-create', 'occurrences-create', 'occurrences-delete', 'occurrences-get', 'occurrences-get-iam-policy', 'occurrences-get-notes', 'occurrences-get-vulnerability-summary', 'occurrences-list', 'occurrences-patch', 'occurrences-set-iam-policy' and 'occurrences-test-iam-permissions'", vec![ ("notes-batch-create", Some(r##"Creates new notes in batch."##), "Details at http://byron.github.io/google-apis-rs/google_containeranalysis1_beta1_cli/projects_notes-batch-create", @@ -2373,7 +2242,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2473,7 +2342,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2501,7 +2370,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2629,7 +2498,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2751,7 +2620,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2779,79 +2648,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("scan-configs-get", - Some(r##"Gets the specified scan configuration."##), - "Details at http://byron.github.io/google-apis-rs/google_containeranalysis1_beta1_cli/projects_scan-configs-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The name of the scan configuration in the form of `projects/[PROJECT_ID]/scanConfigs/[SCAN_CONFIG_ID]`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("scan-configs-list", - Some(r##"Lists scan configurations for the specified project."##), - "Details at http://byron.github.io/google-apis-rs/google_containeranalysis1_beta1_cli/projects_scan-configs-list", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
The name of the project to list scan configurations for in the form of `projects/[PROJECT_ID]`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("scan-configs-update", - Some(r##"Updates the specified scan configuration."##), - "Details at http://byron.github.io/google-apis-rs/google_containeranalysis1_beta1_cli/projects_scan-configs-update", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The name of the scan configuration in the form of `projects/[PROJECT_ID]/scanConfigs/[SCAN_CONFIG_ID]`."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2879,7 +2676,7 @@ async fn main() { let mut app = App::new("containeranalysis1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230106") .about("An implementation of the Grafeas API, which stores, and enables querying and retrieval of critical metadata about all of your software artifacts.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_containeranalysis1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/containeranalysis1_beta1/Cargo.toml b/gen/containeranalysis1_beta1/Cargo.toml index 63f706ed4f..011bc58f7b 100644 --- a/gen/containeranalysis1_beta1/Cargo.toml +++ b/gen/containeranalysis1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-containeranalysis1_beta1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Container Analysis (protocol v1beta1)" 
repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/containeranalysis1_beta1" homepage = "https://cloud.google.com/container-analysis/api/reference/rest/" -documentation = "https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106" license = "MIT" keywords = ["containeranalysis", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/containeranalysis1_beta1/README.md b/gen/containeranalysis1_beta1/README.md index c692713d5c..5b7b30e3e6 100644 --- a/gen/containeranalysis1_beta1/README.md +++ b/gen/containeranalysis1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-containeranalysis1_beta1` library allows access to all features of the *Google Container Analysis* service. -This documentation was generated from *Container Analysis* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *containeranalysis:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Container Analysis* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *containeranalysis:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Container Analysis* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/container-analysis/api/reference/rest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/ContainerAnalysis) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/ContainerAnalysis) ... 
* projects - * [*notes batch create*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteBatchCreateCall), [*notes create*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteCreateCall), [*notes delete*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteDeleteCall), [*notes get*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteGetCall), [*notes get iam policy*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteGetIamPolicyCall), [*notes list*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteListCall), [*notes occurrences list*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteOccurrenceListCall), [*notes patch*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNotePatchCall), [*notes set iam policy*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteSetIamPolicyCall), [*notes test iam permissions*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectNoteTestIamPermissionCall), [*occurrences batch create*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceBatchCreateCall), [*occurrences create*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceCreateCall), [*occurrences 
delete*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceDeleteCall), [*occurrences get*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceGetCall), [*occurrences get iam policy*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceGetIamPolicyCall), [*occurrences get notes*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceGetNoteCall), [*occurrences get vulnerability summary*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceGetVulnerabilitySummaryCall), [*occurrences list*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceListCall), [*occurrences patch*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrencePatchCall), [*occurrences set iam policy*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceSetIamPolicyCall) and [*occurrences test iam permissions*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceTestIamPermissionCall) + * [*notes batch create*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteBatchCreateCall), [*notes create*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteCreateCall), [*notes delete*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteDeleteCall), [*notes 
get*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteGetCall), [*notes get iam policy*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteGetIamPolicyCall), [*notes list*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteListCall), [*notes occurrences list*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteOccurrenceListCall), [*notes patch*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNotePatchCall), [*notes set iam policy*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteSetIamPolicyCall), [*notes test iam permissions*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectNoteTestIamPermissionCall), [*occurrences batch create*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceBatchCreateCall), [*occurrences create*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceCreateCall), [*occurrences delete*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceDeleteCall), [*occurrences get*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceGetCall), [*occurrences get iam policy*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceGetIamPolicyCall), [*occurrences get notes*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceGetNoteCall), [*occurrences get 
vulnerability summary*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceGetVulnerabilitySummaryCall), [*occurrences list*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceListCall), [*occurrences patch*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrencePatchCall), [*occurrences set iam policy*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceSetIamPolicyCall) and [*occurrences test iam permissions*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/api::ProjectOccurrenceTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/ContainerAnalysis)** +* **[Hub](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/ContainerAnalysis)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-containeranalysis1_beta1/5.0.2-beta-1+20230106/google_containeranalysis1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-containeranalysis1_beta1/5.0.2+20230106/google_containeranalysis1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/containeranalysis1_beta1/src/api.rs b/gen/containeranalysis1_beta1/src/api.rs index 3b12718c8d..cf843831c9 100644 --- a/gen/containeranalysis1_beta1/src/api.rs +++ b/gen/containeranalysis1_beta1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> ContainerAnalysis { ContainerAnalysis { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://containeranalysis.googleapis.com/".to_string(), _root_url: "https://containeranalysis.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> ContainerAnalysis { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/containeranalysis1_beta1/src/client.rs b/gen/containeranalysis1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/containeranalysis1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - 
After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/containeranalysis1_beta1/src/lib.rs b/gen/containeranalysis1_beta1/src/lib.rs index 2878511419..4839fd5632 100644 --- a/gen/containeranalysis1_beta1/src/lib.rs +++ b/gen/containeranalysis1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Container Analysis* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *containeranalysis:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Container Analysis* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *containeranalysis:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Container Analysis* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/container-analysis/api/reference/rest/). diff --git a/gen/content2-cli/Cargo.toml b/gen/content2-cli/Cargo.toml index 266ae2050b..118e78dff4 100644 --- a/gen/content2-cli/Cargo.toml +++ b/gen/content2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-content2-cli" -version = "4.0.1+20220303" +version = "5.0.2+20220303" authors = ["Sebastian Thiel "] description = "A complete library to interact with Shopping Content (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/content2-cli" @@ -20,13 +20,13 @@ name = "content2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-content2] path = "../content2" -version = "4.0.1+20220303" +version = "5.0.2+20220303" + diff --git a/gen/content2-cli/README.md b/gen/content2-cli/README.md index 99001e4258..012529d7d7 100644 --- a/gen/content2-cli/README.md +++ b/gen/content2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Shopping Content* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *Shopping Content* API at revision *20220303*. The CLI is at version *5.0.2*. ```bash content2 [options] diff --git a/gen/content2-cli/mkdocs.yml b/gen/content2-cli/mkdocs.yml index 15a63723de..2a2a63b095 100644 --- a/gen/content2-cli/mkdocs.yml +++ b/gen/content2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Shopping Content v4.0.1+20220303 +site_name: Shopping Content v5.0.2+20220303 site_url: http://byron.github.io/google-apis-rs/google-content2-cli site_description: A complete library to interact with Shopping Content (protocol v2) @@ -7,96 +7,111 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/content2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_authinfo.md', 'Accounts', 'Authinfo'] -- ['accounts_claimwebsite.md', 'Accounts', 'Claimwebsite'] -- ['accounts_custombatch.md', 'Accounts', 'Custombatch'] -- ['accounts_delete.md', 'Accounts', 'Delete'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_insert.md', 'Accounts', 'Insert'] -- ['accounts_link.md', 'Accounts', 'Link'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['accountstatuses_custombatch.md', 'Accountstatuses', 'Custombatch'] -- ['accountstatuses_get.md', 'Accountstatuses', 'Get'] -- ['accountstatuses_list.md', 'Accountstatuses', 'List'] -- ['accounttax_custombatch.md', 'Accounttax', 'Custombatch'] -- ['accounttax_get.md', 'Accounttax', 'Get'] -- ['accounttax_list.md', 'Accounttax', 'List'] -- ['accounttax_update.md', 'Accounttax', 'Update'] -- ['datafeeds_custombatch.md', 'Datafeeds', 'Custombatch'] -- ['datafeeds_delete.md', 'Datafeeds', 'Delete'] -- ['datafeeds_fetchnow.md', 'Datafeeds', 'Fetchnow'] -- ['datafeeds_get.md', 'Datafeeds', 'Get'] -- 
['datafeeds_insert.md', 'Datafeeds', 'Insert'] -- ['datafeeds_list.md', 'Datafeeds', 'List'] -- ['datafeeds_update.md', 'Datafeeds', 'Update'] -- ['datafeedstatuses_custombatch.md', 'Datafeedstatuses', 'Custombatch'] -- ['datafeedstatuses_get.md', 'Datafeedstatuses', 'Get'] -- ['datafeedstatuses_list.md', 'Datafeedstatuses', 'List'] -- ['inventory_custombatch.md', 'Inventory', 'Custombatch'] -- ['inventory_set.md', 'Inventory', 'Set'] -- ['liasettings_custombatch.md', 'Liasettings', 'Custombatch'] -- ['liasettings_get.md', 'Liasettings', 'Get'] -- ['liasettings_getaccessiblegmbaccounts.md', 'Liasettings', 'Getaccessiblegmbaccounts'] -- ['liasettings_list.md', 'Liasettings', 'List'] -- ['liasettings_listposdataproviders.md', 'Liasettings', 'Listposdataproviders'] -- ['liasettings_requestgmbaccess.md', 'Liasettings', 'Requestgmbaccess'] -- ['liasettings_requestinventoryverification.md', 'Liasettings', 'Requestinventoryverification'] -- ['liasettings_setinventoryverificationcontact.md', 'Liasettings', 'Setinventoryverificationcontact'] -- ['liasettings_setposdataprovider.md', 'Liasettings', 'Setposdataprovider'] -- ['liasettings_update.md', 'Liasettings', 'Update'] -- ['orderinvoices_createchargeinvoice.md', 'Orderinvoices', 'Createchargeinvoice'] -- ['orderinvoices_createrefundinvoice.md', 'Orderinvoices', 'Createrefundinvoice'] -- ['orderreports_listdisbursements.md', 'Orderreports', 'Listdisbursements'] -- ['orderreports_listtransactions.md', 'Orderreports', 'Listtransactions'] -- ['orderreturns_get.md', 'Orderreturns', 'Get'] -- ['orderreturns_list.md', 'Orderreturns', 'List'] -- ['orders_acknowledge.md', 'Orders', 'Acknowledge'] -- ['orders_advancetestorder.md', 'Orders', 'Advancetestorder'] -- ['orders_cancel.md', 'Orders', 'Cancel'] -- ['orders_cancellineitem.md', 'Orders', 'Cancellineitem'] -- ['orders_canceltestorderbycustomer.md', 'Orders', 'Canceltestorderbycustomer'] -- ['orders_createtestorder.md', 'Orders', 'Createtestorder'] -- 
['orders_createtestreturn.md', 'Orders', 'Createtestreturn'] -- ['orders_custombatch.md', 'Orders', 'Custombatch'] -- ['orders_get.md', 'Orders', 'Get'] -- ['orders_getbymerchantorderid.md', 'Orders', 'Getbymerchantorderid'] -- ['orders_gettestordertemplate.md', 'Orders', 'Gettestordertemplate'] -- ['orders_instorerefundlineitem.md', 'Orders', 'Instorerefundlineitem'] -- ['orders_list.md', 'Orders', 'List'] -- ['orders_refund.md', 'Orders', 'Refund'] -- ['orders_rejectreturnlineitem.md', 'Orders', 'Rejectreturnlineitem'] -- ['orders_returnlineitem.md', 'Orders', 'Returnlineitem'] -- ['orders_returnrefundlineitem.md', 'Orders', 'Returnrefundlineitem'] -- ['orders_setlineitemmetadata.md', 'Orders', 'Setlineitemmetadata'] -- ['orders_shiplineitems.md', 'Orders', 'Shiplineitems'] -- ['orders_updatelineitemshippingdetails.md', 'Orders', 'Updatelineitemshippingdetails'] -- ['orders_updatemerchantorderid.md', 'Orders', 'Updatemerchantorderid'] -- ['orders_updateshipment.md', 'Orders', 'Updateshipment'] -- ['pos_custombatch.md', 'Pos', 'Custombatch'] -- ['pos_delete.md', 'Pos', 'Delete'] -- ['pos_get.md', 'Pos', 'Get'] -- ['pos_insert.md', 'Pos', 'Insert'] -- ['pos_inventory.md', 'Pos', 'Inventory'] -- ['pos_list.md', 'Pos', 'List'] -- ['pos_sale.md', 'Pos', 'Sale'] -- ['products_custombatch.md', 'Products', 'Custombatch'] -- ['products_delete.md', 'Products', 'Delete'] -- ['products_get.md', 'Products', 'Get'] -- ['products_insert.md', 'Products', 'Insert'] -- ['products_list.md', 'Products', 'List'] -- ['productstatuses_custombatch.md', 'Productstatuses', 'Custombatch'] -- ['productstatuses_get.md', 'Productstatuses', 'Get'] -- ['productstatuses_list.md', 'Productstatuses', 'List'] -- ['shippingsettings_custombatch.md', 'Shippingsettings', 'Custombatch'] -- ['shippingsettings_get.md', 'Shippingsettings', 'Get'] -- ['shippingsettings_getsupportedcarriers.md', 'Shippingsettings', 'Getsupportedcarriers'] -- ['shippingsettings_getsupportedholidays.md', 'Shippingsettings', 
'Getsupportedholidays'] -- ['shippingsettings_getsupportedpickupservices.md', 'Shippingsettings', 'Getsupportedpickupservices'] -- ['shippingsettings_list.md', 'Shippingsettings', 'List'] -- ['shippingsettings_update.md', 'Shippingsettings', 'Update'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Authinfo': 'accounts_authinfo.md' + - 'Claimwebsite': 'accounts_claimwebsite.md' + - 'Custombatch': 'accounts_custombatch.md' + - 'Delete': 'accounts_delete.md' + - 'Get': 'accounts_get.md' + - 'Insert': 'accounts_insert.md' + - 'Link': 'accounts_link.md' + - 'List': 'accounts_list.md' + - 'Update': 'accounts_update.md' +- 'Accountstatuses': + - 'Custombatch': 'accountstatuses_custombatch.md' + - 'Get': 'accountstatuses_get.md' + - 'List': 'accountstatuses_list.md' +- 'Accounttax': + - 'Custombatch': 'accounttax_custombatch.md' + - 'Get': 'accounttax_get.md' + - 'List': 'accounttax_list.md' + - 'Update': 'accounttax_update.md' +- 'Datafeeds': + - 'Custombatch': 'datafeeds_custombatch.md' + - 'Delete': 'datafeeds_delete.md' + - 'Fetchnow': 'datafeeds_fetchnow.md' + - 'Get': 'datafeeds_get.md' + - 'Insert': 'datafeeds_insert.md' + - 'List': 'datafeeds_list.md' + - 'Update': 'datafeeds_update.md' +- 'Datafeedstatuses': + - 'Custombatch': 'datafeedstatuses_custombatch.md' + - 'Get': 'datafeedstatuses_get.md' + - 'List': 'datafeedstatuses_list.md' +- 'Inventory': + - 'Custombatch': 'inventory_custombatch.md' + - 'Set': 'inventory_set.md' +- 'Liasettings': + - 'Custombatch': 'liasettings_custombatch.md' + - 'Get': 'liasettings_get.md' + - 'Getaccessiblegmbaccounts': 'liasettings_getaccessiblegmbaccounts.md' + - 'List': 'liasettings_list.md' + - 'Listposdataproviders': 'liasettings_listposdataproviders.md' + - 'Requestgmbaccess': 'liasettings_requestgmbaccess.md' + - 'Requestinventoryverification': 'liasettings_requestinventoryverification.md' + - 'Setinventoryverificationcontact': 'liasettings_setinventoryverificationcontact.md' + - 'Setposdataprovider': 
'liasettings_setposdataprovider.md' + - 'Update': 'liasettings_update.md' +- 'Orderinvoices': + - 'Createchargeinvoice': 'orderinvoices_createchargeinvoice.md' + - 'Createrefundinvoice': 'orderinvoices_createrefundinvoice.md' +- 'Orderreports': + - 'Listdisbursements': 'orderreports_listdisbursements.md' + - 'Listtransactions': 'orderreports_listtransactions.md' +- 'Orderreturns': + - 'Get': 'orderreturns_get.md' + - 'List': 'orderreturns_list.md' +- 'Orders': + - 'Acknowledge': 'orders_acknowledge.md' + - 'Advancetestorder': 'orders_advancetestorder.md' + - 'Cancel': 'orders_cancel.md' + - 'Cancellineitem': 'orders_cancellineitem.md' + - 'Canceltestorderbycustomer': 'orders_canceltestorderbycustomer.md' + - 'Createtestorder': 'orders_createtestorder.md' + - 'Createtestreturn': 'orders_createtestreturn.md' + - 'Custombatch': 'orders_custombatch.md' + - 'Get': 'orders_get.md' + - 'Getbymerchantorderid': 'orders_getbymerchantorderid.md' + - 'Gettestordertemplate': 'orders_gettestordertemplate.md' + - 'Instorerefundlineitem': 'orders_instorerefundlineitem.md' + - 'List': 'orders_list.md' + - 'Refund': 'orders_refund.md' + - 'Rejectreturnlineitem': 'orders_rejectreturnlineitem.md' + - 'Returnlineitem': 'orders_returnlineitem.md' + - 'Returnrefundlineitem': 'orders_returnrefundlineitem.md' + - 'Setlineitemmetadata': 'orders_setlineitemmetadata.md' + - 'Shiplineitems': 'orders_shiplineitems.md' + - 'Updatelineitemshippingdetails': 'orders_updatelineitemshippingdetails.md' + - 'Updatemerchantorderid': 'orders_updatemerchantorderid.md' + - 'Updateshipment': 'orders_updateshipment.md' +- 'Pos': + - 'Custombatch': 'pos_custombatch.md' + - 'Delete': 'pos_delete.md' + - 'Get': 'pos_get.md' + - 'Insert': 'pos_insert.md' + - 'Inventory': 'pos_inventory.md' + - 'List': 'pos_list.md' + - 'Sale': 'pos_sale.md' +- 'Products': + - 'Custombatch': 'products_custombatch.md' + - 'Delete': 'products_delete.md' + - 'Get': 'products_get.md' + - 'Insert': 'products_insert.md' + - 'List': 
'products_list.md' +- 'Productstatuses': + - 'Custombatch': 'productstatuses_custombatch.md' + - 'Get': 'productstatuses_get.md' + - 'List': 'productstatuses_list.md' +- 'Shippingsettings': + - 'Custombatch': 'shippingsettings_custombatch.md' + - 'Get': 'shippingsettings_get.md' + - 'Getsupportedcarriers': 'shippingsettings_getsupportedcarriers.md' + - 'Getsupportedholidays': 'shippingsettings_getsupportedholidays.md' + - 'Getsupportedpickupservices': 'shippingsettings_getsupportedpickupservices.md' + - 'List': 'shippingsettings_list.md' + - 'Update': 'shippingsettings_update.md' theme: readthedocs diff --git a/gen/content2-cli/src/client.rs b/gen/content2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/content2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/content2-cli/src/main.rs b/gen/content2-cli/src/main.rs index 8315c0d7cd..9029555662 100644 --- a/gen/content2-cli/src/main.rs +++ b/gen/content2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_content2::{api, Error, oauth2}; +use google_content2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -110,7 +109,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "overwrite" => { - call = call.overwrite(arg_from_str(value.unwrap_or("false"), err, "overwrite", "boolean")); + call = call.overwrite( value.map(|v| arg_from_str(v, err, "overwrite", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -198,7 +197,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -254,10 +253,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, 
err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -408,7 +407,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -554,7 +553,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -661,7 +660,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -860,7 +859,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "destinations" => { call = call.add_destinations(value.unwrap_or("")); @@ -951,7 +950,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1062,7 +1061,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| 
arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1152,7 +1151,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1240,7 +1239,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1296,7 +1295,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1344,7 +1343,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1505,7 +1504,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1564,7 +1563,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, 
err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1673,7 +1672,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1875,7 +1874,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1963,7 +1962,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2073,7 +2072,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2161,7 +2160,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2324,7 +2323,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", 
"uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2591,7 +2590,7 @@ where call = call.pos_external_account_id(value.unwrap_or("")); }, "pos-data-provider-id" => { - call = call.pos_data_provider_id(value.unwrap_or("")); + call = call.pos_data_provider_id( value.map(|v| arg_from_str(v, err, "pos-data-provider-id", "uint64")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2681,7 +2680,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2933,7 +2932,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "disbursement-start-date" => { call = call.disbursement_start_date(value.unwrap_or("")); @@ -3004,7 +3003,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -3118,7 +3117,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "created-start-date" => { call = call.created_start_date(value.unwrap_or("")); @@ -4128,10 +4127,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call 
= call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "acknowledged" => { - call = call.acknowledged(arg_from_str(value.unwrap_or("false"), err, "acknowledged", "boolean")); + call = call.acknowledged( value.map(|v| arg_from_str(v, err, "acknowledged", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5027,7 +5026,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5083,7 +5082,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5223,7 +5222,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5320,7 +5319,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5470,7 +5469,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let 
mut found = false; @@ -5558,7 +5557,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5614,7 +5613,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5831,7 +5830,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5890,10 +5889,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-invalid-inserted-items" => { - call = call.include_invalid_inserted_items(arg_from_str(value.unwrap_or("false"), err, "include-invalid-inserted-items", "boolean")); + call = call.include_invalid_inserted_items( value.map(|v| arg_from_str(v, err, "include-invalid-inserted-items", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5981,7 +5980,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "include-attributes" => { - call = call.include_attributes(arg_from_str(value.unwrap_or("false"), err, "include-attributes", "boolean")); + call = call.include_attributes( value.map(|v| arg_from_str(v, err, "include-attributes", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6037,7 +6036,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "include-attributes" => { - call = call.include_attributes(arg_from_str(value.unwrap_or("false"), err, "include-attributes", "boolean")); + call = call.include_attributes( value.map(|v| arg_from_str(v, err, "include-attributes", "boolean")).unwrap_or(false)); }, "destinations" => { call = call.add_destinations(value.unwrap_or("")); @@ -6099,13 +6098,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-invalid-inserted-items" => { - call = call.include_invalid_inserted_items(arg_from_str(value.unwrap_or("false"), err, "include-invalid-inserted-items", "boolean")); + call = call.include_invalid_inserted_items( value.map(|v| arg_from_str(v, err, "include-invalid-inserted-items", "boolean")).unwrap_or(false)); }, "include-attributes" => { - call = call.include_attributes(arg_from_str(value.unwrap_or("false"), err, "include-attributes", "boolean")); + call = call.include_attributes( value.map(|v| arg_from_str(v, err, "include-attributes", "boolean")).unwrap_or(false)); }, "destinations" => { call = call.add_destinations(value.unwrap_or("")); @@ -6196,7 +6195,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6463,7 +6462,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( 
value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -6552,7 +6551,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "dry-run" => { - call = call.dry_run(arg_from_str(value.unwrap_or("false"), err, "dry-run", "boolean")); + call = call.dry_run( value.map(|v| arg_from_str(v, err, "dry-run", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -9588,7 +9587,7 @@ async fn main() { let mut app = App::new("content2") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20220303") .about("Manage your product listings and accounts for Google Shopping") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_content2_cli") .arg(Arg::with_name("url") diff --git a/gen/content2/Cargo.toml b/gen/content2/Cargo.toml index 00d16c12c9..f692e53a4b 100644 --- a/gen/content2/Cargo.toml +++ b/gen/content2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-content2" -version = "5.0.2-beta-1+20220303" +version = "5.0.2+20220303" authors = ["Sebastian Thiel "] description = "A complete library to interact with Shopping Content (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/content2" homepage = "https://developers.google.com/shopping-content/v2/" -documentation = "https://docs.rs/google-content2/5.0.2-beta-1+20220303" +documentation = "https://docs.rs/google-content2/5.0.2+20220303" license = "MIT" keywords = ["content", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/content2/README.md b/gen/content2/README.md index 9b86f5bca8..2af524a27c 100644 --- a/gen/content2/README.md +++ b/gen/content2/README.md @@ -5,44 +5,44 @@ DO NOT EDIT ! --> The `google-content2` library allows access to all features of the *Google Shopping Content* service. 
-This documentation was generated from *Shopping Content* crate version *5.0.2-beta-1+20220303*, where *20220303* is the exact revision of the *content:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Shopping Content* crate version *5.0.2+20220303*, where *20220303* is the exact revision of the *content:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Shopping Content* *v2* API can be found at the [official documentation site](https://developers.google.com/shopping-content/v2/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/ShoppingContent) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-content2/5.0.2+20220303/google_content2/ShoppingContent) ... -* [accounts](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::Account) - * [*authinfo*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountAuthinfoCall), [*claimwebsite*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountClaimwebsiteCall), [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountCustombatchCall), [*delete*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountDeleteCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountGetCall), [*insert*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountInsertCall), [*link*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountLinkCall), [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountListCall) and 
[*update*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountUpdateCall) +* [accounts](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::Account) + * [*authinfo*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountAuthinfoCall), [*claimwebsite*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountClaimwebsiteCall), [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountCustombatchCall), [*delete*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountDeleteCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountGetCall), [*insert*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountInsertCall), [*link*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountLinkCall), [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountListCall) and [*update*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountUpdateCall) * accountstatuses - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountstatusCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountstatusGetCall) and [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccountstatusListCall) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountstatusCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountstatusGetCall) and [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccountstatusListCall) * accounttax - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccounttaxCustombatchCall), 
[*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccounttaxGetCall), [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccounttaxListCall) and [*update*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::AccounttaxUpdateCall) -* [datafeeds](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::Datafeed) - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedCustombatchCall), [*delete*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedDeleteCall), [*fetchnow*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedFetchnowCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedGetCall), [*insert*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedInsertCall), [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedListCall) and [*update*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedUpdateCall) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccounttaxCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccounttaxGetCall), [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccounttaxListCall) and [*update*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::AccounttaxUpdateCall) +* [datafeeds](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::Datafeed) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedCustombatchCall), [*delete*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedDeleteCall), 
[*fetchnow*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedFetchnowCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedGetCall), [*insert*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedInsertCall), [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedListCall) and [*update*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedUpdateCall) * datafeedstatuses - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedstatusCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedstatusGetCall) and [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::DatafeedstatusListCall) -* [inventory](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::Inventory) - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::InventoryCustombatchCall) and [*set*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::InventorySetCall) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedstatusCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedstatusGetCall) and [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::DatafeedstatusListCall) +* [inventory](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::Inventory) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::InventoryCustombatchCall) and [*set*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::InventorySetCall) * liasettings - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingCustombatchCall), 
[*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingGetCall), [*getaccessiblegmbaccounts*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingGetaccessiblegmbaccountCall), [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingListCall), [*listposdataproviders*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingListposdataproviderCall), [*requestgmbaccess*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingRequestgmbaccesCall), [*requestinventoryverification*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingRequestinventoryverificationCall), [*setinventoryverificationcontact*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingSetinventoryverificationcontactCall), [*setposdataprovider*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingSetposdataproviderCall) and [*update*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::LiasettingUpdateCall) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingGetCall), [*getaccessiblegmbaccounts*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingGetaccessiblegmbaccountCall), [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingListCall), [*listposdataproviders*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingListposdataproviderCall), [*requestgmbaccess*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingRequestgmbaccesCall), 
[*requestinventoryverification*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingRequestinventoryverificationCall), [*setinventoryverificationcontact*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingSetinventoryverificationcontactCall), [*setposdataprovider*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingSetposdataproviderCall) and [*update*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::LiasettingUpdateCall) * orderinvoices - * [*createchargeinvoice*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderinvoiceCreatechargeinvoiceCall) and [*createrefundinvoice*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderinvoiceCreaterefundinvoiceCall) + * [*createchargeinvoice*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderinvoiceCreatechargeinvoiceCall) and [*createrefundinvoice*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderinvoiceCreaterefundinvoiceCall) * orderreports - * [*listdisbursements*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderreportListdisbursementCall) and [*listtransactions*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderreportListtransactionCall) + * [*listdisbursements*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderreportListdisbursementCall) and [*listtransactions*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderreportListtransactionCall) * orderreturns - * [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderreturnGetCall) and [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderreturnListCall) -* [orders](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::Order) - * 
[*acknowledge*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderAcknowledgeCall), [*advancetestorder*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderAdvancetestorderCall), [*cancel*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderCancelCall), [*cancellineitem*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderCancellineitemCall), [*canceltestorderbycustomer*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderCanceltestorderbycustomerCall), [*createtestorder*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderCreatetestorderCall), [*createtestreturn*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderCreatetestreturnCall), [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderGetCall), [*getbymerchantorderid*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderGetbymerchantorderidCall), [*gettestordertemplate*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderGettestordertemplateCall), [*instorerefundlineitem*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderInstorerefundlineitemCall), [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderListCall), [*refund*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderRefundCall), [*rejectreturnlineitem*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderRejectreturnlineitemCall), [*returnlineitem*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderReturnlineitemCall), 
[*returnrefundlineitem*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderReturnrefundlineitemCall), [*setlineitemmetadata*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderSetlineitemmetadataCall), [*shiplineitems*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderShiplineitemCall), [*updatelineitemshippingdetails*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderUpdatelineitemshippingdetailCall), [*updatemerchantorderid*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderUpdatemerchantorderidCall) and [*updateshipment*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::OrderUpdateshipmentCall) + * [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderreturnGetCall) and [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderreturnListCall) +* [orders](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::Order) + * [*acknowledge*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderAcknowledgeCall), [*advancetestorder*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderAdvancetestorderCall), [*cancel*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderCancelCall), [*cancellineitem*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderCancellineitemCall), [*canceltestorderbycustomer*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderCanceltestorderbycustomerCall), [*createtestorder*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderCreatetestorderCall), [*createtestreturn*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderCreatetestreturnCall), [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderCustombatchCall), 
[*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderGetCall), [*getbymerchantorderid*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderGetbymerchantorderidCall), [*gettestordertemplate*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderGettestordertemplateCall), [*instorerefundlineitem*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderInstorerefundlineitemCall), [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderListCall), [*refund*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderRefundCall), [*rejectreturnlineitem*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderRejectreturnlineitemCall), [*returnlineitem*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderReturnlineitemCall), [*returnrefundlineitem*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderReturnrefundlineitemCall), [*setlineitemmetadata*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderSetlineitemmetadataCall), [*shiplineitems*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderShiplineitemCall), [*updatelineitemshippingdetails*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderUpdatelineitemshippingdetailCall), [*updatemerchantorderid*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderUpdatemerchantorderidCall) and [*updateshipment*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::OrderUpdateshipmentCall) * pos - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::PoCustombatchCall), [*delete*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::PoDeleteCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::PoGetCall), 
[*insert*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::PoInsertCall), [*inventory*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::PoInventoryCall), [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::PoListCall) and [*sale*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::PoSaleCall) -* [products](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::Product) - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ProductCustombatchCall), [*delete*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ProductDeleteCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ProductGetCall), [*insert*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ProductInsertCall) and [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ProductListCall) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::PoCustombatchCall), [*delete*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::PoDeleteCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::PoGetCall), [*insert*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::PoInsertCall), [*inventory*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::PoInventoryCall), [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::PoListCall) and [*sale*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::PoSaleCall) +* [products](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::Product) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ProductCustombatchCall), 
[*delete*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ProductDeleteCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ProductGetCall), [*insert*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ProductInsertCall) and [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ProductListCall) * productstatuses - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ProductstatusCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ProductstatusGetCall) and [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ProductstatusListCall) + * [*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ProductstatusCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ProductstatusGetCall) and [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ProductstatusListCall) * shippingsettings - * [*custombatch*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ShippingsettingCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ShippingsettingGetCall), [*getsupportedcarriers*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ShippingsettingGetsupportedcarrierCall), [*getsupportedholidays*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ShippingsettingGetsupportedholidayCall), [*getsupportedpickupservices*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ShippingsettingGetsupportedpickupserviceCall), [*list*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ShippingsettingListCall) and [*update*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/api::ShippingsettingUpdateCall) + * 
[*custombatch*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ShippingsettingCustombatchCall), [*get*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ShippingsettingGetCall), [*getsupportedcarriers*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ShippingsettingGetsupportedcarrierCall), [*getsupportedholidays*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ShippingsettingGetsupportedholidayCall), [*getsupportedpickupservices*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ShippingsettingGetsupportedpickupserviceCall), [*list*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ShippingsettingListCall) and [*update*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/api::ShippingsettingUpdateCall) @@ -51,17 +51,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/ShoppingContent)** +* **[Hub](https://docs.rs/google-content2/5.0.2+20220303/google_content2/ShoppingContent)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::CallBuilder) -* **[Resources](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::Part)** + * **[Parts](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -174,17 +174,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -194,29 +194,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::Delegate) to the -[Method Builder](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::Delegate) to the +[Method Builder](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::RequestValue) and -[decodable](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::RequestValue) and +[decodable](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-content2/5.0.2-beta-1+20220303/google_content2/client::RequestValue) are moved +* [request values](https://docs.rs/google-content2/5.0.2+20220303/google_content2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/content2/src/api.rs b/gen/content2/src/api.rs index 053df7ae1b..c786396198 100644 --- a/gen/content2/src/api.rs +++ b/gen/content2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> ShoppingContent { ShoppingContent { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://shoppingcontent.googleapis.com/content/v2/".to_string(), _root_url: "https://shoppingcontent.googleapis.com/".to_string(), } @@ -179,7 +179,7 @@ impl<'a, S> ShoppingContent { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/content2/src/client.rs b/gen/content2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/content2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/content2/src/lib.rs b/gen/content2/src/lib.rs index 4439797d52..513c5241cc 100644 --- a/gen/content2/src/lib.rs +++ b/gen/content2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Shopping Content* crate version *5.0.2-beta-1+20220303*, where *20220303* is the exact revision of the *content:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Shopping Content* crate version *5.0.2+20220303*, where *20220303* is the exact revision of the *content:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Shopping Content* *v2* API can be found at the //! [official documentation site](https://developers.google.com/shopping-content/v2/). diff --git a/gen/content2_sandbox-cli/Cargo.toml b/gen/content2_sandbox-cli/Cargo.toml index 4769def3fb..3354399748 100644 --- a/gen/content2_sandbox-cli/Cargo.toml +++ b/gen/content2_sandbox-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-content2_sandbox-cli" -version = "4.0.1+20181009" +version = "5.0.2+20181009" authors = ["Sebastian Thiel "] description = "A complete library to interact with Shopping Content (protocol v2sandbox)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/content2_sandbox-cli" @@ -20,13 +20,13 @@ name = "content2-sandbox" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-content2_sandbox] path = "../content2_sandbox" -version = "4.0.1+20181009" +version = "5.0.2+20181009" + diff --git a/gen/content2_sandbox-cli/README.md b/gen/content2_sandbox-cli/README.md index ad9ce1d313..ba5ebcd8b4 100644 --- a/gen/content2_sandbox-cli/README.md +++ 
b/gen/content2_sandbox-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Shopping Content* API at revision *20181009*. The CLI is at version *4.0.1*. +This documentation was generated from the *Shopping Content* API at revision *20181009*. The CLI is at version *5.0.2*. ```bash content2-sandbox [options] diff --git a/gen/content2_sandbox-cli/mkdocs.yml b/gen/content2_sandbox-cli/mkdocs.yml index c515e5394d..462c2a9ce3 100644 --- a/gen/content2_sandbox-cli/mkdocs.yml +++ b/gen/content2_sandbox-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Shopping Content v4.0.1+20181009 +site_name: Shopping Content v5.0.2+20181009 site_url: http://byron.github.io/google-apis-rs/google-content2_sandbox-cli site_description: A complete library to interact with Shopping Content (protocol v2sandbox) @@ -7,38 +7,42 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/content2_sandbox docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['orderinvoices_createchargeinvoice.md', 'Orderinvoices', 'Createchargeinvoice'] -- ['orderinvoices_createrefundinvoice.md', 'Orderinvoices', 'Createrefundinvoice'] -- ['orderpayments_notifyauthapproved.md', 'Orderpayments', 'Notifyauthapproved'] -- ['orderpayments_notifyauthdeclined.md', 'Orderpayments', 'Notifyauthdeclined'] -- ['orderpayments_notifycharge.md', 'Orderpayments', 'Notifycharge'] -- ['orderpayments_notifyrefund.md', 'Orderpayments', 'Notifyrefund'] -- ['orderreturns_get.md', 'Orderreturns', 'Get'] -- ['orderreturns_list.md', 'Orderreturns', 'List'] -- ['orders_acknowledge.md', 'Orders', 'Acknowledge'] -- ['orders_advancetestorder.md', 'Orders', 'Advancetestorder'] -- ['orders_cancel.md', 'Orders', 'Cancel'] -- ['orders_cancellineitem.md', 'Orders', 'Cancellineitem'] -- ['orders_canceltestorderbycustomer.md', 'Orders', 'Canceltestorderbycustomer'] -- ['orders_createtestorder.md', 'Orders', 
'Createtestorder'] -- ['orders_createtestreturn.md', 'Orders', 'Createtestreturn'] -- ['orders_custombatch.md', 'Orders', 'Custombatch'] -- ['orders_get.md', 'Orders', 'Get'] -- ['orders_getbymerchantorderid.md', 'Orders', 'Getbymerchantorderid'] -- ['orders_gettestordertemplate.md', 'Orders', 'Gettestordertemplate'] -- ['orders_instorerefundlineitem.md', 'Orders', 'Instorerefundlineitem'] -- ['orders_list.md', 'Orders', 'List'] -- ['orders_refund.md', 'Orders', 'Refund'] -- ['orders_rejectreturnlineitem.md', 'Orders', 'Rejectreturnlineitem'] -- ['orders_returnlineitem.md', 'Orders', 'Returnlineitem'] -- ['orders_returnrefundlineitem.md', 'Orders', 'Returnrefundlineitem'] -- ['orders_setlineitemmetadata.md', 'Orders', 'Setlineitemmetadata'] -- ['orders_shiplineitems.md', 'Orders', 'Shiplineitems'] -- ['orders_updatelineitemshippingdetails.md', 'Orders', 'Updatelineitemshippingdetails'] -- ['orders_updatemerchantorderid.md', 'Orders', 'Updatemerchantorderid'] -- ['orders_updateshipment.md', 'Orders', 'Updateshipment'] +nav: +- Home: 'index.md' +- 'Orderinvoices': + - 'Createchargeinvoice': 'orderinvoices_createchargeinvoice.md' + - 'Createrefundinvoice': 'orderinvoices_createrefundinvoice.md' +- 'Orderpayments': + - 'Notifyauthapproved': 'orderpayments_notifyauthapproved.md' + - 'Notifyauthdeclined': 'orderpayments_notifyauthdeclined.md' + - 'Notifycharge': 'orderpayments_notifycharge.md' + - 'Notifyrefund': 'orderpayments_notifyrefund.md' +- 'Orderreturns': + - 'Get': 'orderreturns_get.md' + - 'List': 'orderreturns_list.md' +- 'Orders': + - 'Acknowledge': 'orders_acknowledge.md' + - 'Advancetestorder': 'orders_advancetestorder.md' + - 'Cancel': 'orders_cancel.md' + - 'Cancellineitem': 'orders_cancellineitem.md' + - 'Canceltestorderbycustomer': 'orders_canceltestorderbycustomer.md' + - 'Createtestorder': 'orders_createtestorder.md' + - 'Createtestreturn': 'orders_createtestreturn.md' + - 'Custombatch': 'orders_custombatch.md' + - 'Get': 'orders_get.md' + - 
'Getbymerchantorderid': 'orders_getbymerchantorderid.md' + - 'Gettestordertemplate': 'orders_gettestordertemplate.md' + - 'Instorerefundlineitem': 'orders_instorerefundlineitem.md' + - 'List': 'orders_list.md' + - 'Refund': 'orders_refund.md' + - 'Rejectreturnlineitem': 'orders_rejectreturnlineitem.md' + - 'Returnlineitem': 'orders_returnlineitem.md' + - 'Returnrefundlineitem': 'orders_returnrefundlineitem.md' + - 'Setlineitemmetadata': 'orders_setlineitemmetadata.md' + - 'Shiplineitems': 'orders_shiplineitems.md' + - 'Updatelineitemshippingdetails': 'orders_updatelineitemshippingdetails.md' + - 'Updatemerchantorderid': 'orders_updatemerchantorderid.md' + - 'Updateshipment': 'orders_updateshipment.md' theme: readthedocs diff --git a/gen/content2_sandbox-cli/src/client.rs b/gen/content2_sandbox-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/content2_sandbox-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/content2_sandbox-cli/src/main.rs b/gen/content2_sandbox-cli/src/main.rs index 5ef94c3f87..859383e701 100644 --- a/gen/content2_sandbox-cli/src/main.rs +++ b/gen/content2_sandbox-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_content2_sandbox::{api, Error, oauth2}; +use google_content2_sandbox::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -656,7 +655,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "created-start-date" => { call = call.created_start_date(value.unwrap_or("")); @@ -1665,10 +1664,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "acknowledged" => { - call = call.acknowledged(arg_from_str(value.unwrap_or("false"), err, "acknowledged", "boolean")); + call = call.acknowledged( value.map(|v| arg_from_str(v, err, "acknowledged", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3686,7 +3685,7 @@ async fn main() { let mut app = App::new("content2-sandbox") .author("Sebastian Thiel ") - .version("4.0.1+20181009") + 
.version("5.0.2+20181009") .about("Manages product items, inventory, and Merchant Center accounts for Google Shopping.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_content2_sandbox_cli") .arg(Arg::with_name("url") diff --git a/gen/content2_sandbox/Cargo.toml b/gen/content2_sandbox/Cargo.toml index 57dcf77390..a8d8125d80 100644 --- a/gen/content2_sandbox/Cargo.toml +++ b/gen/content2_sandbox/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-content2_sandbox" -version = "5.0.2-beta-1+20181009" +version = "5.0.2+20181009" authors = ["Sebastian Thiel "] description = "A complete library to interact with Shopping Content (protocol v2sandbox)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/content2_sandbox" homepage = "https://developers.google.com/shopping-content" -documentation = "https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009" +documentation = "https://docs.rs/google-content2_sandbox/5.0.2+20181009" license = "MIT" keywords = ["content", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/content2_sandbox/README.md b/gen/content2_sandbox/README.md index c1eaebdc58..e14f723acf 100644 --- a/gen/content2_sandbox/README.md +++ b/gen/content2_sandbox/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-content2_sandbox` library allows access to all features of the *Google Shopping Content* service. -This documentation was generated from *Shopping Content* crate version *5.0.2-beta-1+20181009*, where *20181009* is the exact revision of the *content:v2sandbox* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Shopping Content* crate version *5.0.2+20181009*, where *20181009* is the exact revision of the *content:v2sandbox* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Shopping Content* *v2_sandbox* API can be found at the [official documentation site](https://developers.google.com/shopping-content). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/ShoppingContent) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/ShoppingContent) ... * orderinvoices - * [*createchargeinvoice*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderinvoiceCreatechargeinvoiceCall) and [*createrefundinvoice*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderinvoiceCreaterefundinvoiceCall) + * [*createchargeinvoice*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderinvoiceCreatechargeinvoiceCall) and [*createrefundinvoice*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderinvoiceCreaterefundinvoiceCall) * orderpayments - * [*notifyauthapproved*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderpaymentNotifyauthapprovedCall), [*notifyauthdeclined*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderpaymentNotifyauthdeclinedCall), [*notifycharge*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderpaymentNotifychargeCall) and [*notifyrefund*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderpaymentNotifyrefundCall) + * [*notifyauthapproved*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderpaymentNotifyauthapprovedCall), 
[*notifyauthdeclined*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderpaymentNotifyauthdeclinedCall), [*notifycharge*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderpaymentNotifychargeCall) and [*notifyrefund*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderpaymentNotifyrefundCall) * orderreturns - * [*get*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderreturnGetCall) and [*list*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderreturnListCall) -* [orders](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::Order) - * [*acknowledge*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderAcknowledgeCall), [*advancetestorder*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderAdvancetestorderCall), [*cancel*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderCancelCall), [*cancellineitem*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderCancellineitemCall), [*canceltestorderbycustomer*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderCanceltestorderbycustomerCall), [*createtestorder*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderCreatetestorderCall), [*createtestreturn*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderCreatetestreturnCall), [*custombatch*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderCustombatchCall), [*get*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderGetCall), 
[*getbymerchantorderid*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderGetbymerchantorderidCall), [*gettestordertemplate*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderGettestordertemplateCall), [*instorerefundlineitem*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderInstorerefundlineitemCall), [*list*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderListCall), [*refund*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderRefundCall), [*rejectreturnlineitem*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderRejectreturnlineitemCall), [*returnlineitem*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderReturnlineitemCall), [*returnrefundlineitem*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderReturnrefundlineitemCall), [*setlineitemmetadata*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderSetlineitemmetadataCall), [*shiplineitems*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderShiplineitemCall), [*updatelineitemshippingdetails*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderUpdatelineitemshippingdetailCall), [*updatemerchantorderid*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderUpdatemerchantorderidCall) and [*updateshipment*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/api::OrderUpdateshipmentCall) + * [*get*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderreturnGetCall) and 
[*list*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderreturnListCall) +* [orders](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::Order) + * [*acknowledge*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderAcknowledgeCall), [*advancetestorder*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderAdvancetestorderCall), [*cancel*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderCancelCall), [*cancellineitem*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderCancellineitemCall), [*canceltestorderbycustomer*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderCanceltestorderbycustomerCall), [*createtestorder*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderCreatetestorderCall), [*createtestreturn*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderCreatetestreturnCall), [*custombatch*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderCustombatchCall), [*get*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderGetCall), [*getbymerchantorderid*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderGetbymerchantorderidCall), [*gettestordertemplate*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderGettestordertemplateCall), [*instorerefundlineitem*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderInstorerefundlineitemCall), [*list*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderListCall), 
[*refund*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderRefundCall), [*rejectreturnlineitem*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderRejectreturnlineitemCall), [*returnlineitem*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderReturnlineitemCall), [*returnrefundlineitem*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderReturnrefundlineitemCall), [*setlineitemmetadata*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderSetlineitemmetadataCall), [*shiplineitems*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderShiplineitemCall), [*updatelineitemshippingdetails*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderUpdatelineitemshippingdetailCall), [*updatemerchantorderid*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderUpdatemerchantorderidCall) and [*updateshipment*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/api::OrderUpdateshipmentCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/ShoppingContent)** +* **[Hub](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/ShoppingContent)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::CallBuilder) +* **[Resources](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::Part)** + * **[Parts](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::CallBuilder)** +* **[Activities](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -152,17 +152,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -172,29 +172,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::Delegate) to the -[Method Builder](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::Delegate) to the +[Method Builder](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::RequestValue) and -[decodable](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::RequestValue) and +[decodable](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-content2_sandbox/5.0.2-beta-1+20181009/google_content2_sandbox/client::RequestValue) are moved +* [request values](https://docs.rs/google-content2_sandbox/5.0.2+20181009/google_content2_sandbox/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/content2_sandbox/src/api.rs b/gen/content2_sandbox/src/api.rs index d84b30bf1d..18df31a6dc 100644 --- a/gen/content2_sandbox/src/api.rs +++ b/gen/content2_sandbox/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> ShoppingContent { ShoppingContent { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/content/v2sandbox/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -146,7 +146,7 @@ impl<'a, S> ShoppingContent { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/content2_sandbox/src/client.rs b/gen/content2_sandbox/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/content2_sandbox/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/content2_sandbox/src/lib.rs b/gen/content2_sandbox/src/lib.rs index c718deb785..9672e44499 100644 --- a/gen/content2_sandbox/src/lib.rs +++ b/gen/content2_sandbox/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Shopping Content* crate version *5.0.2-beta-1+20181009*, where *20181009* is the exact revision of the *content:v2sandbox* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Shopping Content* crate version *5.0.2+20181009*, where *20181009* is the exact revision of the *content:v2sandbox* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Shopping Content* *v2_sandbox* API can be found at the //! [official documentation site](https://developers.google.com/shopping-content). diff --git a/gen/coordinate1-cli/Cargo.toml b/gen/coordinate1-cli/Cargo.toml index 169a37f705..3e9e66a514 100644 --- a/gen/coordinate1-cli/Cargo.toml +++ b/gen/coordinate1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-coordinate1-cli" -version = "4.0.1+20150811" +version = "5.0.2+20150811" authors = ["Sebastian Thiel "] description = "A complete library to interact with coordinate (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/coordinate1-cli" @@ -20,13 +20,13 @@ name = "coordinate1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-coordinate1] path = "../coordinate1" -version = "4.0.1+20150811" +version = "5.0.2+20150811" + diff --git a/gen/coordinate1-cli/README.md b/gen/coordinate1-cli/README.md index 4c52e4b8d5..fa206351fa 100644 --- a/gen/coordinate1-cli/README.md +++ b/gen/coordinate1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *coordinate* API at revision *20150811*. The CLI is at version *4.0.1*. +This documentation was generated from the *coordinate* API at revision *20150811*. The CLI is at version *5.0.2*. ```bash coordinate1 [options] diff --git a/gen/coordinate1-cli/mkdocs.yml b/gen/coordinate1-cli/mkdocs.yml index ff617b9850..03138ee4cd 100644 --- a/gen/coordinate1-cli/mkdocs.yml +++ b/gen/coordinate1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: coordinate v4.0.1+20150811 +site_name: coordinate v5.0.2+20150811 site_url: http://byron.github.io/google-apis-rs/google-coordinate1-cli site_description: A complete library to interact with coordinate (protocol v1) @@ -7,20 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/coordinate1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['custom-field-def_list.md', 'Custom Field Def', 'List'] -- ['jobs_get.md', 'Jobs', 'Get'] -- ['jobs_insert.md', 'Jobs', 'Insert'] -- ['jobs_list.md', 'Jobs', 'List'] -- ['jobs_patch.md', 'Jobs', 'Patch'] -- ['jobs_update.md', 'Jobs', 'Update'] -- ['location_list.md', 'Location', 'List'] -- ['schedule_get.md', 'Schedule', 'Get'] -- ['schedule_patch.md', 'Schedule', 'Patch'] -- ['schedule_update.md', 'Schedule', 'Update'] -- ['team_list.md', 'Team', 'List'] -- ['worker_list.md', 'Worker', 'List'] +nav: +- Home: 'index.md' +- 'Custom Field Def': + - 'List': 'custom-field-def_list.md' +- 'Jobs': + - 'Get': 'jobs_get.md' + - 'Insert': 'jobs_insert.md' + - 'List': 'jobs_list.md' + - 'Patch': 'jobs_patch.md' + - 'Update': 'jobs_update.md' +- 'Location': + - 'List': 'location_list.md' +- 'Schedule': + - 'Get': 'schedule_get.md' + - 'Patch': 'schedule_patch.md' + - 'Update': 'schedule_update.md' +- 'Team': + - 'List': 'team_list.md' +- 'Worker': + - 'List': 'worker_list.md' theme: readthedocs diff --git a/gen/coordinate1-cli/src/client.rs 
b/gen/coordinate1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/coordinate1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/coordinate1-cli/src/main.rs b/gen/coordinate1-cli/src/main.rs index afa69c50b4..d05dfaba6d 100644 --- a/gen/coordinate1-cli/src/main.rs +++ b/gen/coordinate1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_coordinate1::{api, Error, oauth2}; +use google_coordinate1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -281,13 +280,13 @@ where call = call.page_token(value.unwrap_or("")); }, "omit-job-changes" => { - call = call.omit_job_changes(arg_from_str(value.unwrap_or("false"), err, "omit-job-changes", "boolean")); + call = call.omit_job_changes( value.map(|v| arg_from_str(v, err, "omit-job-changes", "boolean")).unwrap_or(false)); }, "min-modified-timestamp-ms" => { - call = call.min_modified_timestamp_ms(value.unwrap_or("")); + call = call.min_modified_timestamp_ms( value.map(|v| arg_from_str(v, err, "min-modified-timestamp-ms", "uint64")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -398,10 +397,10 @@ where call = call.note(value.unwrap_or("")); }, "lng" => { - call = call.lng(arg_from_str(value.unwrap_or("0.0"), err, "lng", "number")); + call = call.lng( value.map(|v| arg_from_str(v, err, "lng", "double")).unwrap_or(0.0)); }, "lat" => { - call = 
call.lat(arg_from_str(value.unwrap_or("0.0"), err, "lat", "number")); + call = call.lat( value.map(|v| arg_from_str(v, err, "lat", "double")).unwrap_or(0.0)); }, "customer-phone-number" => { call = call.customer_phone_number(value.unwrap_or("")); @@ -527,10 +526,10 @@ where call = call.note(value.unwrap_or("")); }, "lng" => { - call = call.lng(arg_from_str(value.unwrap_or("0.0"), err, "lng", "number")); + call = call.lng( value.map(|v| arg_from_str(v, err, "lng", "double")).unwrap_or(0.0)); }, "lat" => { - call = call.lat(arg_from_str(value.unwrap_or("0.0"), err, "lat", "number")); + call = call.lat( value.map(|v| arg_from_str(v, err, "lat", "double")).unwrap_or(0.0)); }, "customer-phone-number" => { call = call.customer_phone_number(value.unwrap_or("")); @@ -604,7 +603,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -749,16 +748,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "uint64")).unwrap_or(0)); }, "end-time" => { - call = call.end_time(value.unwrap_or("")); + call = call.end_time( value.map(|v| arg_from_str(v, err, "end-time", "uint64")).unwrap_or(0)); }, "duration" => { - call = call.duration(value.unwrap_or("")); + call = call.duration( value.map(|v| arg_from_str(v, err, "duration", "uint64")).unwrap_or(0)); }, "all-day" => { - call = call.all_day(arg_from_str(value.unwrap_or("false"), err, "all-day", "boolean")); + call = call.all_day( value.map(|v| arg_from_str(v, err, "all-day", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -851,16 +850,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match 
key { "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "uint64")).unwrap_or(0)); }, "end-time" => { - call = call.end_time(value.unwrap_or("")); + call = call.end_time( value.map(|v| arg_from_str(v, err, "end-time", "uint64")).unwrap_or(0)); }, "duration" => { - call = call.duration(value.unwrap_or("")); + call = call.duration( value.map(|v| arg_from_str(v, err, "duration", "uint64")).unwrap_or(0)); }, "all-day" => { - call = call.all_day(arg_from_str(value.unwrap_or("false"), err, "all-day", "boolean")); + call = call.all_day( value.map(|v| arg_from_str(v, err, "all-day", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -916,13 +915,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "worker" => { - call = call.worker(arg_from_str(value.unwrap_or("false"), err, "worker", "boolean")); + call = call.worker( value.map(|v| arg_from_str(v, err, "worker", "boolean")).unwrap_or(false)); }, "dispatcher" => { - call = call.dispatcher(arg_from_str(value.unwrap_or("false"), err, "dispatcher", "boolean")); + call = call.dispatcher( value.map(|v| arg_from_str(v, err, "dispatcher", "boolean")).unwrap_or(false)); }, "admin" => { - call = call.admin(arg_from_str(value.unwrap_or("false"), err, "admin", "boolean")); + call = call.admin( value.map(|v| arg_from_str(v, err, "admin", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1564,7 +1563,7 @@ async fn main() { let mut app = App::new("coordinate1") .author("Sebastian Thiel ") - .version("4.0.1+20150811") + .version("5.0.2+20150811") .about("Lets you view and manage jobs in a Coordinate team.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_coordinate1_cli") .arg(Arg::with_name("url") diff --git a/gen/coordinate1/Cargo.toml b/gen/coordinate1/Cargo.toml index de3db67e65..b524707aad 100644 --- a/gen/coordinate1/Cargo.toml +++ 
b/gen/coordinate1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-coordinate1" -version = "5.0.2-beta-1+20150811" +version = "5.0.2+20150811" authors = ["Sebastian Thiel "] description = "A complete library to interact with coordinate (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/coordinate1" homepage = "https://developers.google.com/coordinate/" -documentation = "https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811" +documentation = "https://docs.rs/google-coordinate1/5.0.2+20150811" license = "MIT" keywords = ["coordinate", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/coordinate1/README.md b/gen/coordinate1/README.md index b5b8980adc..b24310c4fd 100644 --- a/gen/coordinate1/README.md +++ b/gen/coordinate1/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-coordinate1` library allows access to all features of the *Google coordinate* service. -This documentation was generated from *coordinate* crate version *5.0.2-beta-1+20150811*, where *20150811* is the exact revision of the *coordinate:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *coordinate* crate version *5.0.2+20150811*, where *20150811* is the exact revision of the *coordinate:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *coordinate* *v1* API can be found at the [official documentation site](https://developers.google.com/coordinate/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/Coordinate) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/Coordinate) ... 
-* [custom field def](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::CustomFieldDef) - * [*list*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::CustomFieldDefListCall) -* [jobs](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::Job) - * [*get*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::JobGetCall), [*insert*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::JobInsertCall), [*list*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::JobListCall), [*patch*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::JobPatchCall) and [*update*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::JobUpdateCall) -* [location](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::Location) - * [*list*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::LocationListCall) -* [schedule](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::Schedule) - * [*get*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::ScheduleGetCall), [*patch*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::SchedulePatchCall) and [*update*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::ScheduleUpdateCall) -* [team](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::Team) - * [*list*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::TeamListCall) -* [worker](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::Worker) - * [*list*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/api::WorkerListCall) +* [custom field 
def](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::CustomFieldDef) + * [*list*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::CustomFieldDefListCall) +* [jobs](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::Job) + * [*get*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::JobGetCall), [*insert*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::JobInsertCall), [*list*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::JobListCall), [*patch*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::JobPatchCall) and [*update*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::JobUpdateCall) +* [location](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::Location) + * [*list*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::LocationListCall) +* [schedule](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::Schedule) + * [*get*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::ScheduleGetCall), [*patch*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::SchedulePatchCall) and [*update*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::ScheduleUpdateCall) +* [team](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::Team) + * [*list*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::TeamListCall) +* [worker](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::Worker) + * [*list*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/api::WorkerListCall) @@ -33,17 +33,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/Coordinate)** +* **[Hub](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/Coordinate)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::CallBuilder) -* **[Resources](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::CallBuilder) +* **[Resources](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::Part)** + * **[Parts](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -148,17 +148,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -168,29 +168,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::Delegate) to the -[Method Builder](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::Delegate) to the +[Method Builder](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::RequestValue) and -[decodable](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::RequestValue) and +[decodable](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-coordinate1/5.0.2-beta-1+20150811/google_coordinate1/client::RequestValue) are moved +* [request values](https://docs.rs/google-coordinate1/5.0.2+20150811/google_coordinate1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/coordinate1/src/api.rs b/gen/coordinate1/src/api.rs index 769ec8a661..5f8f75410b 100644 --- a/gen/coordinate1/src/api.rs +++ b/gen/coordinate1/src/api.rs @@ -139,7 +139,7 @@ impl<'a, S> Coordinate { Coordinate { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/coordinate/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -165,7 +165,7 @@ impl<'a, S> Coordinate { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/coordinate1/src/client.rs b/gen/coordinate1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/coordinate1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/coordinate1/src/lib.rs b/gen/coordinate1/src/lib.rs index c4ab58e619..ab491a144d 100644 --- a/gen/coordinate1/src/lib.rs +++ b/gen/coordinate1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *coordinate* crate version *5.0.2-beta-1+20150811*, where *20150811* is the exact revision of the *coordinate:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *coordinate* crate version *5.0.2+20150811*, where *20150811* is the exact revision of the *coordinate:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *coordinate* *v1* API can be found at the //! [official documentation site](https://developers.google.com/coordinate/). diff --git a/gen/customsearch1-cli/Cargo.toml b/gen/customsearch1-cli/Cargo.toml index 301e544795..a7081ea68e 100644 --- a/gen/customsearch1-cli/Cargo.toml +++ b/gen/customsearch1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-customsearch1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with CustomSearch API (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/customsearch1-cli" @@ -20,13 +20,13 @@ name = "customsearch1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-customsearch1] path = "../customsearch1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/customsearch1-cli/README.md b/gen/customsearch1-cli/README.md index e295129c29..bdd5ffbd41 100644 --- a/gen/customsearch1-cli/README.md +++ b/gen/customsearch1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *CustomSearch API* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *CustomSearch API* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash customsearch1 [options] diff --git a/gen/customsearch1-cli/mkdocs.yml b/gen/customsearch1-cli/mkdocs.yml index f83a0ef3cc..de136fa309 100644 --- a/gen/customsearch1-cli/mkdocs.yml +++ b/gen/customsearch1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: CustomSearch API v4.0.1+20220305 +site_name: CustomSearch API v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-customsearch1-cli site_description: A complete library to interact with CustomSearch API (protocol v1) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/customsearch1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['cse_list.md', 'Cse', 'List'] -- ['cse_siterestrict-list.md', 'Cse', 'Siterestrict List'] +nav: +- Home: 'index.md' +- 'Cse': + - 'List': 'cse_list.md' + - 'Siterestrict List': 'cse_siterestrict-list.md' theme: readthedocs diff --git a/gen/customsearch1-cli/src/client.rs b/gen/customsearch1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/customsearch1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// 
I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/customsearch1-cli/src/main.rs b/gen/customsearch1-cli/src/main.rs index fdc25dbd62..9192265359 100644 --- a/gen/customsearch1-cli/src/main.rs +++ b/gen/customsearch1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_customsearch1::{api, Error, oauth2}; +use google_customsearch1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,7 +57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start" => { - call = call.start(arg_from_str(value.unwrap_or("-0"), err, "start", "integer")); + call = call.start( value.map(|v| arg_from_str(v, err, "start", "uint32")).unwrap_or(0)); }, "sort" => { call = call.sort(value.unwrap_or("")); @@ -88,7 +87,7 @@ where call = call.or_terms(value.unwrap_or("")); }, "num" => { - call = call.num(arg_from_str(value.unwrap_or("-0"), err, "num", "integer")); + call = call.num( value.map(|v| arg_from_str(v, err, "num", "int32")).unwrap_or(-0)); }, "lr" => { call = call.lr(value.unwrap_or("")); @@ -201,7 +200,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start" => { - call = call.start(arg_from_str(value.unwrap_or("-0"), err, "start", "integer")); + call = call.start( value.map(|v| arg_from_str(v, err, "start", "uint32")).unwrap_or(0)); }, "sort" => { call = call.sort(value.unwrap_or("")); @@ -231,7 +230,7 @@ where call = call.or_terms(value.unwrap_or("")); }, "num" => { - call = 
call.num(arg_from_str(value.unwrap_or("-0"), err, "num", "integer")); + call = call.num( value.map(|v| arg_from_str(v, err, "num", "int32")).unwrap_or(-0)); }, "lr" => { call = call.lr(value.unwrap_or("")); @@ -468,7 +467,7 @@ async fn main() { let mut app = App::new("customsearch1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("Searches over a website or collection of websites") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_customsearch1_cli") .arg(Arg::with_name("folder") diff --git a/gen/customsearch1/Cargo.toml b/gen/customsearch1/Cargo.toml index f976d26124..fc5a24dd12 100644 --- a/gen/customsearch1/Cargo.toml +++ b/gen/customsearch1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-customsearch1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with CustomSearch API (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/customsearch1" homepage = "https://developers.google.com/custom-search/v1/introduction" -documentation = "https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-customsearch1/5.0.2+20230123" license = "MIT" keywords = ["customsearch", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/customsearch1/README.md b/gen/customsearch1/README.md index 90f7209ce7..f9d1396c44 100644 --- a/gen/customsearch1/README.md +++ b/gen/customsearch1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-customsearch1` library allows access to all features of the *Google CustomSearch API* service. -This documentation was generated from *CustomSearch API* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *customsearch:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *CustomSearch API* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *customsearch:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *CustomSearch API* *v1* API can be found at the [official documentation site](https://developers.google.com/custom-search/v1/introduction). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/CustomSearchAPI) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/CustomSearchAPI) ... * cse - * [*list*](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/api::CseListCall) and [*siterestrict list*](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/api::CseSiterestrictListCall) + * [*list*](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/api::CseListCall) and [*siterestrict list*](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/api::CseSiterestrictListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/CustomSearchAPI)** +* **[Hub](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/CustomSearchAPI)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::CallBuilder) +* **[Resources](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::Part)** + * **[Parts](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -150,17 +150,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -170,29 +170,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::Delegate) to the -[Method Builder](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::Delegate) to the +[Method Builder](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::RequestValue) and -[decodable](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::RequestValue) and +[decodable](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-customsearch1/5.0.2-beta-1+20230123/google_customsearch1/client::RequestValue) are moved +* [request values](https://docs.rs/google-customsearch1/5.0.2+20230123/google_customsearch1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/customsearch1/src/api.rs b/gen/customsearch1/src/api.rs index 11c6e8b4c8..9763bccea2 100644 --- a/gen/customsearch1/src/api.rs +++ b/gen/customsearch1/src/api.rs @@ -128,7 +128,7 @@ impl<'a, S> CustomSearchAPI { CustomSearchAPI { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://customsearch.googleapis.com/".to_string(), _root_url: "https://customsearch.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> CustomSearchAPI { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/customsearch1/src/client.rs b/gen/customsearch1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/customsearch1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/customsearch1/src/lib.rs b/gen/customsearch1/src/lib.rs index 9eafb52738..39df969ac5 100644 --- a/gen/customsearch1/src/lib.rs +++ b/gen/customsearch1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *CustomSearch API* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *customsearch:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *CustomSearch API* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *customsearch:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *CustomSearch API* *v1* API can be found at the //! [official documentation site](https://developers.google.com/custom-search/v1/introduction). diff --git a/gen/datacatalog1-cli/Cargo.toml b/gen/datacatalog1-cli/Cargo.toml index 4c754c7d7e..d4d17e26cc 100644 --- a/gen/datacatalog1-cli/Cargo.toml +++ b/gen/datacatalog1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datacatalog1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Catalog (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datacatalog1-cli" @@ -20,13 +20,13 @@ name = "datacatalog1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datacatalog1] path = "../datacatalog1" -version = "4.0.1+20220224" +version = "5.0.2+20230117" + diff --git a/gen/datacatalog1-cli/README.md b/gen/datacatalog1-cli/README.md index 654682bfca..dc0b9f7668 100644 --- a/gen/datacatalog1-cli/README.md +++ b/gen/datacatalog1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Data Catalog* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Data Catalog* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash datacatalog1 [options] @@ -40,6 +40,7 @@ datacatalog1 [options] locations-entry-groups-entries-delete [-p ]... [-o ] locations-entry-groups-entries-get [-p ]... [-o ] locations-entry-groups-entries-get-iam-policy (-r )... [-p ]... [-o ] + locations-entry-groups-entries-import (-r )... [-p ]... [-o ] locations-entry-groups-entries-list [-p ]... [-o ] locations-entry-groups-entries-modify-entry-contacts (-r )... [-p ]... [-o ] locations-entry-groups-entries-modify-entry-overview (-r )... [-p ]... [-o ] @@ -61,6 +62,10 @@ datacatalog1 [options] locations-entry-groups-tags-list [-p ]... [-o ] locations-entry-groups-tags-patch (-r )... [-p ]... [-o ] locations-entry-groups-test-iam-permissions (-r )... [-p ]... [-o ] + locations-operations-cancel [-p ]... [-o ] + locations-operations-delete [-p ]... [-o ] + locations-operations-get [-p ]... [-o ] + locations-operations-list [-p ]... [-o ] locations-tag-templates-create (-r )... [-p ]... [-o ] locations-tag-templates-delete [-p ]... [-o ] locations-tag-templates-fields-create (-r )... [-p ]... 
[-o ] diff --git a/gen/datacatalog1-cli/mkdocs.yml b/gen/datacatalog1-cli/mkdocs.yml index 4fe491218a..2978ba7c4f 100644 --- a/gen/datacatalog1-cli/mkdocs.yml +++ b/gen/datacatalog1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Data Catalog v4.0.1+20220224 +site_name: Data Catalog v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-datacatalog1-cli site_description: A complete library to interact with Data Catalog (protocol v1) @@ -7,68 +7,76 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datacatalog1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['catalog_search.md', 'Catalog', 'Search'] -- ['entries_lookup.md', 'Entries', 'Lookup'] -- ['projects_locations-entry-groups-create.md', 'Projects', 'Locations Entry Groups Create'] -- ['projects_locations-entry-groups-delete.md', 'Projects', 'Locations Entry Groups Delete'] -- ['projects_locations-entry-groups-entries-create.md', 'Projects', 'Locations Entry Groups Entries Create'] -- ['projects_locations-entry-groups-entries-delete.md', 'Projects', 'Locations Entry Groups Entries Delete'] -- ['projects_locations-entry-groups-entries-get.md', 'Projects', 'Locations Entry Groups Entries Get'] -- ['projects_locations-entry-groups-entries-get-iam-policy.md', 'Projects', 'Locations Entry Groups Entries Get Iam Policy'] -- ['projects_locations-entry-groups-entries-list.md', 'Projects', 'Locations Entry Groups Entries List'] -- ['projects_locations-entry-groups-entries-modify-entry-contacts.md', 'Projects', 'Locations Entry Groups Entries Modify Entry Contacts'] -- ['projects_locations-entry-groups-entries-modify-entry-overview.md', 'Projects', 'Locations Entry Groups Entries Modify Entry Overview'] -- ['projects_locations-entry-groups-entries-patch.md', 'Projects', 'Locations Entry Groups Entries Patch'] -- ['projects_locations-entry-groups-entries-star.md', 'Projects', 'Locations Entry Groups Entries Star'] -- 
['projects_locations-entry-groups-entries-tags-create.md', 'Projects', 'Locations Entry Groups Entries Tags Create'] -- ['projects_locations-entry-groups-entries-tags-delete.md', 'Projects', 'Locations Entry Groups Entries Tags Delete'] -- ['projects_locations-entry-groups-entries-tags-list.md', 'Projects', 'Locations Entry Groups Entries Tags List'] -- ['projects_locations-entry-groups-entries-tags-patch.md', 'Projects', 'Locations Entry Groups Entries Tags Patch'] -- ['projects_locations-entry-groups-entries-test-iam-permissions.md', 'Projects', 'Locations Entry Groups Entries Test Iam Permissions'] -- ['projects_locations-entry-groups-entries-unstar.md', 'Projects', 'Locations Entry Groups Entries Unstar'] -- ['projects_locations-entry-groups-get.md', 'Projects', 'Locations Entry Groups Get'] -- ['projects_locations-entry-groups-get-iam-policy.md', 'Projects', 'Locations Entry Groups Get Iam Policy'] -- ['projects_locations-entry-groups-list.md', 'Projects', 'Locations Entry Groups List'] -- ['projects_locations-entry-groups-patch.md', 'Projects', 'Locations Entry Groups Patch'] -- ['projects_locations-entry-groups-set-iam-policy.md', 'Projects', 'Locations Entry Groups Set Iam Policy'] -- ['projects_locations-entry-groups-tags-create.md', 'Projects', 'Locations Entry Groups Tags Create'] -- ['projects_locations-entry-groups-tags-delete.md', 'Projects', 'Locations Entry Groups Tags Delete'] -- ['projects_locations-entry-groups-tags-list.md', 'Projects', 'Locations Entry Groups Tags List'] -- ['projects_locations-entry-groups-tags-patch.md', 'Projects', 'Locations Entry Groups Tags Patch'] -- ['projects_locations-entry-groups-test-iam-permissions.md', 'Projects', 'Locations Entry Groups Test Iam Permissions'] -- ['projects_locations-tag-templates-create.md', 'Projects', 'Locations Tag Templates Create'] -- ['projects_locations-tag-templates-delete.md', 'Projects', 'Locations Tag Templates Delete'] -- ['projects_locations-tag-templates-fields-create.md', 
'Projects', 'Locations Tag Templates Fields Create'] -- ['projects_locations-tag-templates-fields-delete.md', 'Projects', 'Locations Tag Templates Fields Delete'] -- ['projects_locations-tag-templates-fields-enum-values-rename.md', 'Projects', 'Locations Tag Templates Fields Enum Values Rename'] -- ['projects_locations-tag-templates-fields-patch.md', 'Projects', 'Locations Tag Templates Fields Patch'] -- ['projects_locations-tag-templates-fields-rename.md', 'Projects', 'Locations Tag Templates Fields Rename'] -- ['projects_locations-tag-templates-get.md', 'Projects', 'Locations Tag Templates Get'] -- ['projects_locations-tag-templates-get-iam-policy.md', 'Projects', 'Locations Tag Templates Get Iam Policy'] -- ['projects_locations-tag-templates-patch.md', 'Projects', 'Locations Tag Templates Patch'] -- ['projects_locations-tag-templates-set-iam-policy.md', 'Projects', 'Locations Tag Templates Set Iam Policy'] -- ['projects_locations-tag-templates-test-iam-permissions.md', 'Projects', 'Locations Tag Templates Test Iam Permissions'] -- ['projects_locations-taxonomies-create.md', 'Projects', 'Locations Taxonomies Create'] -- ['projects_locations-taxonomies-delete.md', 'Projects', 'Locations Taxonomies Delete'] -- ['projects_locations-taxonomies-export.md', 'Projects', 'Locations Taxonomies Export'] -- ['projects_locations-taxonomies-get.md', 'Projects', 'Locations Taxonomies Get'] -- ['projects_locations-taxonomies-get-iam-policy.md', 'Projects', 'Locations Taxonomies Get Iam Policy'] -- ['projects_locations-taxonomies-import.md', 'Projects', 'Locations Taxonomies Import'] -- ['projects_locations-taxonomies-list.md', 'Projects', 'Locations Taxonomies List'] -- ['projects_locations-taxonomies-patch.md', 'Projects', 'Locations Taxonomies Patch'] -- ['projects_locations-taxonomies-policy-tags-create.md', 'Projects', 'Locations Taxonomies Policy Tags Create'] -- ['projects_locations-taxonomies-policy-tags-delete.md', 'Projects', 'Locations Taxonomies Policy Tags Delete'] 
-- ['projects_locations-taxonomies-policy-tags-get.md', 'Projects', 'Locations Taxonomies Policy Tags Get'] -- ['projects_locations-taxonomies-policy-tags-get-iam-policy.md', 'Projects', 'Locations Taxonomies Policy Tags Get Iam Policy'] -- ['projects_locations-taxonomies-policy-tags-list.md', 'Projects', 'Locations Taxonomies Policy Tags List'] -- ['projects_locations-taxonomies-policy-tags-patch.md', 'Projects', 'Locations Taxonomies Policy Tags Patch'] -- ['projects_locations-taxonomies-policy-tags-set-iam-policy.md', 'Projects', 'Locations Taxonomies Policy Tags Set Iam Policy'] -- ['projects_locations-taxonomies-policy-tags-test-iam-permissions.md', 'Projects', 'Locations Taxonomies Policy Tags Test Iam Permissions'] -- ['projects_locations-taxonomies-replace.md', 'Projects', 'Locations Taxonomies Replace'] -- ['projects_locations-taxonomies-set-iam-policy.md', 'Projects', 'Locations Taxonomies Set Iam Policy'] -- ['projects_locations-taxonomies-test-iam-permissions.md', 'Projects', 'Locations Taxonomies Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Catalog': + - 'Search': 'catalog_search.md' +- 'Entries': + - 'Lookup': 'entries_lookup.md' +- 'Projects': + - 'Locations Entry Groups Create': 'projects_locations-entry-groups-create.md' + - 'Locations Entry Groups Delete': 'projects_locations-entry-groups-delete.md' + - 'Locations Entry Groups Entries Create': 'projects_locations-entry-groups-entries-create.md' + - 'Locations Entry Groups Entries Delete': 'projects_locations-entry-groups-entries-delete.md' + - 'Locations Entry Groups Entries Get': 'projects_locations-entry-groups-entries-get.md' + - 'Locations Entry Groups Entries Get Iam Policy': 'projects_locations-entry-groups-entries-get-iam-policy.md' + - 'Locations Entry Groups Entries Import': 'projects_locations-entry-groups-entries-import.md' + - 'Locations Entry Groups Entries List': 'projects_locations-entry-groups-entries-list.md' + - 'Locations Entry Groups Entries Modify Entry Contacts': 
'projects_locations-entry-groups-entries-modify-entry-contacts.md' + - 'Locations Entry Groups Entries Modify Entry Overview': 'projects_locations-entry-groups-entries-modify-entry-overview.md' + - 'Locations Entry Groups Entries Patch': 'projects_locations-entry-groups-entries-patch.md' + - 'Locations Entry Groups Entries Star': 'projects_locations-entry-groups-entries-star.md' + - 'Locations Entry Groups Entries Tags Create': 'projects_locations-entry-groups-entries-tags-create.md' + - 'Locations Entry Groups Entries Tags Delete': 'projects_locations-entry-groups-entries-tags-delete.md' + - 'Locations Entry Groups Entries Tags List': 'projects_locations-entry-groups-entries-tags-list.md' + - 'Locations Entry Groups Entries Tags Patch': 'projects_locations-entry-groups-entries-tags-patch.md' + - 'Locations Entry Groups Entries Test Iam Permissions': 'projects_locations-entry-groups-entries-test-iam-permissions.md' + - 'Locations Entry Groups Entries Unstar': 'projects_locations-entry-groups-entries-unstar.md' + - 'Locations Entry Groups Get': 'projects_locations-entry-groups-get.md' + - 'Locations Entry Groups Get Iam Policy': 'projects_locations-entry-groups-get-iam-policy.md' + - 'Locations Entry Groups List': 'projects_locations-entry-groups-list.md' + - 'Locations Entry Groups Patch': 'projects_locations-entry-groups-patch.md' + - 'Locations Entry Groups Set Iam Policy': 'projects_locations-entry-groups-set-iam-policy.md' + - 'Locations Entry Groups Tags Create': 'projects_locations-entry-groups-tags-create.md' + - 'Locations Entry Groups Tags Delete': 'projects_locations-entry-groups-tags-delete.md' + - 'Locations Entry Groups Tags List': 'projects_locations-entry-groups-tags-list.md' + - 'Locations Entry Groups Tags Patch': 'projects_locations-entry-groups-tags-patch.md' + - 'Locations Entry Groups Test Iam Permissions': 'projects_locations-entry-groups-test-iam-permissions.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 
'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Tag Templates Create': 'projects_locations-tag-templates-create.md' + - 'Locations Tag Templates Delete': 'projects_locations-tag-templates-delete.md' + - 'Locations Tag Templates Fields Create': 'projects_locations-tag-templates-fields-create.md' + - 'Locations Tag Templates Fields Delete': 'projects_locations-tag-templates-fields-delete.md' + - 'Locations Tag Templates Fields Enum Values Rename': 'projects_locations-tag-templates-fields-enum-values-rename.md' + - 'Locations Tag Templates Fields Patch': 'projects_locations-tag-templates-fields-patch.md' + - 'Locations Tag Templates Fields Rename': 'projects_locations-tag-templates-fields-rename.md' + - 'Locations Tag Templates Get': 'projects_locations-tag-templates-get.md' + - 'Locations Tag Templates Get Iam Policy': 'projects_locations-tag-templates-get-iam-policy.md' + - 'Locations Tag Templates Patch': 'projects_locations-tag-templates-patch.md' + - 'Locations Tag Templates Set Iam Policy': 'projects_locations-tag-templates-set-iam-policy.md' + - 'Locations Tag Templates Test Iam Permissions': 'projects_locations-tag-templates-test-iam-permissions.md' + - 'Locations Taxonomies Create': 'projects_locations-taxonomies-create.md' + - 'Locations Taxonomies Delete': 'projects_locations-taxonomies-delete.md' + - 'Locations Taxonomies Export': 'projects_locations-taxonomies-export.md' + - 'Locations Taxonomies Get': 'projects_locations-taxonomies-get.md' + - 'Locations Taxonomies Get Iam Policy': 'projects_locations-taxonomies-get-iam-policy.md' + - 'Locations Taxonomies Import': 'projects_locations-taxonomies-import.md' + - 'Locations Taxonomies List': 'projects_locations-taxonomies-list.md' + - 'Locations Taxonomies Patch': 'projects_locations-taxonomies-patch.md' + - 'Locations 
Taxonomies Policy Tags Create': 'projects_locations-taxonomies-policy-tags-create.md' + - 'Locations Taxonomies Policy Tags Delete': 'projects_locations-taxonomies-policy-tags-delete.md' + - 'Locations Taxonomies Policy Tags Get': 'projects_locations-taxonomies-policy-tags-get.md' + - 'Locations Taxonomies Policy Tags Get Iam Policy': 'projects_locations-taxonomies-policy-tags-get-iam-policy.md' + - 'Locations Taxonomies Policy Tags List': 'projects_locations-taxonomies-policy-tags-list.md' + - 'Locations Taxonomies Policy Tags Patch': 'projects_locations-taxonomies-policy-tags-patch.md' + - 'Locations Taxonomies Policy Tags Set Iam Policy': 'projects_locations-taxonomies-policy-tags-set-iam-policy.md' + - 'Locations Taxonomies Policy Tags Test Iam Permissions': 'projects_locations-taxonomies-policy-tags-test-iam-permissions.md' + - 'Locations Taxonomies Replace': 'projects_locations-taxonomies-replace.md' + - 'Locations Taxonomies Set Iam Policy': 'projects_locations-taxonomies-set-iam-policy.md' + - 'Locations Taxonomies Test Iam Permissions': 'projects_locations-taxonomies-test-iam-permissions.md' theme: readthedocs diff --git a/gen/datacatalog1-cli/src/client.rs b/gen/datacatalog1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datacatalog1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub 
enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datacatalog1-cli/src/main.rs b/gen/datacatalog1-cli/src/main.rs index 365978f375..c4086c2b06 100644 --- a/gen/datacatalog1-cli/src/main.rs +++ b/gen/datacatalog1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datacatalog1::{api, Error, oauth2}; +use google_datacatalog1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -308,7 +307,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -398,6 +397,9 @@ where "data-source-connection-spec.bigquery-connection-spec.cloud-sql.type" => Some(("dataSourceConnectionSpec.bigqueryConnectionSpec.cloudSql.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "data-source-connection-spec.bigquery-connection-spec.connection-type" => Some(("dataSourceConnectionSpec.bigqueryConnectionSpec.connectionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "data-source-connection-spec.bigquery-connection-spec.has-credential" => Some(("dataSourceConnectionSpec.bigqueryConnectionSpec.hasCredential", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "database-table-spec.database-view-spec.base-table" => 
Some(("databaseTableSpec.databaseViewSpec.baseTable", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database-table-spec.database-view-spec.sql-query" => Some(("databaseTableSpec.databaseViewSpec.sqlQuery", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database-table-spec.database-view-spec.view-type" => Some(("databaseTableSpec.databaseViewSpec.viewType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "database-table-spec.dataplex-table.dataplex-spec.asset" => Some(("databaseTableSpec.dataplexTable.dataplexSpec.asset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "database-table-spec.dataplex-table.dataplex-spec.compression-format" => Some(("databaseTableSpec.dataplexTable.dataplexSpec.compressionFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "database-table-spec.dataplex-table.dataplex-spec.data-format.avro.text" => Some(("databaseTableSpec.dataplexTable.dataplexSpec.dataFormat.avro.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -419,6 +421,12 @@ where "integrated-system" => Some(("integratedSystem", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "linked-resource" => Some(("linkedResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-instance-display-name" => Some(("lookerSystemSpec.parentInstanceDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-instance-id" => Some(("lookerSystemSpec.parentInstanceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-model-display-name" => Some(("lookerSystemSpec.parentModelDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-model-id" 
=> Some(("lookerSystemSpec.parentModelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-view-display-name" => Some(("lookerSystemSpec.parentViewDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-view-id" => Some(("lookerSystemSpec.parentViewId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "personal-details.star-time" => Some(("personalDetails.starTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "personal-details.starred" => Some(("personalDetails.starred", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -430,12 +438,16 @@ where "source-system-timestamps.create-time" => Some(("sourceSystemTimestamps.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-system-timestamps.expire-time" => Some(("sourceSystemTimestamps.expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-system-timestamps.update-time" => Some(("sourceSystemTimestamps.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sql-database-system-spec.database-version" => Some(("sqlDatabaseSystemSpec.databaseVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sql-database-system-spec.instance-host" => Some(("sqlDatabaseSystemSpec.instanceHost", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sql-database-system-spec.sql-engine" => Some(("sqlDatabaseSystemSpec.sqlEngine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "usage-signal.favorite-count" => Some(("usageSignal.favoriteCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"usage-signal.update-time" => Some(("usageSignal.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-specified-system" => Some(("userSpecifiedSystem", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-specified-type" => Some(("userSpecifiedType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["asset", "avro", "bigquery-connection-spec", "bigquery-date-sharded-spec", "bigquery-routine-spec", "bigquery-table-spec", "business-context", "cloud-sql", "compression-format", "connection-type", "create-time", "data-format", "data-source", "data-source-connection-spec", "database", "database-table-spec", "dataplex-fileset", "dataplex-spec", "dataplex-table", "dataset", "definition-body", "description", "display-name", "entry-overview", "expire-time", "file-pattern", "file-patterns", "file-type", "fileset-spec", "fully-qualified-name", "gcs-fileset-spec", "grouped-entry", "has-credential", "imported-libraries", "instance-id", "integrated-system", "labels", "language", "latest-shard-resource", "linked-resource", "name", "overview", "personal-details", "project-id", "protobuf", "resource", "return-type", "routine-spec", "routine-type", "service", "shard-count", "source-entry", "source-system-timestamps", "star-time", "starred", "storage-properties", "table-prefix", "table-source-type", "table-spec", "text", "thrift", "type", "update-time", "usage-signal", "user-managed", "user-specified-system", "user-specified-type", "view-query", "view-spec"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["asset", "avro", "base-table", "bigquery-connection-spec", "bigquery-date-sharded-spec", "bigquery-routine-spec", "bigquery-table-spec", "business-context", "cloud-sql", "compression-format", "connection-type", "create-time", "data-format", "data-source", "data-source-connection-spec", "database", "database-table-spec", 
"database-version", "database-view-spec", "dataplex-fileset", "dataplex-spec", "dataplex-table", "dataset", "definition-body", "description", "display-name", "entry-overview", "expire-time", "favorite-count", "file-pattern", "file-patterns", "file-type", "fileset-spec", "fully-qualified-name", "gcs-fileset-spec", "grouped-entry", "has-credential", "imported-libraries", "instance-host", "instance-id", "integrated-system", "labels", "language", "latest-shard-resource", "linked-resource", "looker-system-spec", "name", "overview", "parent-instance-display-name", "parent-instance-id", "parent-model-display-name", "parent-model-id", "parent-view-display-name", "parent-view-id", "personal-details", "project-id", "protobuf", "resource", "return-type", "routine-spec", "routine-type", "service", "shard-count", "source-entry", "source-system-timestamps", "sql-database-system-spec", "sql-engine", "sql-query", "star-time", "starred", "storage-properties", "table-prefix", "table-source-type", "table-spec", "text", "thrift", "type", "update-time", "usage-signal", "user-managed", "user-specified-system", "user-specified-type", "view-query", "view-spec", "view-type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -688,6 +700,91 @@ where } } + async fn _projects_locations_entry_groups_entries_import(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if 
err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "gcs-bucket-path" => Some(("gcsBucketPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["gcs-bucket-path"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDatacatalogV1ImportEntriesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_entry_groups_entries_import(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() 
+ } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_entry_groups_entries_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_entry_groups_entries_list(opt.value_of("parent").unwrap_or("")); @@ -695,13 +792,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -960,6 +1057,9 @@ where "data-source-connection-spec.bigquery-connection-spec.cloud-sql.type" => Some(("dataSourceConnectionSpec.bigqueryConnectionSpec.cloudSql.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "data-source-connection-spec.bigquery-connection-spec.connection-type" => Some(("dataSourceConnectionSpec.bigqueryConnectionSpec.connectionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "data-source-connection-spec.bigquery-connection-spec.has-credential" => Some(("dataSourceConnectionSpec.bigqueryConnectionSpec.hasCredential", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "database-table-spec.database-view-spec.base-table" => Some(("databaseTableSpec.databaseViewSpec.baseTable", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "database-table-spec.database-view-spec.sql-query" => Some(("databaseTableSpec.databaseViewSpec.sqlQuery", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database-table-spec.database-view-spec.view-type" => Some(("databaseTableSpec.databaseViewSpec.viewType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "database-table-spec.dataplex-table.dataplex-spec.asset" => Some(("databaseTableSpec.dataplexTable.dataplexSpec.asset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "database-table-spec.dataplex-table.dataplex-spec.compression-format" => Some(("databaseTableSpec.dataplexTable.dataplexSpec.compressionFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "database-table-spec.dataplex-table.dataplex-spec.data-format.avro.text" => Some(("databaseTableSpec.dataplexTable.dataplexSpec.dataFormat.avro.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -981,6 +1081,12 @@ where "integrated-system" => Some(("integratedSystem", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "linked-resource" => Some(("linkedResource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-instance-display-name" => Some(("lookerSystemSpec.parentInstanceDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-instance-id" => Some(("lookerSystemSpec.parentInstanceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-model-display-name" => Some(("lookerSystemSpec.parentModelDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-model-id" => Some(("lookerSystemSpec.parentModelId", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-view-display-name" => Some(("lookerSystemSpec.parentViewDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "looker-system-spec.parent-view-id" => Some(("lookerSystemSpec.parentViewId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "personal-details.star-time" => Some(("personalDetails.starTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "personal-details.starred" => Some(("personalDetails.starred", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -992,12 +1098,16 @@ where "source-system-timestamps.create-time" => Some(("sourceSystemTimestamps.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-system-timestamps.expire-time" => Some(("sourceSystemTimestamps.expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-system-timestamps.update-time" => Some(("sourceSystemTimestamps.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sql-database-system-spec.database-version" => Some(("sqlDatabaseSystemSpec.databaseVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sql-database-system-spec.instance-host" => Some(("sqlDatabaseSystemSpec.instanceHost", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sql-database-system-spec.sql-engine" => Some(("sqlDatabaseSystemSpec.sqlEngine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "usage-signal.favorite-count" => Some(("usageSignal.favoriteCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "usage-signal.update-time" => Some(("usageSignal.updateTime", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "user-specified-system" => Some(("userSpecifiedSystem", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-specified-type" => Some(("userSpecifiedType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["asset", "avro", "bigquery-connection-spec", "bigquery-date-sharded-spec", "bigquery-routine-spec", "bigquery-table-spec", "business-context", "cloud-sql", "compression-format", "connection-type", "create-time", "data-format", "data-source", "data-source-connection-spec", "database", "database-table-spec", "dataplex-fileset", "dataplex-spec", "dataplex-table", "dataset", "definition-body", "description", "display-name", "entry-overview", "expire-time", "file-pattern", "file-patterns", "file-type", "fileset-spec", "fully-qualified-name", "gcs-fileset-spec", "grouped-entry", "has-credential", "imported-libraries", "instance-id", "integrated-system", "labels", "language", "latest-shard-resource", "linked-resource", "name", "overview", "personal-details", "project-id", "protobuf", "resource", "return-type", "routine-spec", "routine-type", "service", "shard-count", "source-entry", "source-system-timestamps", "star-time", "starred", "storage-properties", "table-prefix", "table-source-type", "table-spec", "text", "thrift", "type", "update-time", "usage-signal", "user-managed", "user-specified-system", "user-specified-type", "view-query", "view-spec"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["asset", "avro", "base-table", "bigquery-connection-spec", "bigquery-date-sharded-spec", "bigquery-routine-spec", "bigquery-table-spec", "business-context", "cloud-sql", "compression-format", "connection-type", "create-time", "data-format", "data-source", "data-source-connection-spec", "database", "database-table-spec", "database-version", "database-view-spec", "dataplex-fileset", "dataplex-spec", 
"dataplex-table", "dataset", "definition-body", "description", "display-name", "entry-overview", "expire-time", "favorite-count", "file-pattern", "file-patterns", "file-type", "fileset-spec", "fully-qualified-name", "gcs-fileset-spec", "grouped-entry", "has-credential", "imported-libraries", "instance-host", "instance-id", "integrated-system", "labels", "language", "latest-shard-resource", "linked-resource", "looker-system-spec", "name", "overview", "parent-instance-display-name", "parent-instance-id", "parent-model-display-name", "parent-model-id", "parent-view-display-name", "parent-view-id", "personal-details", "project-id", "protobuf", "resource", "return-type", "routine-spec", "routine-type", "service", "shard-count", "source-entry", "source-system-timestamps", "sql-database-system-spec", "sql-engine", "sql-query", "star-time", "starred", "storage-properties", "table-prefix", "table-source-type", "table-spec", "text", "thrift", "type", "update-time", "usage-signal", "user-managed", "user-specified-system", "user-specified-type", "view-query", "view-spec", "view-type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1012,7 +1122,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1295,7 +1405,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1387,7 +1497,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1612,7 +1722,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1756,7 +1866,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1850,7 +1960,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2135,7 +2245,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2227,7 +2337,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2361,6 +2471,224 @@ where } } + async fn _projects_locations_operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut 
call = self.hub.projects().locations_operations_cancel(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { 
+ assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_list(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() 
== 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_tag_templates_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2459,7 +2787,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2609,7 +2937,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2788,7 +3116,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3101,7 +3429,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3349,11 +3677,13 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "policy-tag-count" => Some(("policyTagCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "service.identity" => Some(("service.identity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service.name" => Some(("service.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.create-time" => Some(("taxonomyTimestamps.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.expire-time" => Some(("taxonomyTimestamps.expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.update-time" => Some(("taxonomyTimestamps.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activated-policy-types", "create-time", "description", "display-name", "expire-time", "name", "policy-tag-count", "taxonomy-timestamps", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activated-policy-types", "create-time", "description", "display-name", "expire-time", "identity", "name", "policy-tag-count", "service", "taxonomy-timestamps", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3475,7 +3805,7 @@ where call = call.add_taxonomies(value.unwrap_or("")); }, "serialized-taxonomies" => { - call = call.serialized_taxonomies(arg_from_str(value.unwrap_or("false"), err, 
"serialized-taxonomies", "boolean")); + call = call.serialized_taxonomies( value.map(|v| arg_from_str(v, err, "serialized-taxonomies", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3756,7 +4086,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); }, _ => { let mut found = false; @@ -3771,7 +4104,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -3833,11 +4166,13 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "policy-tag-count" => Some(("policyTagCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "service.identity" => Some(("service.identity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service.name" => Some(("service.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.create-time" => Some(("taxonomyTimestamps.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.expire-time" => Some(("taxonomyTimestamps.expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.update-time" => Some(("taxonomyTimestamps.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activated-policy-types", "create-time", "description", 
"display-name", "expire-time", "name", "policy-tag-count", "taxonomy-timestamps", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activated-policy-types", "create-time", "description", "display-name", "expire-time", "identity", "name", "policy-tag-count", "service", "taxonomy-timestamps", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3852,7 +4187,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4189,7 +4524,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4282,7 +4617,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4807,6 +5142,9 @@ where ("locations-entry-groups-entries-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_entry_groups_entries_get_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-entry-groups-entries-import", Some(opt)) => { + call_result = self._projects_locations_entry_groups_entries_import(opt, dry_run, &mut err).await; + }, ("locations-entry-groups-entries-list", Some(opt)) => { call_result = self._projects_locations_entry_groups_entries_list(opt, dry_run, &mut err).await; }, @@ -4870,6 +5208,18 @@ where 
("locations-entry-groups-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_entry_groups_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-operations-cancel", Some(opt)) => { + call_result = self._projects_locations_operations_cancel(opt, dry_run, &mut err).await; + }, + ("locations-operations-delete", Some(opt)) => { + call_result = self._projects_locations_operations_delete(opt, dry_run, &mut err).await; + }, + ("locations-operations-get", Some(opt)) => { + call_result = self._projects_locations_operations_get(opt, dry_run, &mut err).await; + }, + ("locations-operations-list", Some(opt)) => { + call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; + }, ("locations-tag-templates-create", Some(opt)) => { call_result = self._projects_locations_tag_templates_create(opt, dry_run, &mut err).await; }, @@ -5086,7 +5436,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'locations-entry-groups-create', 'locations-entry-groups-delete', 'locations-entry-groups-entries-create', 'locations-entry-groups-entries-delete', 'locations-entry-groups-entries-get', 'locations-entry-groups-entries-get-iam-policy', 'locations-entry-groups-entries-list', 'locations-entry-groups-entries-modify-entry-contacts', 'locations-entry-groups-entries-modify-entry-overview', 'locations-entry-groups-entries-patch', 'locations-entry-groups-entries-star', 'locations-entry-groups-entries-tags-create', 'locations-entry-groups-entries-tags-delete', 'locations-entry-groups-entries-tags-list', 'locations-entry-groups-entries-tags-patch', 'locations-entry-groups-entries-test-iam-permissions', 'locations-entry-groups-entries-unstar', 'locations-entry-groups-get', 'locations-entry-groups-get-iam-policy', 'locations-entry-groups-list', 'locations-entry-groups-patch', 'locations-entry-groups-set-iam-policy', 'locations-entry-groups-tags-create', 'locations-entry-groups-tags-delete', 'locations-entry-groups-tags-list', 
'locations-entry-groups-tags-patch', 'locations-entry-groups-test-iam-permissions', 'locations-tag-templates-create', 'locations-tag-templates-delete', 'locations-tag-templates-fields-create', 'locations-tag-templates-fields-delete', 'locations-tag-templates-fields-enum-values-rename', 'locations-tag-templates-fields-patch', 'locations-tag-templates-fields-rename', 'locations-tag-templates-get', 'locations-tag-templates-get-iam-policy', 'locations-tag-templates-patch', 'locations-tag-templates-set-iam-policy', 'locations-tag-templates-test-iam-permissions', 'locations-taxonomies-create', 'locations-taxonomies-delete', 'locations-taxonomies-export', 'locations-taxonomies-get', 'locations-taxonomies-get-iam-policy', 'locations-taxonomies-import', 'locations-taxonomies-list', 'locations-taxonomies-patch', 'locations-taxonomies-policy-tags-create', 'locations-taxonomies-policy-tags-delete', 'locations-taxonomies-policy-tags-get', 'locations-taxonomies-policy-tags-get-iam-policy', 'locations-taxonomies-policy-tags-list', 'locations-taxonomies-policy-tags-patch', 'locations-taxonomies-policy-tags-set-iam-policy', 'locations-taxonomies-policy-tags-test-iam-permissions', 'locations-taxonomies-replace', 'locations-taxonomies-set-iam-policy' and 'locations-taxonomies-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-entry-groups-create', 'locations-entry-groups-delete', 'locations-entry-groups-entries-create', 'locations-entry-groups-entries-delete', 'locations-entry-groups-entries-get', 'locations-entry-groups-entries-get-iam-policy', 'locations-entry-groups-entries-import', 'locations-entry-groups-entries-list', 'locations-entry-groups-entries-modify-entry-contacts', 'locations-entry-groups-entries-modify-entry-overview', 'locations-entry-groups-entries-patch', 'locations-entry-groups-entries-star', 'locations-entry-groups-entries-tags-create', 'locations-entry-groups-entries-tags-delete', 'locations-entry-groups-entries-tags-list', 
'locations-entry-groups-entries-tags-patch', 'locations-entry-groups-entries-test-iam-permissions', 'locations-entry-groups-entries-unstar', 'locations-entry-groups-get', 'locations-entry-groups-get-iam-policy', 'locations-entry-groups-list', 'locations-entry-groups-patch', 'locations-entry-groups-set-iam-policy', 'locations-entry-groups-tags-create', 'locations-entry-groups-tags-delete', 'locations-entry-groups-tags-list', 'locations-entry-groups-tags-patch', 'locations-entry-groups-test-iam-permissions', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-tag-templates-create', 'locations-tag-templates-delete', 'locations-tag-templates-fields-create', 'locations-tag-templates-fields-delete', 'locations-tag-templates-fields-enum-values-rename', 'locations-tag-templates-fields-patch', 'locations-tag-templates-fields-rename', 'locations-tag-templates-get', 'locations-tag-templates-get-iam-policy', 'locations-tag-templates-patch', 'locations-tag-templates-set-iam-policy', 'locations-tag-templates-test-iam-permissions', 'locations-taxonomies-create', 'locations-taxonomies-delete', 'locations-taxonomies-export', 'locations-taxonomies-get', 'locations-taxonomies-get-iam-policy', 'locations-taxonomies-import', 'locations-taxonomies-list', 'locations-taxonomies-patch', 'locations-taxonomies-policy-tags-create', 'locations-taxonomies-policy-tags-delete', 'locations-taxonomies-policy-tags-get', 'locations-taxonomies-policy-tags-get-iam-policy', 'locations-taxonomies-policy-tags-list', 'locations-taxonomies-policy-tags-patch', 'locations-taxonomies-policy-tags-set-iam-policy', 'locations-taxonomies-policy-tags-test-iam-permissions', 'locations-taxonomies-replace', 'locations-taxonomies-set-iam-policy' and 'locations-taxonomies-test-iam-permissions'", vec![ ("locations-entry-groups-create", Some(r##"Creates an entry group. 
An entry group contains logically related entries together with [Cloud Identity and Access Management](/data-catalog/docs/concepts/iam) policies. These policies specify users who can create, edit, and view entries within entry groups. Data Catalog automatically creates entry groups with names that start with the `@` symbol for the following resources: * BigQuery entries (`@bigquery`) * Pub/Sub topics (`@pubsub`) * Dataproc Metastore services (`@dataproc_metastore_{SERVICE_NAME_HASH}`) You can create your own entry groups for Cloud Storage fileset entries and custom entries together with the corresponding IAM policies. User-created entry groups can't contain the `@` symbol, it is reserved for automatically created groups. Entry groups, like entries, can be searched. A maximum of 10,000 entry groups may be created per organization across all locations. You must enable the Data Catalog API in the project identified by the `parent` parameter. For more information, see [Data Catalog resource project](https://cloud.google.com/data-catalog/docs/concepts/resource-project)."##), "Details at http://byron.github.io/google-apis-rs/google_datacatalog1_cli/projects_locations-entry-groups-create", @@ -5215,7 +5565,35 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-entry-groups-entries-import", + Some(r##"Imports entries from some source (e.g. dump in a Cloud Storage bucket) to the Data Catalog. Dump here is a snapshot of the third-party system state, that needs to be ingested in the Data Catalog. Import of entries is a sync operation that reconciles state of the third-party system and Data Catalog. ImportEntries is a long-running operation done in the background, so this method returns long-running operation resource. The resource can be queried with Operations.GetOperation which contains metadata and response."##), + "Details at http://byron.github.io/google-apis-rs/google_datacatalog1_cli/projects_locations-entry-groups-entries-import", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Target entry group for ingested entries."##), Some(true), Some(false)), @@ -5477,7 +5855,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5555,7 +5933,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5633,7 +6011,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5761,7 +6139,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5777,6 +6155,94 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-cancel", + Some(r##"Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. 
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`."##), + "Details at http://byron.github.io/google-apis-rs/google_datacatalog1_cli/projects_locations-operations-cancel", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource to be cancelled."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-delete", + Some(r##"Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`."##), + "Details at http://byron.github.io/google-apis-rs/google_datacatalog1_cli/projects_locations-operations-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource to be deleted."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-get", + Some(r##"Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service."##), + "Details at http://byron.github.io/google-apis-rs/google_datacatalog1_cli/projects_locations-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-list", + Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##), + "Details at http://byron.github.io/google-apis-rs/google_datacatalog1_cli/projects_locations-operations-list", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation's parent resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5995,7 +6461,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6051,7 +6517,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6079,7 +6545,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6201,7 +6667,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6379,7 +6845,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6457,7 +6923,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6485,7 +6951,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6541,7 +7007,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6569,7 +7035,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6597,7 +7063,7 @@ async fn main() { let mut app = App::new("datacatalog1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230117") .about("A fully managed and highly scalable data discovery and metadata management service. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datacatalog1_cli") .arg(Arg::with_name("url") diff --git a/gen/datacatalog1/Cargo.toml b/gen/datacatalog1/Cargo.toml index 5bbf3548bf..59bfc6a27b 100644 --- a/gen/datacatalog1/Cargo.toml +++ b/gen/datacatalog1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datacatalog1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Catalog (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datacatalog1" homepage = "https://cloud.google.com/data-catalog/docs/" -documentation = "https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-datacatalog1/5.0.2+20230117" license = "MIT" keywords = ["datacatalog", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datacatalog1/README.md b/gen/datacatalog1/README.md index da462aabd7..d5a9ddb1dd 100644 --- a/gen/datacatalog1/README.md +++ b/gen/datacatalog1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-datacatalog1` library allows access to all features of the *Google Data Catalog* service. -This documentation was generated from *Data Catalog* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *datacatalog:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Data Catalog* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *datacatalog:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Data Catalog* *v1* API can be found at the [official documentation site](https://cloud.google.com/data-catalog/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/DataCatalog) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/DataCatalog) ... * catalog - * [*search*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::CatalogSearchCall) + * [*search*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::CatalogSearchCall) * entries - * [*lookup*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::EntryLookupCall) + * [*lookup*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::EntryLookupCall) * projects - * [*locations entry groups create*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupCreateCall), [*locations entry groups delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupDeleteCall), [*locations entry groups entries create*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryCreateCall), [*locations entry groups entries delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryDeleteCall), [*locations entry groups entries get*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryGetCall), 
[*locations entry groups entries get iam policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryGetIamPolicyCall), [*locations entry groups entries import*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryImportCall), [*locations entry groups entries list*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryListCall), [*locations entry groups entries modify entry contacts*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryModifyEntryContactCall), [*locations entry groups entries modify entry overview*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryModifyEntryOverviewCall), [*locations entry groups entries patch*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryPatchCall), [*locations entry groups entries star*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryStarCall), [*locations entry groups entries tags create*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTagCreateCall), [*locations entry groups entries tags delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTagDeleteCall), [*locations entry groups entries tags list*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTagListCall), [*locations entry groups entries tags patch*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTagPatchCall), [*locations entry groups entries test iam 
permissions*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTestIamPermissionCall), [*locations entry groups entries unstar*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryUnstarCall), [*locations entry groups get*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupGetCall), [*locations entry groups get iam policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupGetIamPolicyCall), [*locations entry groups list*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupListCall), [*locations entry groups patch*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupPatchCall), [*locations entry groups set iam policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupSetIamPolicyCall), [*locations entry groups tags create*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTagCreateCall), [*locations entry groups tags delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTagDeleteCall), [*locations entry groups tags list*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTagListCall), [*locations entry groups tags patch*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTagPatchCall), [*locations entry groups test iam permissions*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTestIamPermissionCall), [*locations operations 
cancel*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationOperationListCall), [*locations tag templates create*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateCreateCall), [*locations tag templates delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateDeleteCall), [*locations tag templates fields create*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldCreateCall), [*locations tag templates fields delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldDeleteCall), [*locations tag templates fields enum values rename*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldEnumValueRenameCall), [*locations tag templates fields patch*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldPatchCall), [*locations tag templates fields rename*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldRenameCall), [*locations tag templates get*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateGetCall), [*locations tag templates get iam 
policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateGetIamPolicyCall), [*locations tag templates patch*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplatePatchCall), [*locations tag templates set iam policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateSetIamPolicyCall), [*locations tag templates test iam permissions*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTagTemplateTestIamPermissionCall), [*locations taxonomies create*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyCreateCall), [*locations taxonomies delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyDeleteCall), [*locations taxonomies export*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyExportCall), [*locations taxonomies get*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyGetCall), [*locations taxonomies get iam policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyGetIamPolicyCall), [*locations taxonomies import*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyImportCall), [*locations taxonomies list*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyListCall), [*locations taxonomies patch*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPatchCall), [*locations taxonomies policy tags 
create*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagCreateCall), [*locations taxonomies policy tags delete*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagDeleteCall), [*locations taxonomies policy tags get*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagGetCall), [*locations taxonomies policy tags get iam policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagGetIamPolicyCall), [*locations taxonomies policy tags list*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagListCall), [*locations taxonomies policy tags patch*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagPatchCall), [*locations taxonomies policy tags set iam policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagSetIamPolicyCall), [*locations taxonomies policy tags test iam permissions*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagTestIamPermissionCall), [*locations taxonomies replace*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyReplaceCall), [*locations taxonomies set iam policy*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomySetIamPolicyCall) and [*locations taxonomies test iam permissions*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/api::ProjectLocationTaxonomyTestIamPermissionCall) + * [*locations entry groups 
create*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupCreateCall), [*locations entry groups delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupDeleteCall), [*locations entry groups entries create*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryCreateCall), [*locations entry groups entries delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryDeleteCall), [*locations entry groups entries get*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryGetCall), [*locations entry groups entries get iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryGetIamPolicyCall), [*locations entry groups entries import*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryImportCall), [*locations entry groups entries list*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryListCall), [*locations entry groups entries modify entry contacts*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryModifyEntryContactCall), [*locations entry groups entries modify entry overview*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryModifyEntryOverviewCall), [*locations entry groups entries patch*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryPatchCall), [*locations entry groups entries star*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryStarCall), [*locations entry groups entries tags 
create*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTagCreateCall), [*locations entry groups entries tags delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTagDeleteCall), [*locations entry groups entries tags list*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTagListCall), [*locations entry groups entries tags patch*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTagPatchCall), [*locations entry groups entries test iam permissions*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryTestIamPermissionCall), [*locations entry groups entries unstar*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupEntryUnstarCall), [*locations entry groups get*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupGetCall), [*locations entry groups get iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupGetIamPolicyCall), [*locations entry groups list*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupListCall), [*locations entry groups patch*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupPatchCall), [*locations entry groups set iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupSetIamPolicyCall), [*locations entry groups tags create*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTagCreateCall), [*locations entry groups tags 
delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTagDeleteCall), [*locations entry groups tags list*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTagListCall), [*locations entry groups tags patch*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTagPatchCall), [*locations entry groups test iam permissions*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationEntryGroupTestIamPermissionCall), [*locations operations cancel*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationOperationListCall), [*locations tag templates create*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateCreateCall), [*locations tag templates delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateDeleteCall), [*locations tag templates fields create*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldCreateCall), [*locations tag templates fields delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldDeleteCall), [*locations tag templates fields enum values rename*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldEnumValueRenameCall), [*locations tag 
templates fields patch*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldPatchCall), [*locations tag templates fields rename*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateFieldRenameCall), [*locations tag templates get*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateGetCall), [*locations tag templates get iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateGetIamPolicyCall), [*locations tag templates patch*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplatePatchCall), [*locations tag templates set iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateSetIamPolicyCall), [*locations tag templates test iam permissions*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTagTemplateTestIamPermissionCall), [*locations taxonomies create*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyCreateCall), [*locations taxonomies delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyDeleteCall), [*locations taxonomies export*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyExportCall), [*locations taxonomies get*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyGetCall), [*locations taxonomies get iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyGetIamPolicyCall), [*locations taxonomies import*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyImportCall), [*locations taxonomies 
list*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyListCall), [*locations taxonomies patch*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPatchCall), [*locations taxonomies policy tags create*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagCreateCall), [*locations taxonomies policy tags delete*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagDeleteCall), [*locations taxonomies policy tags get*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagGetCall), [*locations taxonomies policy tags get iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagGetIamPolicyCall), [*locations taxonomies policy tags list*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagListCall), [*locations taxonomies policy tags patch*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagPatchCall), [*locations taxonomies policy tags set iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagSetIamPolicyCall), [*locations taxonomies policy tags test iam permissions*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyPolicyTagTestIamPermissionCall), [*locations taxonomies replace*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyReplaceCall), [*locations taxonomies set iam policy*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomySetIamPolicyCall) and [*locations taxonomies test iam 
permissions*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/api::ProjectLocationTaxonomyTestIamPermissionCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/DataCatalog)** +* **[Hub](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/DataCatalog)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::CallBuilder) -* **[Resources](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::Part)** + * **[Parts](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::CallBuilder)** * operations to apply to 
*Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::RequestValue) and -[decodable](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::RequestValue) and +[decodable](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datacatalog1/5.0.2-beta-1+20230117/google_datacatalog1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datacatalog1/5.0.2+20230117/google_datacatalog1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/datacatalog1/src/api.rs b/gen/datacatalog1/src/api.rs index 2308cf6aa8..ddf254b09e 100644 --- a/gen/datacatalog1/src/api.rs +++ b/gen/datacatalog1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> DataCatalog { DataCatalog { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datacatalog.googleapis.com/".to_string(), _root_url: "https://datacatalog.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> DataCatalog { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datacatalog1/src/client.rs b/gen/datacatalog1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datacatalog1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datacatalog1/src/lib.rs b/gen/datacatalog1/src/lib.rs index 9f3d44bcfe..17fb0689f7 100644 --- a/gen/datacatalog1/src/lib.rs +++ b/gen/datacatalog1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Data Catalog* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *datacatalog:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Data Catalog* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *datacatalog:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Data Catalog* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/data-catalog/docs/). diff --git a/gen/datacatalog1_beta1-cli/Cargo.toml b/gen/datacatalog1_beta1-cli/Cargo.toml index 7978ed1222..0e83037ec2 100644 --- a/gen/datacatalog1_beta1-cli/Cargo.toml +++ b/gen/datacatalog1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datacatalog1_beta1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Catalog (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datacatalog1_beta1-cli" @@ -20,13 +20,13 @@ name = "datacatalog1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datacatalog1_beta1] path = "../datacatalog1_beta1" -version = "4.0.1+20220224" +version = "5.0.2+20230117" + diff --git a/gen/datacatalog1_beta1-cli/README.md b/gen/datacatalog1_beta1-cli/README.md index 56aaf946a9..2b0af226a7 100644 --- a/gen/datacatalog1_beta1-cli/README.md +++ 
b/gen/datacatalog1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Data Catalog* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Data Catalog* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash datacatalog1-beta1 [options] diff --git a/gen/datacatalog1_beta1-cli/mkdocs.yml b/gen/datacatalog1_beta1-cli/mkdocs.yml index 18a4d9babb..a78ee9536c 100644 --- a/gen/datacatalog1_beta1-cli/mkdocs.yml +++ b/gen/datacatalog1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Data Catalog v4.0.1+20220224 +site_name: Data Catalog v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-datacatalog1_beta1-cli site_description: A complete library to interact with Data Catalog (protocol v1beta1) @@ -7,63 +7,66 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datacatalog1_bet docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['catalog_search.md', 'Catalog', 'Search'] -- ['entries_lookup.md', 'Entries', 'Lookup'] -- ['projects_locations-entry-groups-create.md', 'Projects', 'Locations Entry Groups Create'] -- ['projects_locations-entry-groups-delete.md', 'Projects', 'Locations Entry Groups Delete'] -- ['projects_locations-entry-groups-entries-create.md', 'Projects', 'Locations Entry Groups Entries Create'] -- ['projects_locations-entry-groups-entries-delete.md', 'Projects', 'Locations Entry Groups Entries Delete'] -- ['projects_locations-entry-groups-entries-get.md', 'Projects', 'Locations Entry Groups Entries Get'] -- ['projects_locations-entry-groups-entries-get-iam-policy.md', 'Projects', 'Locations Entry Groups Entries Get Iam Policy'] -- ['projects_locations-entry-groups-entries-list.md', 'Projects', 'Locations Entry Groups Entries List'] -- ['projects_locations-entry-groups-entries-patch.md', 'Projects', 'Locations Entry Groups Entries 
Patch'] -- ['projects_locations-entry-groups-entries-tags-create.md', 'Projects', 'Locations Entry Groups Entries Tags Create'] -- ['projects_locations-entry-groups-entries-tags-delete.md', 'Projects', 'Locations Entry Groups Entries Tags Delete'] -- ['projects_locations-entry-groups-entries-tags-list.md', 'Projects', 'Locations Entry Groups Entries Tags List'] -- ['projects_locations-entry-groups-entries-tags-patch.md', 'Projects', 'Locations Entry Groups Entries Tags Patch'] -- ['projects_locations-entry-groups-entries-test-iam-permissions.md', 'Projects', 'Locations Entry Groups Entries Test Iam Permissions'] -- ['projects_locations-entry-groups-get.md', 'Projects', 'Locations Entry Groups Get'] -- ['projects_locations-entry-groups-get-iam-policy.md', 'Projects', 'Locations Entry Groups Get Iam Policy'] -- ['projects_locations-entry-groups-list.md', 'Projects', 'Locations Entry Groups List'] -- ['projects_locations-entry-groups-patch.md', 'Projects', 'Locations Entry Groups Patch'] -- ['projects_locations-entry-groups-set-iam-policy.md', 'Projects', 'Locations Entry Groups Set Iam Policy'] -- ['projects_locations-entry-groups-tags-create.md', 'Projects', 'Locations Entry Groups Tags Create'] -- ['projects_locations-entry-groups-tags-delete.md', 'Projects', 'Locations Entry Groups Tags Delete'] -- ['projects_locations-entry-groups-tags-list.md', 'Projects', 'Locations Entry Groups Tags List'] -- ['projects_locations-entry-groups-tags-patch.md', 'Projects', 'Locations Entry Groups Tags Patch'] -- ['projects_locations-entry-groups-test-iam-permissions.md', 'Projects', 'Locations Entry Groups Test Iam Permissions'] -- ['projects_locations-tag-templates-create.md', 'Projects', 'Locations Tag Templates Create'] -- ['projects_locations-tag-templates-delete.md', 'Projects', 'Locations Tag Templates Delete'] -- ['projects_locations-tag-templates-fields-create.md', 'Projects', 'Locations Tag Templates Fields Create'] -- 
['projects_locations-tag-templates-fields-delete.md', 'Projects', 'Locations Tag Templates Fields Delete'] -- ['projects_locations-tag-templates-fields-enum-values-rename.md', 'Projects', 'Locations Tag Templates Fields Enum Values Rename'] -- ['projects_locations-tag-templates-fields-patch.md', 'Projects', 'Locations Tag Templates Fields Patch'] -- ['projects_locations-tag-templates-fields-rename.md', 'Projects', 'Locations Tag Templates Fields Rename'] -- ['projects_locations-tag-templates-get.md', 'Projects', 'Locations Tag Templates Get'] -- ['projects_locations-tag-templates-get-iam-policy.md', 'Projects', 'Locations Tag Templates Get Iam Policy'] -- ['projects_locations-tag-templates-patch.md', 'Projects', 'Locations Tag Templates Patch'] -- ['projects_locations-tag-templates-set-iam-policy.md', 'Projects', 'Locations Tag Templates Set Iam Policy'] -- ['projects_locations-tag-templates-test-iam-permissions.md', 'Projects', 'Locations Tag Templates Test Iam Permissions'] -- ['projects_locations-taxonomies-create.md', 'Projects', 'Locations Taxonomies Create'] -- ['projects_locations-taxonomies-delete.md', 'Projects', 'Locations Taxonomies Delete'] -- ['projects_locations-taxonomies-export.md', 'Projects', 'Locations Taxonomies Export'] -- ['projects_locations-taxonomies-get.md', 'Projects', 'Locations Taxonomies Get'] -- ['projects_locations-taxonomies-get-iam-policy.md', 'Projects', 'Locations Taxonomies Get Iam Policy'] -- ['projects_locations-taxonomies-import.md', 'Projects', 'Locations Taxonomies Import'] -- ['projects_locations-taxonomies-list.md', 'Projects', 'Locations Taxonomies List'] -- ['projects_locations-taxonomies-patch.md', 'Projects', 'Locations Taxonomies Patch'] -- ['projects_locations-taxonomies-policy-tags-create.md', 'Projects', 'Locations Taxonomies Policy Tags Create'] -- ['projects_locations-taxonomies-policy-tags-delete.md', 'Projects', 'Locations Taxonomies Policy Tags Delete'] -- ['projects_locations-taxonomies-policy-tags-get.md', 
'Projects', 'Locations Taxonomies Policy Tags Get'] -- ['projects_locations-taxonomies-policy-tags-get-iam-policy.md', 'Projects', 'Locations Taxonomies Policy Tags Get Iam Policy'] -- ['projects_locations-taxonomies-policy-tags-list.md', 'Projects', 'Locations Taxonomies Policy Tags List'] -- ['projects_locations-taxonomies-policy-tags-patch.md', 'Projects', 'Locations Taxonomies Policy Tags Patch'] -- ['projects_locations-taxonomies-policy-tags-set-iam-policy.md', 'Projects', 'Locations Taxonomies Policy Tags Set Iam Policy'] -- ['projects_locations-taxonomies-policy-tags-test-iam-permissions.md', 'Projects', 'Locations Taxonomies Policy Tags Test Iam Permissions'] -- ['projects_locations-taxonomies-set-iam-policy.md', 'Projects', 'Locations Taxonomies Set Iam Policy'] -- ['projects_locations-taxonomies-test-iam-permissions.md', 'Projects', 'Locations Taxonomies Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Catalog': + - 'Search': 'catalog_search.md' +- 'Entries': + - 'Lookup': 'entries_lookup.md' +- 'Projects': + - 'Locations Entry Groups Create': 'projects_locations-entry-groups-create.md' + - 'Locations Entry Groups Delete': 'projects_locations-entry-groups-delete.md' + - 'Locations Entry Groups Entries Create': 'projects_locations-entry-groups-entries-create.md' + - 'Locations Entry Groups Entries Delete': 'projects_locations-entry-groups-entries-delete.md' + - 'Locations Entry Groups Entries Get': 'projects_locations-entry-groups-entries-get.md' + - 'Locations Entry Groups Entries Get Iam Policy': 'projects_locations-entry-groups-entries-get-iam-policy.md' + - 'Locations Entry Groups Entries List': 'projects_locations-entry-groups-entries-list.md' + - 'Locations Entry Groups Entries Patch': 'projects_locations-entry-groups-entries-patch.md' + - 'Locations Entry Groups Entries Tags Create': 'projects_locations-entry-groups-entries-tags-create.md' + - 'Locations Entry Groups Entries Tags Delete': 'projects_locations-entry-groups-entries-tags-delete.md' 
+ - 'Locations Entry Groups Entries Tags List': 'projects_locations-entry-groups-entries-tags-list.md' + - 'Locations Entry Groups Entries Tags Patch': 'projects_locations-entry-groups-entries-tags-patch.md' + - 'Locations Entry Groups Entries Test Iam Permissions': 'projects_locations-entry-groups-entries-test-iam-permissions.md' + - 'Locations Entry Groups Get': 'projects_locations-entry-groups-get.md' + - 'Locations Entry Groups Get Iam Policy': 'projects_locations-entry-groups-get-iam-policy.md' + - 'Locations Entry Groups List': 'projects_locations-entry-groups-list.md' + - 'Locations Entry Groups Patch': 'projects_locations-entry-groups-patch.md' + - 'Locations Entry Groups Set Iam Policy': 'projects_locations-entry-groups-set-iam-policy.md' + - 'Locations Entry Groups Tags Create': 'projects_locations-entry-groups-tags-create.md' + - 'Locations Entry Groups Tags Delete': 'projects_locations-entry-groups-tags-delete.md' + - 'Locations Entry Groups Tags List': 'projects_locations-entry-groups-tags-list.md' + - 'Locations Entry Groups Tags Patch': 'projects_locations-entry-groups-tags-patch.md' + - 'Locations Entry Groups Test Iam Permissions': 'projects_locations-entry-groups-test-iam-permissions.md' + - 'Locations Tag Templates Create': 'projects_locations-tag-templates-create.md' + - 'Locations Tag Templates Delete': 'projects_locations-tag-templates-delete.md' + - 'Locations Tag Templates Fields Create': 'projects_locations-tag-templates-fields-create.md' + - 'Locations Tag Templates Fields Delete': 'projects_locations-tag-templates-fields-delete.md' + - 'Locations Tag Templates Fields Enum Values Rename': 'projects_locations-tag-templates-fields-enum-values-rename.md' + - 'Locations Tag Templates Fields Patch': 'projects_locations-tag-templates-fields-patch.md' + - 'Locations Tag Templates Fields Rename': 'projects_locations-tag-templates-fields-rename.md' + - 'Locations Tag Templates Get': 'projects_locations-tag-templates-get.md' + - 'Locations Tag 
Templates Get Iam Policy': 'projects_locations-tag-templates-get-iam-policy.md' + - 'Locations Tag Templates Patch': 'projects_locations-tag-templates-patch.md' + - 'Locations Tag Templates Set Iam Policy': 'projects_locations-tag-templates-set-iam-policy.md' + - 'Locations Tag Templates Test Iam Permissions': 'projects_locations-tag-templates-test-iam-permissions.md' + - 'Locations Taxonomies Create': 'projects_locations-taxonomies-create.md' + - 'Locations Taxonomies Delete': 'projects_locations-taxonomies-delete.md' + - 'Locations Taxonomies Export': 'projects_locations-taxonomies-export.md' + - 'Locations Taxonomies Get': 'projects_locations-taxonomies-get.md' + - 'Locations Taxonomies Get Iam Policy': 'projects_locations-taxonomies-get-iam-policy.md' + - 'Locations Taxonomies Import': 'projects_locations-taxonomies-import.md' + - 'Locations Taxonomies List': 'projects_locations-taxonomies-list.md' + - 'Locations Taxonomies Patch': 'projects_locations-taxonomies-patch.md' + - 'Locations Taxonomies Policy Tags Create': 'projects_locations-taxonomies-policy-tags-create.md' + - 'Locations Taxonomies Policy Tags Delete': 'projects_locations-taxonomies-policy-tags-delete.md' + - 'Locations Taxonomies Policy Tags Get': 'projects_locations-taxonomies-policy-tags-get.md' + - 'Locations Taxonomies Policy Tags Get Iam Policy': 'projects_locations-taxonomies-policy-tags-get-iam-policy.md' + - 'Locations Taxonomies Policy Tags List': 'projects_locations-taxonomies-policy-tags-list.md' + - 'Locations Taxonomies Policy Tags Patch': 'projects_locations-taxonomies-policy-tags-patch.md' + - 'Locations Taxonomies Policy Tags Set Iam Policy': 'projects_locations-taxonomies-policy-tags-set-iam-policy.md' + - 'Locations Taxonomies Policy Tags Test Iam Permissions': 'projects_locations-taxonomies-policy-tags-test-iam-permissions.md' + - 'Locations Taxonomies Set Iam Policy': 'projects_locations-taxonomies-set-iam-policy.md' + - 'Locations Taxonomies Test Iam Permissions': 
'projects_locations-taxonomies-test-iam-permissions.md' theme: readthedocs diff --git a/gen/datacatalog1_beta1-cli/src/client.rs b/gen/datacatalog1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datacatalog1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datacatalog1_beta1-cli/src/main.rs b/gen/datacatalog1_beta1-cli/src/main.rs index e662cbcf14..94d7ea7e59 100644 --- a/gen/datacatalog1_beta1-cli/src/main.rs +++ b/gen/datacatalog1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datacatalog1_beta1::{api, Error, oauth2}; +use google_datacatalog1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -303,7 +302,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -655,13 +654,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -768,7 +767,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -967,7 +966,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1059,7 +1058,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1200,7 +1199,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1344,7 +1343,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1438,7 +1437,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1723,7 +1722,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1815,7 +1814,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2046,7 +2045,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2196,7 +2195,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2375,7 +2374,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2687,7 +2686,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2935,11 +2934,13 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "policy-tag-count" => 
Some(("policyTagCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "service.identity" => Some(("service.identity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service.name" => Some(("service.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.create-time" => Some(("taxonomyTimestamps.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.expire-time" => Some(("taxonomyTimestamps.expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.update-time" => Some(("taxonomyTimestamps.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activated-policy-types", "create-time", "description", "display-name", "expire-time", "name", "policy-tag-count", "taxonomy-timestamps", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activated-policy-types", "create-time", "description", "display-name", "expire-time", "identity", "name", "policy-tag-count", "service", "taxonomy-timestamps", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3061,7 +3062,7 @@ where call = call.add_taxonomies(value.unwrap_or("")); }, "serialized-taxonomies" => { - call = call.serialized_taxonomies(arg_from_str(value.unwrap_or("false"), err, "serialized-taxonomies", "boolean")); + call = call.serialized_taxonomies( value.map(|v| arg_from_str(v, err, "serialized-taxonomies", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3341,7 +3342,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); }, _ => { let mut found = false; @@ -3356,7 +3360,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -3418,11 +3422,13 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "policy-tag-count" => Some(("policyTagCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "service.identity" => Some(("service.identity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service.name" => Some(("service.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.create-time" => Some(("taxonomyTimestamps.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.expire-time" => Some(("taxonomyTimestamps.expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "taxonomy-timestamps.update-time" => Some(("taxonomyTimestamps.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activated-policy-types", "create-time", "description", "display-name", "expire-time", "name", "policy-tag-count", "taxonomy-timestamps", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activated-policy-types", "create-time", "description", "display-name", "expire-time", "identity", "name", "policy-tag-count", "service", "taxonomy-timestamps", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None 
} @@ -3437,7 +3443,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3774,7 +3780,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3867,7 +3873,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4698,7 +4704,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4876,7 +4882,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4926,7 +4932,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5004,7 +5010,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5132,7 +5138,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5366,7 +5372,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5422,7 +5428,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5450,7 +5456,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5572,7 +5578,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5750,7 +5756,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5828,7 +5834,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5856,7 +5862,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5884,7 +5890,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5912,7 +5918,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5940,7 +5946,7 @@ async fn main() { let mut app = App::new("datacatalog1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230117") .about("A fully managed and highly scalable data discovery and metadata management service. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datacatalog1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/datacatalog1_beta1/Cargo.toml b/gen/datacatalog1_beta1/Cargo.toml index eaeea47056..45b2ad97d5 100644 --- a/gen/datacatalog1_beta1/Cargo.toml +++ b/gen/datacatalog1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datacatalog1_beta1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Catalog (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datacatalog1_beta1" homepage = "https://cloud.google.com/data-catalog/docs/" -documentation = "https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117" license = "MIT" keywords = ["datacatalog", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datacatalog1_beta1/README.md b/gen/datacatalog1_beta1/README.md index 9abb7fa621..3e50f5e062 100644 --- a/gen/datacatalog1_beta1/README.md +++ b/gen/datacatalog1_beta1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-datacatalog1_beta1` library allows access to all features of the *Google Data Catalog* service. -This documentation was generated from *Data Catalog* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *datacatalog:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Data Catalog* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *datacatalog:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Data Catalog* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/data-catalog/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/DataCatalog) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/DataCatalog) ... * catalog - * [*search*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::CatalogSearchCall) + * [*search*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::CatalogSearchCall) * entries - * [*lookup*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::EntryLookupCall) + * [*lookup*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::EntryLookupCall) * projects - * [*locations entry groups create*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupCreateCall), [*locations entry groups delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupDeleteCall), [*locations entry groups entries create*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryCreateCall), [*locations entry groups entries delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryDeleteCall), [*locations entry groups 
entries get*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryGetCall), [*locations entry groups entries get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryGetIamPolicyCall), [*locations entry groups entries list*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryListCall), [*locations entry groups entries patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryPatchCall), [*locations entry groups entries tags create*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTagCreateCall), [*locations entry groups entries tags delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTagDeleteCall), [*locations entry groups entries tags list*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTagListCall), [*locations entry groups entries tags patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTagPatchCall), [*locations entry groups entries test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTestIamPermissionCall), [*locations entry groups get*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupGetCall), [*locations entry groups get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupGetIamPolicyCall), 
[*locations entry groups list*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupListCall), [*locations entry groups patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupPatchCall), [*locations entry groups set iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupSetIamPolicyCall), [*locations entry groups tags create*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTagCreateCall), [*locations entry groups tags delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTagDeleteCall), [*locations entry groups tags list*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTagListCall), [*locations entry groups tags patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTagPatchCall), [*locations entry groups test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTestIamPermissionCall), [*locations tag templates create*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateCreateCall), [*locations tag templates delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateDeleteCall), [*locations tag templates fields create*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldCreateCall), [*locations tag templates fields 
delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldDeleteCall), [*locations tag templates fields enum values rename*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldEnumValueRenameCall), [*locations tag templates fields patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldPatchCall), [*locations tag templates fields rename*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldRenameCall), [*locations tag templates get*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateGetCall), [*locations tag templates get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateGetIamPolicyCall), [*locations tag templates patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplatePatchCall), [*locations tag templates set iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateSetIamPolicyCall), [*locations tag templates test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateTestIamPermissionCall), [*locations taxonomies create*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyCreateCall), [*locations taxonomies delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyDeleteCall), [*locations taxonomies 
export*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyExportCall), [*locations taxonomies get*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyGetCall), [*locations taxonomies get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyGetIamPolicyCall), [*locations taxonomies import*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyImportCall), [*locations taxonomies list*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyListCall), [*locations taxonomies patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPatchCall), [*locations taxonomies policy tags create*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagCreateCall), [*locations taxonomies policy tags delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagDeleteCall), [*locations taxonomies policy tags get*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagGetCall), [*locations taxonomies policy tags get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagGetIamPolicyCall), [*locations taxonomies policy tags list*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagListCall), [*locations taxonomies policy tags 
patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagPatchCall), [*locations taxonomies policy tags set iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagSetIamPolicyCall), [*locations taxonomies policy tags test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagTestIamPermissionCall), [*locations taxonomies set iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomySetIamPolicyCall) and [*locations taxonomies test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyTestIamPermissionCall) + * [*locations entry groups create*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupCreateCall), [*locations entry groups delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupDeleteCall), [*locations entry groups entries create*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryCreateCall), [*locations entry groups entries delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryDeleteCall), [*locations entry groups entries get*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryGetCall), [*locations entry groups entries get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryGetIamPolicyCall), [*locations entry groups entries 
list*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryListCall), [*locations entry groups entries patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryPatchCall), [*locations entry groups entries tags create*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTagCreateCall), [*locations entry groups entries tags delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTagDeleteCall), [*locations entry groups entries tags list*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTagListCall), [*locations entry groups entries tags patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTagPatchCall), [*locations entry groups entries test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupEntryTestIamPermissionCall), [*locations entry groups get*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupGetCall), [*locations entry groups get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupGetIamPolicyCall), [*locations entry groups list*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupListCall), [*locations entry groups patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupPatchCall), [*locations entry groups set iam 
policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupSetIamPolicyCall), [*locations entry groups tags create*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTagCreateCall), [*locations entry groups tags delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTagDeleteCall), [*locations entry groups tags list*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTagListCall), [*locations entry groups tags patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTagPatchCall), [*locations entry groups test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationEntryGroupTestIamPermissionCall), [*locations tag templates create*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateCreateCall), [*locations tag templates delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateDeleteCall), [*locations tag templates fields create*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldCreateCall), [*locations tag templates fields delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldDeleteCall), [*locations tag templates fields enum values rename*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldEnumValueRenameCall), [*locations tag templates fields 
patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldPatchCall), [*locations tag templates fields rename*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateFieldRenameCall), [*locations tag templates get*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateGetCall), [*locations tag templates get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateGetIamPolicyCall), [*locations tag templates patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplatePatchCall), [*locations tag templates set iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateSetIamPolicyCall), [*locations tag templates test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTagTemplateTestIamPermissionCall), [*locations taxonomies create*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyCreateCall), [*locations taxonomies delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyDeleteCall), [*locations taxonomies export*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyExportCall), [*locations taxonomies get*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyGetCall), [*locations taxonomies get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyGetIamPolicyCall), [*locations taxonomies 
import*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyImportCall), [*locations taxonomies list*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyListCall), [*locations taxonomies patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPatchCall), [*locations taxonomies policy tags create*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagCreateCall), [*locations taxonomies policy tags delete*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagDeleteCall), [*locations taxonomies policy tags get*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagGetCall), [*locations taxonomies policy tags get iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagGetIamPolicyCall), [*locations taxonomies policy tags list*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagListCall), [*locations taxonomies policy tags patch*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagPatchCall), [*locations taxonomies policy tags set iam policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagSetIamPolicyCall), [*locations taxonomies policy tags test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyPolicyTagTestIamPermissionCall), [*locations taxonomies set iam 
policy*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomySetIamPolicyCall) and [*locations taxonomies test iam permissions*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/api::ProjectLocationTaxonomyTestIamPermissionCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/DataCatalog)** +* **[Hub](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/DataCatalog)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::Part)** * a collection of 
properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -136,17 +136,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -156,29 +156,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datacatalog1_beta1/5.0.2-beta-1+20230117/google_datacatalog1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datacatalog1_beta1/5.0.2+20230117/google_datacatalog1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/datacatalog1_beta1/src/api.rs b/gen/datacatalog1_beta1/src/api.rs index 37e2787927..b4ddb2d3e9 100644 --- a/gen/datacatalog1_beta1/src/api.rs +++ b/gen/datacatalog1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> DataCatalog { DataCatalog { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datacatalog.googleapis.com/".to_string(), _root_url: "https://datacatalog.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> DataCatalog { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datacatalog1_beta1/src/client.rs b/gen/datacatalog1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datacatalog1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datacatalog1_beta1/src/lib.rs b/gen/datacatalog1_beta1/src/lib.rs index 071e6bd81a..4f028e4558 100644 --- a/gen/datacatalog1_beta1/src/lib.rs +++ b/gen/datacatalog1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Data Catalog* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *datacatalog:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Data Catalog* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *datacatalog:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Data Catalog* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/data-catalog/docs/). diff --git a/gen/datafusion1-cli/Cargo.toml b/gen/datafusion1-cli/Cargo.toml index 6ee8ea73ee..4413e9139b 100644 --- a/gen/datafusion1-cli/Cargo.toml +++ b/gen/datafusion1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datafusion1-cli" -version = "4.0.1+20211028" +version = "5.0.2+20221213" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Fusion (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datafusion1-cli" @@ -20,13 +20,13 @@ name = "datafusion1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datafusion1] path = "../datafusion1" -version = "4.0.1+20211028" +version = "5.0.2+20221213" + diff --git a/gen/datafusion1-cli/README.md b/gen/datafusion1-cli/README.md index d9439ec419..e29877ad9e 100644 --- a/gen/datafusion1-cli/README.md +++ b/gen/datafusion1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Data Fusion* API at revision *20211028*. The CLI is at version *4.0.1*. +This documentation was generated from the *Data Fusion* API at revision *20221213*. The CLI is at version *5.0.2*. ```bash datafusion1 [options] @@ -33,6 +33,9 @@ datafusion1 [options] locations-get [-p ]... [-o ] locations-instances-create (-r )... [-p ]... [-o ] locations-instances-delete [-p ]... [-o ] + locations-instances-dns-peerings-create (-r )... [-p ]... [-o ] + locations-instances-dns-peerings-delete [-p ]... [-o ] + locations-instances-dns-peerings-list [-p ]... [-o ] locations-instances-get [-p ]... [-o ] locations-instances-get-iam-policy [-p ]... [-o ] locations-instances-list [-p ]... [-o ] diff --git a/gen/datafusion1-cli/mkdocs.yml b/gen/datafusion1-cli/mkdocs.yml index 61dc092805..f18624724b 100644 --- a/gen/datafusion1-cli/mkdocs.yml +++ b/gen/datafusion1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Data Fusion v4.0.1+20211028 +site_name: Data Fusion v5.0.2+20221213 site_url: http://byron.github.io/google-apis-rs/google-datafusion1-cli site_description: A complete library to interact with Data Fusion (protocol v1) @@ -7,24 +7,28 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datafusion1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-get-iam-policy.md', 'Projects', 'Locations Instances Get Iam Policy'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-patch.md', 'Projects', 'Locations Instances Patch'] -- 
['projects_locations-instances-restart.md', 'Projects', 'Locations Instances Restart'] -- ['projects_locations-instances-set-iam-policy.md', 'Projects', 'Locations Instances Set Iam Policy'] -- ['projects_locations-instances-test-iam-permissions.md', 'Projects', 'Locations Instances Test Iam Permissions'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-versions-list.md', 'Projects', 'Locations Versions List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Dns Peerings Create': 'projects_locations-instances-dns-peerings-create.md' + - 'Locations Instances Dns Peerings Delete': 'projects_locations-instances-dns-peerings-delete.md' + - 'Locations Instances Dns Peerings List': 'projects_locations-instances-dns-peerings-list.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances Get Iam Policy': 'projects_locations-instances-get-iam-policy.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Patch': 'projects_locations-instances-patch.md' + - 'Locations Instances Restart': 'projects_locations-instances-restart.md' + - 'Locations Instances Set Iam Policy': 'projects_locations-instances-set-iam-policy.md' + - 'Locations Instances Test Iam Permissions': 'projects_locations-instances-test-iam-permissions.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations 
Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Versions List': 'projects_locations-versions-list.md' theme: readthedocs diff --git a/gen/datafusion1-cli/src/client.rs b/gen/datafusion1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datafusion1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datafusion1-cli/src/main.rs b/gen/datafusion1-cli/src/main.rs index dba3481e1f..cbb7372fe5 100644 --- a/gen/datafusion1-cli/src/main.rs +++ b/gen/datafusion1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datafusion1::{api, Error, oauth2}; +use google_datafusion1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -136,6 +135,9 @@ where "enable-rbac" => Some(("enableRbac", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-logging" => Some(("enableStackdriverLogging", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-monitoring" => Some(("enableStackdriverMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "enable-zone-separation" => Some(("enableZoneSeparation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "event-publish-config.enabled" => Some(("eventPublishConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "event-publish-config.topic" => Some(("eventPublishConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gcs-bucket" => Some(("gcsBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), @@ -154,7 +156,7 @@ where "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-endpoint", "create-time", "crypto-key-config", "dataproc-service-account", "description", "disabled-reason", "display-name", "enable-rbac", "enable-stackdriver-logging", "enable-stackdriver-monitoring", "gcs-bucket", "ip-allocation", "key-reference", "labels", "name", "network", "network-config", "options", "p4-service-account", "private-instance", "service-account", "service-endpoint", "state", "state-message", "tenant-project-id", "type", "update-time", "version", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-endpoint", "create-time", "crypto-key-config", "dataproc-service-account", "description", "disabled-reason", "display-name", "enable-rbac", "enable-stackdriver-logging", "enable-stackdriver-monitoring", "enable-zone-separation", "enabled", "event-publish-config", "gcs-bucket", "ip-allocation", "key-reference", "labels", "name", "network", "network-config", "options", "p4-service-account", "private-instance", "service-account", "service-endpoint", "state", "state-message", "tenant-project-id", "topic", "type", "update-time", "version", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -270,6 +272,210 @@ where } } + async fn _projects_locations_instances_dns_peerings_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = 
parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "domain" => Some(("domain", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-network" => Some(("targetNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-project" => Some(("targetProject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "domain", "name", "target-network", "target-project"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DnsPeering = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_dns_peerings_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "dns-peering-id" => { + call = call.dns_peering_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["dns-peering-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_dns_peerings_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_instances_dns_peerings_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_dns_peerings_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_instances_dns_peerings_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_instances_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_instances_get(opt.value_of("name").unwrap_or("")); @@ -329,7 +535,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -388,7 +594,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -476,6 +682,9 @@ where "enable-rbac" => Some(("enableRbac", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-logging" => 
Some(("enableStackdriverLogging", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-monitoring" => Some(("enableStackdriverMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "enable-zone-separation" => Some(("enableZoneSeparation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "event-publish-config.enabled" => Some(("eventPublishConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "event-publish-config.topic" => Some(("eventPublishConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gcs-bucket" => Some(("gcsBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -494,7 +703,7 @@ where "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-endpoint", "create-time", "crypto-key-config", "dataproc-service-account", "description", "disabled-reason", "display-name", "enable-rbac", "enable-stackdriver-logging", "enable-stackdriver-monitoring", "gcs-bucket", "ip-allocation", "key-reference", "labels", "name", "network", "network-config", "options", "p4-service-account", "private-instance", "service-account", "service-endpoint", "state", "state-message", "tenant-project-id", "type", "update-time", "version", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-endpoint", "create-time", "crypto-key-config", "dataproc-service-account", "description", "disabled-reason", "display-name", "enable-rbac", "enable-stackdriver-logging", "enable-stackdriver-monitoring", 
"enable-zone-separation", "enabled", "event-publish-config", "gcs-bucket", "ip-allocation", "key-reference", "labels", "name", "network", "network-config", "options", "p4-service-account", "private-instance", "service-account", "service-endpoint", "state", "state-message", "tenant-project-id", "topic", "type", "update-time", "version", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -509,7 +718,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -824,10 +1033,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "include-unrevealed-locations" => { - call = call.include_unrevealed_locations(arg_from_str(value.unwrap_or("false"), err, "include-unrevealed-locations", "boolean")); + call = call.include_unrevealed_locations( value.map(|v| arg_from_str(v, err, "include-unrevealed-locations", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1077,7 +1286,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1139,10 +1348,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "latest-patch-only" => { - call = call.latest_patch_only(arg_from_str(value.unwrap_or("false"), err, "latest-patch-only", "boolean")); + call = call.latest_patch_only( value.map(|v| arg_from_str(v, err, "latest-patch-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1207,6 +1416,15 @@ where ("locations-instances-delete", Some(opt)) => { call_result = self._projects_locations_instances_delete(opt, dry_run, &mut err).await; }, + ("locations-instances-dns-peerings-create", Some(opt)) => { + call_result = self._projects_locations_instances_dns_peerings_create(opt, dry_run, &mut err).await; + }, + ("locations-instances-dns-peerings-delete", Some(opt)) => { + call_result = self._projects_locations_instances_dns_peerings_delete(opt, dry_run, &mut err).await; + }, + ("locations-instances-dns-peerings-list", Some(opt)) => { + call_result = self._projects_locations_instances_dns_peerings_list(opt, dry_run, &mut err).await; + }, ("locations-instances-get", Some(opt)) => { call_result = self._projects_locations_instances_get(opt, dry_run, &mut err).await; }, @@ -1325,7 +1543,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-get-iam-policy', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-restart', 'locations-instances-set-iam-policy', 'locations-instances-test-iam-permissions', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list' and 'locations-versions-list'", vec![ + ("projects", "methods: 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-dns-peerings-create', 'locations-instances-dns-peerings-delete', 
'locations-instances-dns-peerings-list', 'locations-instances-get', 'locations-instances-get-iam-policy', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-restart', 'locations-instances-set-iam-policy', 'locations-instances-test-iam-permissions', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list' and 'locations-versions-list'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_datafusion1_cli/projects_locations-get", @@ -1392,6 +1610,78 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-dns-peerings-create", + Some(r##"Creates DNS peering on the given resource."##), + "Details at http://byron.github.io/google-apis-rs/google_datafusion1_cli/projects_locations-instances-dns-peerings-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource on which DNS peering will be created."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-dns-peerings-delete", + Some(r##"Deletes DNS peering on the given resource."##), + "Details at http://byron.github.io/google-apis-rs/google_datafusion1_cli/projects_locations-instances-dns-peerings-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the DNS peering zone to delete. 
Format: projects/{project}/locations/{location}/instances/{instance}/dnsPeerings/{dns_peering}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-dns-peerings-list", + Some(r##"Lists DNS peerings for a given resource."##), + "Details at http://byron.github.io/google-apis-rs/google_datafusion1_cli/projects_locations-instances-dns-peerings-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent, which owns this collection of dns peerings. Format: projects/{project}/locations/{location}/instances/{instance}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1426,7 +1716,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1526,7 +1816,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1554,7 +1844,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1720,7 +2010,7 @@ async fn main() { let mut app = App::new("datafusion1") .author("Sebastian Thiel ") - .version("4.0.1+20211028") + .version("5.0.2+20221213") .about("Cloud Data Fusion is a fully-managed, cloud native, enterprise data integration service for quickly building and managing data pipelines. It provides a graphical interface to increase time efficiency and reduce complexity, and allows business users, developers, and data scientists to easily and reliably build scalable data integration solutions to cleanse, prepare, blend, transfer and transform data without having to wrestle with infrastructure.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datafusion1_cli") .arg(Arg::with_name("url") diff --git a/gen/datafusion1/Cargo.toml b/gen/datafusion1/Cargo.toml index 13e1c9c4a4..c3f52e9793 100644 --- a/gen/datafusion1/Cargo.toml +++ b/gen/datafusion1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datafusion1" -version = "5.0.2-beta-1+20221213" +version = "5.0.2+20221213" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Fusion (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datafusion1" homepage = "https://cloud.google.com/data-fusion/docs" -documentation = "https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213" 
+documentation = "https://docs.rs/google-datafusion1/5.0.2+20221213" license = "MIT" keywords = ["datafusion", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datafusion1/README.md b/gen/datafusion1/README.md index f6154cd17c..23ff35b695 100644 --- a/gen/datafusion1/README.md +++ b/gen/datafusion1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-datafusion1` library allows access to all features of the *Google Data Fusion* service. -This documentation was generated from *Data Fusion* crate version *5.0.2-beta-1+20221213*, where *20221213* is the exact revision of the *datafusion:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Data Fusion* crate version *5.0.2+20221213*, where *20221213* is the exact revision of the *datafusion:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Data Fusion* *v1* API can be found at the [official documentation site](https://cloud.google.com/data-fusion/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/DataFusion) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/DataFusion) ... 
* projects - * [*locations get*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceDeleteCall), [*locations instances dns peerings create*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceDnsPeeringCreateCall), [*locations instances dns peerings delete*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceDnsPeeringDeleteCall), [*locations instances dns peerings list*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceDnsPeeringListCall), [*locations instances get*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceGetCall), [*locations instances get iam policy*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceGetIamPolicyCall), [*locations instances list*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstancePatchCall), [*locations instances restart*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceRestartCall), [*locations instances set iam policy*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceSetIamPolicyCall), [*locations instances test iam 
permissions*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationInstanceTestIamPermissionCall), [*locations list*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationOperationListCall) and [*locations versions list*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/api::ProjectLocationVersionListCall) + * [*locations get*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceDeleteCall), [*locations instances dns peerings create*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceDnsPeeringCreateCall), [*locations instances dns peerings delete*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceDnsPeeringDeleteCall), [*locations instances dns peerings list*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceDnsPeeringListCall), [*locations instances 
get*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceGetCall), [*locations instances get iam policy*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceGetIamPolicyCall), [*locations instances list*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstancePatchCall), [*locations instances restart*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceRestartCall), [*locations instances set iam policy*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceSetIamPolicyCall), [*locations instances test iam permissions*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationInstanceTestIamPermissionCall), [*locations list*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationOperationListCall) and [*locations versions list*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/api::ProjectLocationVersionListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/DataFusion)** +* **[Hub](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/DataFusion)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::CallBuilder) -* **[Resources](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::Part)** + * **[Parts](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::RequestValue) and -[decodable](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::RequestValue) and +[decodable](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datafusion1/5.0.2-beta-1+20221213/google_datafusion1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datafusion1/5.0.2+20221213/google_datafusion1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/datafusion1/src/api.rs b/gen/datafusion1/src/api.rs index a5a35d088f..f66f729148 100644 --- a/gen/datafusion1/src/api.rs +++ b/gen/datafusion1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> DataFusion { DataFusion { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datafusion.googleapis.com/".to_string(), _root_url: "https://datafusion.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> DataFusion { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datafusion1/src/client.rs b/gen/datafusion1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datafusion1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datafusion1/src/lib.rs b/gen/datafusion1/src/lib.rs index 596afcee78..cd2e0633f1 100644 --- a/gen/datafusion1/src/lib.rs +++ b/gen/datafusion1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Data Fusion* crate version *5.0.2-beta-1+20221213*, where *20221213* is the exact revision of the *datafusion:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Data Fusion* crate version *5.0.2+20221213*, where *20221213* is the exact revision of the *datafusion:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Data Fusion* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/data-fusion/docs). diff --git a/gen/datafusion1_beta1-cli/Cargo.toml b/gen/datafusion1_beta1-cli/Cargo.toml index a4bb604b73..2dc0b3ff65 100644 --- a/gen/datafusion1_beta1-cli/Cargo.toml +++ b/gen/datafusion1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datafusion1_beta1-cli" -version = "4.0.1+20211028" +version = "5.0.2+20221213" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Fusion (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datafusion1_beta1-cli" @@ -20,13 +20,13 @@ name = "datafusion1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datafusion1_beta1] path = "../datafusion1_beta1" -version = "4.0.1+20211028" +version = "5.0.2+20221213" + diff --git a/gen/datafusion1_beta1-cli/README.md b/gen/datafusion1_beta1-cli/README.md index 9ffb4a27f0..524d45886b 100644 --- a/gen/datafusion1_beta1-cli/README.md +++ b/gen/datafusion1_beta1-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Data Fusion* API at revision *20211028*. The CLI is at version *4.0.1*. +This documentation was generated from the *Data Fusion* API at revision *20221213*. The CLI is at version *5.0.2*. ```bash datafusion1-beta1 [options] @@ -33,9 +33,9 @@ datafusion1-beta1 [options] locations-get [-p ]... [-o ] locations-instances-create (-r )... [-p ]... [-o ] locations-instances-delete [-p ]... [-o ] - locations-instances-dns-peerings-add (-r )... [-p ]... [-o ] + locations-instances-dns-peerings-create (-r )... [-p ]... [-o ] + locations-instances-dns-peerings-delete [-p ]... [-o ] locations-instances-dns-peerings-list [-p ]... [-o ] - locations-instances-dns-peerings-remove (-r )... [-p ]... [-o ] locations-instances-get [-p ]... [-o ] locations-instances-get-iam-policy [-p ]... [-o ] locations-instances-list [-p ]... [-o ] diff --git a/gen/datafusion1_beta1-cli/mkdocs.yml b/gen/datafusion1_beta1-cli/mkdocs.yml index 69ae4d5aa1..958bc58250 100644 --- a/gen/datafusion1_beta1-cli/mkdocs.yml +++ b/gen/datafusion1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Data Fusion v4.0.1+20211028 +site_name: Data Fusion v5.0.2+20221213 site_url: http://byron.github.io/google-apis-rs/google-datafusion1_beta1-cli site_description: A complete library to interact with Data Fusion (protocol v1beta1) @@ -7,33 +7,34 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datafusion1_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- ['projects_locations-instances-dns-peerings-add.md', 'Projects', 'Locations Instances Dns Peerings Add'] -- 
['projects_locations-instances-dns-peerings-list.md', 'Projects', 'Locations Instances Dns Peerings List'] -- ['projects_locations-instances-dns-peerings-remove.md', 'Projects', 'Locations Instances Dns Peerings Remove'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-get-iam-policy.md', 'Projects', 'Locations Instances Get Iam Policy'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-namespaces-get-iam-policy.md', 'Projects', 'Locations Instances Namespaces Get Iam Policy'] -- ['projects_locations-instances-namespaces-list.md', 'Projects', 'Locations Instances Namespaces List'] -- ['projects_locations-instances-namespaces-set-iam-policy.md', 'Projects', 'Locations Instances Namespaces Set Iam Policy'] -- ['projects_locations-instances-namespaces-test-iam-permissions.md', 'Projects', 'Locations Instances Namespaces Test Iam Permissions'] -- ['projects_locations-instances-patch.md', 'Projects', 'Locations Instances Patch'] -- ['projects_locations-instances-restart.md', 'Projects', 'Locations Instances Restart'] -- ['projects_locations-instances-set-iam-policy.md', 'Projects', 'Locations Instances Set Iam Policy'] -- ['projects_locations-instances-test-iam-permissions.md', 'Projects', 'Locations Instances Test Iam Permissions'] -- ['projects_locations-instances-upgrade.md', 'Projects', 'Locations Instances Upgrade'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-remove-iam-policy.md', 'Projects', 'Locations Remove Iam Policy'] -- 
['projects_locations-versions-list.md', 'Projects', 'Locations Versions List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Dns Peerings Create': 'projects_locations-instances-dns-peerings-create.md' + - 'Locations Instances Dns Peerings Delete': 'projects_locations-instances-dns-peerings-delete.md' + - 'Locations Instances Dns Peerings List': 'projects_locations-instances-dns-peerings-list.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances Get Iam Policy': 'projects_locations-instances-get-iam-policy.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Namespaces Get Iam Policy': 'projects_locations-instances-namespaces-get-iam-policy.md' + - 'Locations Instances Namespaces List': 'projects_locations-instances-namespaces-list.md' + - 'Locations Instances Namespaces Set Iam Policy': 'projects_locations-instances-namespaces-set-iam-policy.md' + - 'Locations Instances Namespaces Test Iam Permissions': 'projects_locations-instances-namespaces-test-iam-permissions.md' + - 'Locations Instances Patch': 'projects_locations-instances-patch.md' + - 'Locations Instances Restart': 'projects_locations-instances-restart.md' + - 'Locations Instances Set Iam Policy': 'projects_locations-instances-set-iam-policy.md' + - 'Locations Instances Test Iam Permissions': 'projects_locations-instances-test-iam-permissions.md' + - 'Locations Instances Upgrade': 'projects_locations-instances-upgrade.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 
'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Remove Iam Policy': 'projects_locations-remove-iam-policy.md' + - 'Locations Versions List': 'projects_locations-versions-list.md' theme: readthedocs diff --git a/gen/datafusion1_beta1-cli/src/client.rs b/gen/datafusion1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datafusion1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datafusion1_beta1-cli/src/main.rs b/gen/datafusion1_beta1-cli/src/main.rs index 0019350bc8..0d9a1ae533 100644 --- a/gen/datafusion1_beta1-cli/src/main.rs +++ b/gen/datafusion1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datafusion1_beta1::{api, Error, oauth2}; +use google_datafusion1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -136,6 +135,9 @@ where "enable-rbac" => Some(("enableRbac", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-logging" => Some(("enableStackdriverLogging", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-monitoring" => Some(("enableStackdriverMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "enable-zone-separation" => Some(("enableZoneSeparation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "event-publish-config.enabled" => Some(("eventPublishConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "event-publish-config.topic" => Some(("eventPublishConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gcs-bucket" => Some(("gcsBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), @@ -154,7 +156,7 @@ where "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-endpoint", "create-time", "crypto-key-config", "dataproc-service-account", "description", "disabled-reason", "display-name", "enable-rbac", "enable-stackdriver-logging", "enable-stackdriver-monitoring", "gcs-bucket", "ip-allocation", "key-reference", "labels", "name", "network", "network-config", "options", "p4-service-account", "private-instance", "service-account", "service-endpoint", "state", "state-message", "tenant-project-id", "type", "update-time", "version", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-endpoint", "create-time", "crypto-key-config", "dataproc-service-account", "description", "disabled-reason", "display-name", "enable-rbac", "enable-stackdriver-logging", "enable-stackdriver-monitoring", "enable-zone-separation", "enabled", "event-publish-config", "gcs-bucket", "ip-allocation", "key-reference", "labels", "name", "network", "network-config", "options", "p4-service-account", "private-instance", "service-account", "service-endpoint", "state", "state-message", "tenant-project-id", "topic", "type", "update-time", "version", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -270,7 +272,7 @@ where } } - async fn _projects_locations_instances_dns_peerings_add(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _projects_locations_instances_dns_peerings_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); @@ -293,13 +295,13 @@ where let type_info: Option<(&'static str, 
JsonTypeInfo)> = match &temp_cursor.to_string()[..] { - "dns-peering.description" => Some(("dnsPeering.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "dns-peering.domain" => Some(("dnsPeering.domain", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "dns-peering.target-network" => Some(("dnsPeering.targetNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "dns-peering.target-project" => Some(("dnsPeering.targetProject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "dns-peering.zone" => Some(("dnsPeering.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "domain" => Some(("domain", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-network" => Some(("targetNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-project" => Some(("targetProject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "dns-peering", "domain", "target-network", "target-project", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "domain", "name", "target-network", "target-project"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -308,8 +310,64 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::AddDnsPeeringRequest = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_instances_dns_peerings_add(request, opt.value_of("parent").unwrap_or("")); + let mut 
request: api::DnsPeering = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_dns_peerings_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "dns-peering-id" => { + call = call.dns_peering_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["dns-peering-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_dns_peerings_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_instances_dns_peerings_delete(opt.value_of("name").unwrap_or("")); 
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -369,7 +427,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -418,91 +476,6 @@ where } } - async fn _projects_locations_instances_dns_peerings_remove(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["zone"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::RemoveDnsPeeringRequest = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_instances_dns_peerings_remove(request, opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - 
json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _projects_locations_instances_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_instances_get(opt.value_of("name").unwrap_or("")); @@ -562,7 +535,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -621,7 +594,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -683,7 +656,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -745,7 +718,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -999,6 +972,9 @@ where "enable-rbac" => Some(("enableRbac", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-logging" => Some(("enableStackdriverLogging", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-monitoring" => Some(("enableStackdriverMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "enable-zone-separation" => Some(("enableZoneSeparation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "event-publish-config.enabled" => Some(("eventPublishConfig.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "event-publish-config.topic" => Some(("eventPublishConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gcs-bucket" => Some(("gcsBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1017,7 +993,7 @@ where "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zone" => Some(("zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-endpoint", "create-time", "crypto-key-config", "dataproc-service-account", "description", "disabled-reason", "display-name", "enable-rbac", "enable-stackdriver-logging", "enable-stackdriver-monitoring", "gcs-bucket", "ip-allocation", "key-reference", "labels", "name", "network", "network-config", "options", "p4-service-account", "private-instance", "service-account", "service-endpoint", "state", "state-message", "tenant-project-id", "type", "update-time", "version", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-endpoint", "create-time", "crypto-key-config", "dataproc-service-account", "description", "disabled-reason", "display-name", "enable-rbac", 
"enable-stackdriver-logging", "enable-stackdriver-monitoring", "enable-zone-separation", "enabled", "event-publish-config", "gcs-bucket", "ip-allocation", "key-reference", "labels", "name", "network", "network-config", "options", "p4-service-account", "private-instance", "service-account", "service-endpoint", "state", "state-message", "tenant-project-id", "topic", "type", "update-time", "version", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1032,7 +1008,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1431,10 +1407,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "include-unrevealed-locations" => { - call = call.include_unrevealed_locations(arg_from_str(value.unwrap_or("false"), err, "include-unrevealed-locations", "boolean")); + call = call.include_unrevealed_locations( value.map(|v| arg_from_str(v, err, "include-unrevealed-locations", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1684,7 +1660,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1830,10 +1806,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "latest-patch-only" => { - call = call.latest_patch_only(arg_from_str(value.unwrap_or("false"), err, "latest-patch-only", "boolean")); + call = call.latest_patch_only( value.map(|v| arg_from_str(v, err, "latest-patch-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1898,15 +1874,15 @@ where ("locations-instances-delete", Some(opt)) => { call_result = self._projects_locations_instances_delete(opt, dry_run, &mut err).await; }, - ("locations-instances-dns-peerings-add", Some(opt)) => { - call_result = self._projects_locations_instances_dns_peerings_add(opt, dry_run, &mut err).await; + ("locations-instances-dns-peerings-create", Some(opt)) => { + call_result = self._projects_locations_instances_dns_peerings_create(opt, dry_run, &mut err).await; + }, + ("locations-instances-dns-peerings-delete", Some(opt)) => { + call_result = self._projects_locations_instances_dns_peerings_delete(opt, dry_run, &mut err).await; }, ("locations-instances-dns-peerings-list", Some(opt)) => { call_result = self._projects_locations_instances_dns_peerings_list(opt, dry_run, &mut err).await; }, - ("locations-instances-dns-peerings-remove", Some(opt)) => { - call_result = self._projects_locations_instances_dns_peerings_remove(opt, dry_run, &mut err).await; - }, ("locations-instances-get", Some(opt)) => { call_result = self._projects_locations_instances_get(opt, dry_run, &mut err).await; }, @@ -2043,7 +2019,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-dns-peerings-add', 'locations-instances-dns-peerings-list', 'locations-instances-dns-peerings-remove', 'locations-instances-get', 'locations-instances-get-iam-policy', 
'locations-instances-list', 'locations-instances-namespaces-get-iam-policy', 'locations-instances-namespaces-list', 'locations-instances-namespaces-set-iam-policy', 'locations-instances-namespaces-test-iam-permissions', 'locations-instances-patch', 'locations-instances-restart', 'locations-instances-set-iam-policy', 'locations-instances-test-iam-permissions', 'locations-instances-upgrade', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-remove-iam-policy' and 'locations-versions-list'", vec![ + ("projects", "methods: 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-dns-peerings-create', 'locations-instances-dns-peerings-delete', 'locations-instances-dns-peerings-list', 'locations-instances-get', 'locations-instances-get-iam-policy', 'locations-instances-list', 'locations-instances-namespaces-get-iam-policy', 'locations-instances-namespaces-list', 'locations-instances-namespaces-set-iam-policy', 'locations-instances-namespaces-test-iam-permissions', 'locations-instances-patch', 'locations-instances-restart', 'locations-instances-set-iam-policy', 'locations-instances-test-iam-permissions', 'locations-instances-upgrade', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-remove-iam-policy' and 'locations-versions-list'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_datafusion1_beta1_cli/projects_locations-get", @@ -2116,9 +2092,9 @@ async fn main() { Some(false), Some(false)), ]), - ("locations-instances-dns-peerings-add", - Some(r##"Add DNS peering on the given resource."##), - "Details at http://byron.github.io/google-apis-rs/google_datafusion1_beta1_cli/projects_locations-instances-dns-peerings-add", + 
("locations-instances-dns-peerings-create", + Some(r##"Creates DNS peering on the given resource."##), + "Details at http://byron.github.io/google-apis-rs/google_datafusion1_beta1_cli/projects_locations-instances-dns-peerings-create", vec![ (Some(r##"parent"##), None, @@ -2144,13 +2120,13 @@ async fn main() { Some(false), Some(false)), ]), - ("locations-instances-dns-peerings-list", - Some(r##"List DNS peering for a given resource."##), - "Details at http://byron.github.io/google-apis-rs/google_datafusion1_beta1_cli/projects_locations-instances-dns-peerings-list", + ("locations-instances-dns-peerings-delete", + Some(r##"Deletes DNS peering on the given resource."##), + "Details at http://byron.github.io/google-apis-rs/google_datafusion1_beta1_cli/projects_locations-instances-dns-peerings-delete", vec![ - (Some(r##"parent"##), + (Some(r##"name"##), None, - Some(r##"Required. The resource on which dns peering will be listed."##), + Some(r##"Required. The name of the DNS peering zone to delete. Format: projects/{project}/locations/{location}/instances/{instance}/dnsPeerings/{dns_peering}"##), Some(true), Some(false)), @@ -2166,22 +2142,16 @@ async fn main() { Some(false), Some(false)), ]), - ("locations-instances-dns-peerings-remove", - Some(r##"Remove DNS peering on the given resource."##), - "Details at http://byron.github.io/google-apis-rs/google_datafusion1_beta1_cli/projects_locations-instances-dns-peerings-remove", + ("locations-instances-dns-peerings-list", + Some(r##"Lists DNS peerings for a given resource."##), + "Details at http://byron.github.io/google-apis-rs/google_datafusion1_beta1_cli/projects_locations-instances-dns-peerings-list", vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource on which DNS peering will be removed."##), + Some(r##"Required. The parent, which owns this collection of dns peerings. 
Format: projects/{project}/locations/{location}/instances/{instance}"##), Some(true), Some(false)), - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), @@ -2222,7 +2192,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2266,7 +2236,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2310,7 +2280,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2338,7 +2308,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2422,7 +2392,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2450,7 +2420,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2672,7 +2642,7 @@ async fn main() { let mut app = App::new("datafusion1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20211028") + .version("5.0.2+20221213") .about("Cloud Data Fusion is a fully-managed, cloud native, enterprise data integration service for quickly building and managing data pipelines. 
It provides a graphical interface to increase time efficiency and reduce complexity, and allows business users, developers, and data scientists to easily and reliably build scalable data integration solutions to cleanse, prepare, blend, transfer and transform data without having to wrestle with infrastructure.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datafusion1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/datafusion1_beta1/Cargo.toml b/gen/datafusion1_beta1/Cargo.toml index 8107f58def..1cbc4cfd50 100644 --- a/gen/datafusion1_beta1/Cargo.toml +++ b/gen/datafusion1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datafusion1_beta1" -version = "5.0.2-beta-1+20221213" +version = "5.0.2+20221213" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Fusion (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datafusion1_beta1" homepage = "https://cloud.google.com/data-fusion/docs" -documentation = "https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213" +documentation = "https://docs.rs/google-datafusion1_beta1/5.0.2+20221213" license = "MIT" keywords = ["datafusion", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datafusion1_beta1/README.md b/gen/datafusion1_beta1/README.md index 010913985e..b8dd066c0b 100644 --- a/gen/datafusion1_beta1/README.md +++ b/gen/datafusion1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-datafusion1_beta1` library allows access to all features of the *Google Data Fusion* service. -This documentation was generated from *Data Fusion* crate version *5.0.2-beta-1+20221213*, where *20221213* is the exact revision of the *datafusion:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Data Fusion* crate version *5.0.2+20221213*, where *20221213* is the exact revision of the *datafusion:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Data Fusion* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/data-fusion/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/DataFusion) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/DataFusion) ... * projects - * [*locations get*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceDeleteCall), [*locations instances dns peerings create*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceDnsPeeringCreateCall), [*locations instances dns peerings delete*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceDnsPeeringDeleteCall), [*locations instances dns peerings list*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceDnsPeeringListCall), [*locations instances get*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceGetCall), [*locations instances get iam 
policy*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceGetIamPolicyCall), [*locations instances list*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceListCall), [*locations instances namespaces get iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceNamespaceGetIamPolicyCall), [*locations instances namespaces list*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceNamespaceListCall), [*locations instances namespaces set iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceNamespaceSetIamPolicyCall), [*locations instances namespaces test iam permissions*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceNamespaceTestIamPermissionCall), [*locations instances patch*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstancePatchCall), [*locations instances restart*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceRestartCall), [*locations instances set iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceSetIamPolicyCall), [*locations instances test iam permissions*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceTestIamPermissionCall), [*locations instances upgrade*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceUpgradeCall), [*locations 
list*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationOperationListCall), [*locations remove iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationRemoveIamPolicyCall) and [*locations versions list*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/api::ProjectLocationVersionListCall) + * [*locations get*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceDeleteCall), [*locations instances dns peerings create*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceDnsPeeringCreateCall), [*locations instances dns peerings delete*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceDnsPeeringDeleteCall), [*locations instances dns peerings 
list*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceDnsPeeringListCall), [*locations instances get*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceGetCall), [*locations instances get iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceGetIamPolicyCall), [*locations instances list*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceListCall), [*locations instances namespaces get iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceNamespaceGetIamPolicyCall), [*locations instances namespaces list*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceNamespaceListCall), [*locations instances namespaces set iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceNamespaceSetIamPolicyCall), [*locations instances namespaces test iam permissions*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceNamespaceTestIamPermissionCall), [*locations instances patch*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstancePatchCall), [*locations instances restart*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceRestartCall), [*locations instances set iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceSetIamPolicyCall), [*locations instances test iam permissions*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceTestIamPermissionCall), 
[*locations instances upgrade*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationInstanceUpgradeCall), [*locations list*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationOperationListCall), [*locations remove iam policy*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationRemoveIamPolicyCall) and [*locations versions list*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/api::ProjectLocationVersionListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/DataFusion)** +* **[Hub](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/DataFusion)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datafusion1_beta1/5.0.2-beta-1+20221213/google_datafusion1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datafusion1_beta1/5.0.2+20221213/google_datafusion1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/datafusion1_beta1/src/api.rs b/gen/datafusion1_beta1/src/api.rs index a4631851ae..6b3ff49a52 100644 --- a/gen/datafusion1_beta1/src/api.rs +++ b/gen/datafusion1_beta1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> DataFusion { DataFusion { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datafusion.googleapis.com/".to_string(), _root_url: "https://datafusion.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> DataFusion { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datafusion1_beta1/src/client.rs b/gen/datafusion1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datafusion1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datafusion1_beta1/src/lib.rs b/gen/datafusion1_beta1/src/lib.rs index ad125462cd..95b5c7303c 100644 --- a/gen/datafusion1_beta1/src/lib.rs +++ b/gen/datafusion1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Data Fusion* crate version *5.0.2-beta-1+20221213*, where *20221213* is the exact revision of the *datafusion:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Data Fusion* crate version *5.0.2+20221213*, where *20221213* is the exact revision of the *datafusion:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Data Fusion* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/data-fusion/docs). diff --git a/gen/datalabeling1_beta1-cli/Cargo.toml b/gen/datalabeling1_beta1-cli/Cargo.toml index 4f869ed83e..921b42be4e 100644 --- a/gen/datalabeling1_beta1-cli/Cargo.toml +++ b/gen/datalabeling1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datalabeling1_beta1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20221205" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Labeling (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datalabeling1_beta1-cli" @@ -20,13 +20,13 @@ name = "datalabeling1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datalabeling1_beta1] path = "../datalabeling1_beta1" -version = "4.0.1+20220301" +version = "5.0.2+20221205" + diff --git a/gen/datalabeling1_beta1-cli/README.md b/gen/datalabeling1_beta1-cli/README.md index 8bc4de01ac..358f3f6ccd 100644 --- a/gen/datalabeling1_beta1-cli/README.md +++ 
b/gen/datalabeling1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Data Labeling* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Data Labeling* API at revision *20221205*. The CLI is at version *5.0.2*. ```bash datalabeling1-beta1 [options] diff --git a/gen/datalabeling1_beta1-cli/mkdocs.yml b/gen/datalabeling1_beta1-cli/mkdocs.yml index 9527e39bcd..9298a126ec 100644 --- a/gen/datalabeling1_beta1-cli/mkdocs.yml +++ b/gen/datalabeling1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Data Labeling v4.0.1+20220301 +site_name: Data Labeling v5.0.2+20221205 site_url: http://byron.github.io/google-apis-rs/google-datalabeling1_beta1-cli site_description: A complete library to interact with Data Labeling (protocol v1beta1) @@ -7,55 +7,56 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datalabeling1_be docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_annotation-spec-sets-create.md', 'Projects', 'Annotation Spec Sets Create'] -- ['projects_annotation-spec-sets-delete.md', 'Projects', 'Annotation Spec Sets Delete'] -- ['projects_annotation-spec-sets-get.md', 'Projects', 'Annotation Spec Sets Get'] -- ['projects_annotation-spec-sets-list.md', 'Projects', 'Annotation Spec Sets List'] -- ['projects_datasets-annotated-datasets-data-items-get.md', 'Projects', 'Datasets Annotated Datasets Data Items Get'] -- ['projects_datasets-annotated-datasets-data-items-list.md', 'Projects', 'Datasets Annotated Datasets Data Items List'] -- ['projects_datasets-annotated-datasets-delete.md', 'Projects', 'Datasets Annotated Datasets Delete'] -- ['projects_datasets-annotated-datasets-examples-get.md', 'Projects', 'Datasets Annotated Datasets Examples Get'] -- ['projects_datasets-annotated-datasets-examples-list.md', 'Projects', 'Datasets Annotated Datasets Examples 
List'] -- ['projects_datasets-annotated-datasets-feedback-threads-delete.md', 'Projects', 'Datasets Annotated Datasets Feedback Threads Delete'] -- ['projects_datasets-annotated-datasets-feedback-threads-feedback-messages-create.md', 'Projects', 'Datasets Annotated Datasets Feedback Threads Feedback Messages Create'] -- ['projects_datasets-annotated-datasets-feedback-threads-feedback-messages-delete.md', 'Projects', 'Datasets Annotated Datasets Feedback Threads Feedback Messages Delete'] -- ['projects_datasets-annotated-datasets-feedback-threads-feedback-messages-get.md', 'Projects', 'Datasets Annotated Datasets Feedback Threads Feedback Messages Get'] -- ['projects_datasets-annotated-datasets-feedback-threads-feedback-messages-list.md', 'Projects', 'Datasets Annotated Datasets Feedback Threads Feedback Messages List'] -- ['projects_datasets-annotated-datasets-feedback-threads-get.md', 'Projects', 'Datasets Annotated Datasets Feedback Threads Get'] -- ['projects_datasets-annotated-datasets-feedback-threads-list.md', 'Projects', 'Datasets Annotated Datasets Feedback Threads List'] -- ['projects_datasets-annotated-datasets-get.md', 'Projects', 'Datasets Annotated Datasets Get'] -- ['projects_datasets-annotated-datasets-list.md', 'Projects', 'Datasets Annotated Datasets List'] -- ['projects_datasets-create.md', 'Projects', 'Datasets Create'] -- ['projects_datasets-data-items-get.md', 'Projects', 'Datasets Data Items Get'] -- ['projects_datasets-data-items-list.md', 'Projects', 'Datasets Data Items List'] -- ['projects_datasets-delete.md', 'Projects', 'Datasets Delete'] -- ['projects_datasets-evaluations-example-comparisons-search.md', 'Projects', 'Datasets Evaluations Example Comparisons Search'] -- ['projects_datasets-evaluations-get.md', 'Projects', 'Datasets Evaluations Get'] -- ['projects_datasets-export-data.md', 'Projects', 'Datasets Export Data'] -- ['projects_datasets-get.md', 'Projects', 'Datasets Get'] -- ['projects_datasets-image-label.md', 'Projects', 
'Datasets Image Label'] -- ['projects_datasets-import-data.md', 'Projects', 'Datasets Import Data'] -- ['projects_datasets-list.md', 'Projects', 'Datasets List'] -- ['projects_datasets-text-label.md', 'Projects', 'Datasets Text Label'] -- ['projects_datasets-video-label.md', 'Projects', 'Datasets Video Label'] -- ['projects_evaluation-jobs-create.md', 'Projects', 'Evaluation Jobs Create'] -- ['projects_evaluation-jobs-delete.md', 'Projects', 'Evaluation Jobs Delete'] -- ['projects_evaluation-jobs-get.md', 'Projects', 'Evaluation Jobs Get'] -- ['projects_evaluation-jobs-list.md', 'Projects', 'Evaluation Jobs List'] -- ['projects_evaluation-jobs-patch.md', 'Projects', 'Evaluation Jobs Patch'] -- ['projects_evaluation-jobs-pause.md', 'Projects', 'Evaluation Jobs Pause'] -- ['projects_evaluation-jobs-resume.md', 'Projects', 'Evaluation Jobs Resume'] -- ['projects_evaluations-search.md', 'Projects', 'Evaluations Search'] -- ['projects_instructions-create.md', 'Projects', 'Instructions Create'] -- ['projects_instructions-delete.md', 'Projects', 'Instructions Delete'] -- ['projects_instructions-get.md', 'Projects', 'Instructions Get'] -- ['projects_instructions-list.md', 'Projects', 'Instructions List'] -- ['projects_operations-cancel.md', 'Projects', 'Operations Cancel'] -- ['projects_operations-delete.md', 'Projects', 'Operations Delete'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_operations-list.md', 'Projects', 'Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Annotation Spec Sets Create': 'projects_annotation-spec-sets-create.md' + - 'Annotation Spec Sets Delete': 'projects_annotation-spec-sets-delete.md' + - 'Annotation Spec Sets Get': 'projects_annotation-spec-sets-get.md' + - 'Annotation Spec Sets List': 'projects_annotation-spec-sets-list.md' + - 'Datasets Annotated Datasets Data Items Get': 'projects_datasets-annotated-datasets-data-items-get.md' + - 'Datasets Annotated Datasets Data Items List': 
'projects_datasets-annotated-datasets-data-items-list.md' + - 'Datasets Annotated Datasets Delete': 'projects_datasets-annotated-datasets-delete.md' + - 'Datasets Annotated Datasets Examples Get': 'projects_datasets-annotated-datasets-examples-get.md' + - 'Datasets Annotated Datasets Examples List': 'projects_datasets-annotated-datasets-examples-list.md' + - 'Datasets Annotated Datasets Feedback Threads Delete': 'projects_datasets-annotated-datasets-feedback-threads-delete.md' + - 'Datasets Annotated Datasets Feedback Threads Feedback Messages Create': 'projects_datasets-annotated-datasets-feedback-threads-feedback-messages-create.md' + - 'Datasets Annotated Datasets Feedback Threads Feedback Messages Delete': 'projects_datasets-annotated-datasets-feedback-threads-feedback-messages-delete.md' + - 'Datasets Annotated Datasets Feedback Threads Feedback Messages Get': 'projects_datasets-annotated-datasets-feedback-threads-feedback-messages-get.md' + - 'Datasets Annotated Datasets Feedback Threads Feedback Messages List': 'projects_datasets-annotated-datasets-feedback-threads-feedback-messages-list.md' + - 'Datasets Annotated Datasets Feedback Threads Get': 'projects_datasets-annotated-datasets-feedback-threads-get.md' + - 'Datasets Annotated Datasets Feedback Threads List': 'projects_datasets-annotated-datasets-feedback-threads-list.md' + - 'Datasets Annotated Datasets Get': 'projects_datasets-annotated-datasets-get.md' + - 'Datasets Annotated Datasets List': 'projects_datasets-annotated-datasets-list.md' + - 'Datasets Create': 'projects_datasets-create.md' + - 'Datasets Data Items Get': 'projects_datasets-data-items-get.md' + - 'Datasets Data Items List': 'projects_datasets-data-items-list.md' + - 'Datasets Delete': 'projects_datasets-delete.md' + - 'Datasets Evaluations Example Comparisons Search': 'projects_datasets-evaluations-example-comparisons-search.md' + - 'Datasets Evaluations Get': 'projects_datasets-evaluations-get.md' + - 'Datasets Export Data': 
'projects_datasets-export-data.md' + - 'Datasets Get': 'projects_datasets-get.md' + - 'Datasets Image Label': 'projects_datasets-image-label.md' + - 'Datasets Import Data': 'projects_datasets-import-data.md' + - 'Datasets List': 'projects_datasets-list.md' + - 'Datasets Text Label': 'projects_datasets-text-label.md' + - 'Datasets Video Label': 'projects_datasets-video-label.md' + - 'Evaluation Jobs Create': 'projects_evaluation-jobs-create.md' + - 'Evaluation Jobs Delete': 'projects_evaluation-jobs-delete.md' + - 'Evaluation Jobs Get': 'projects_evaluation-jobs-get.md' + - 'Evaluation Jobs List': 'projects_evaluation-jobs-list.md' + - 'Evaluation Jobs Patch': 'projects_evaluation-jobs-patch.md' + - 'Evaluation Jobs Pause': 'projects_evaluation-jobs-pause.md' + - 'Evaluation Jobs Resume': 'projects_evaluation-jobs-resume.md' + - 'Evaluations Search': 'projects_evaluations-search.md' + - 'Instructions Create': 'projects_instructions-create.md' + - 'Instructions Delete': 'projects_instructions-delete.md' + - 'Instructions Get': 'projects_instructions-get.md' + - 'Instructions List': 'projects_instructions-list.md' + - 'Operations Cancel': 'projects_operations-cancel.md' + - 'Operations Delete': 'projects_operations-delete.md' + - 'Operations Get': 'projects_operations-get.md' + - 'Operations List': 'projects_operations-list.md' theme: readthedocs diff --git a/gen/datalabeling1_beta1-cli/src/client.rs b/gen/datalabeling1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datalabeling1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use 
std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datalabeling1_beta1-cli/src/main.rs b/gen/datalabeling1_beta1-cli/src/main.rs index dd9d55d9a7..4bd293b838 100644 --- a/gen/datalabeling1_beta1-cli/src/main.rs +++ b/gen/datalabeling1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datalabeling1_beta1::{api, Error, oauth2}; +use google_datalabeling1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -253,7 +252,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -367,7 +366,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -537,7 +536,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -843,7 +842,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -954,7 +953,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1065,7 +1064,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1270,7 +1269,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1859,7 +1858,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2348,7 +2347,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2477,7 +2476,7 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2704,7 +2703,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2963,7 +2962,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3181,7 +3180,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4585,7 +4584,7 @@ async fn main() { let mut app = App::new("datalabeling1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20221205") .about("Public API for Google Cloud AI Data Labeling Service.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datalabeling1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/datalabeling1_beta1/Cargo.toml b/gen/datalabeling1_beta1/Cargo.toml index 1886f6c72e..afdf2ff1a5 100644 --- a/gen/datalabeling1_beta1/Cargo.toml +++ b/gen/datalabeling1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datalabeling1_beta1" -version = 
"5.0.2-beta-1+20221205" +version = "5.0.2+20221205" authors = ["Sebastian Thiel "] description = "A complete library to interact with Data Labeling (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datalabeling1_beta1" homepage = "https://cloud.google.com/data-labeling/docs/" -documentation = "https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205" +documentation = "https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205" license = "MIT" keywords = ["datalabeling", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datalabeling1_beta1/README.md b/gen/datalabeling1_beta1/README.md index a6f0adb0c0..126303e2cf 100644 --- a/gen/datalabeling1_beta1/README.md +++ b/gen/datalabeling1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-datalabeling1_beta1` library allows access to all features of the *Google Data Labeling* service. -This documentation was generated from *Data Labeling* crate version *5.0.2-beta-1+20221205*, where *20221205* is the exact revision of the *datalabeling:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Data Labeling* crate version *5.0.2+20221205*, where *20221205* is the exact revision of the *datalabeling:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Data Labeling* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/data-labeling/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/DataLabeling) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/DataLabeling) ... 
* projects - * [*annotation spec sets create*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectAnnotationSpecSetCreateCall), [*annotation spec sets delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectAnnotationSpecSetDeleteCall), [*annotation spec sets get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectAnnotationSpecSetGetCall), [*annotation spec sets list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectAnnotationSpecSetListCall), [*datasets annotated datasets data items get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetDataItemGetCall), [*datasets annotated datasets data items list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetDataItemListCall), [*datasets annotated datasets delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetDeleteCall), [*datasets annotated datasets examples get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetExampleGetCall), [*datasets annotated datasets examples list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetExampleListCall), [*datasets annotated datasets feedback threads delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadDeleteCall), [*datasets annotated datasets feedback threads feedback messages 
create*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadFeedbackMessageCreateCall), [*datasets annotated datasets feedback threads feedback messages delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadFeedbackMessageDeleteCall), [*datasets annotated datasets feedback threads feedback messages get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadFeedbackMessageGetCall), [*datasets annotated datasets feedback threads feedback messages list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadFeedbackMessageListCall), [*datasets annotated datasets feedback threads get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadGetCall), [*datasets annotated datasets feedback threads list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadListCall), [*datasets annotated datasets get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetGetCall), [*datasets annotated datasets list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetListCall), [*datasets create*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetCreateCall), [*datasets data items get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetDataItemGetCall), [*datasets data items 
list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetDataItemListCall), [*datasets delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetDeleteCall), [*datasets evaluations example comparisons search*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetEvaluationExampleComparisonSearchCall), [*datasets evaluations get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetEvaluationGetCall), [*datasets export data*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetExportDataCall), [*datasets get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetGetCall), [*datasets image label*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetImageLabelCall), [*datasets import data*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetImportDataCall), [*datasets list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetListCall), [*datasets text label*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetTextLabelCall), [*datasets video label*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectDatasetVideoLabelCall), [*evaluation jobs create*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobCreateCall), [*evaluation jobs delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobDeleteCall), 
[*evaluation jobs get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobGetCall), [*evaluation jobs list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobListCall), [*evaluation jobs patch*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobPatchCall), [*evaluation jobs pause*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobPauseCall), [*evaluation jobs resume*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobResumeCall), [*evaluations search*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectEvaluationSearchCall), [*instructions create*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectInstructionCreateCall), [*instructions delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectInstructionDeleteCall), [*instructions get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectInstructionGetCall), [*instructions list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectInstructionListCall), [*operations cancel*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectOperationCancelCall), [*operations delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectOperationDeleteCall), [*operations get*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectOperationGetCall) and [*operations 
list*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/api::ProjectOperationListCall) + * [*annotation spec sets create*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectAnnotationSpecSetCreateCall), [*annotation spec sets delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectAnnotationSpecSetDeleteCall), [*annotation spec sets get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectAnnotationSpecSetGetCall), [*annotation spec sets list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectAnnotationSpecSetListCall), [*datasets annotated datasets data items get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetDataItemGetCall), [*datasets annotated datasets data items list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetDataItemListCall), [*datasets annotated datasets delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetDeleteCall), [*datasets annotated datasets examples get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetExampleGetCall), [*datasets annotated datasets examples list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetExampleListCall), [*datasets annotated datasets feedback threads delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadDeleteCall), [*datasets annotated datasets feedback threads feedback messages 
create*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadFeedbackMessageCreateCall), [*datasets annotated datasets feedback threads feedback messages delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadFeedbackMessageDeleteCall), [*datasets annotated datasets feedback threads feedback messages get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadFeedbackMessageGetCall), [*datasets annotated datasets feedback threads feedback messages list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadFeedbackMessageListCall), [*datasets annotated datasets feedback threads get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadGetCall), [*datasets annotated datasets feedback threads list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetFeedbackThreadListCall), [*datasets annotated datasets get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetGetCall), [*datasets annotated datasets list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetAnnotatedDatasetListCall), [*datasets create*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetCreateCall), [*datasets data items get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetDataItemGetCall), [*datasets data items 
list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetDataItemListCall), [*datasets delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetDeleteCall), [*datasets evaluations example comparisons search*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetEvaluationExampleComparisonSearchCall), [*datasets evaluations get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetEvaluationGetCall), [*datasets export data*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetExportDataCall), [*datasets get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetGetCall), [*datasets image label*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetImageLabelCall), [*datasets import data*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetImportDataCall), [*datasets list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetListCall), [*datasets text label*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetTextLabelCall), [*datasets video label*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectDatasetVideoLabelCall), [*evaluation jobs create*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobCreateCall), [*evaluation jobs delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobDeleteCall), [*evaluation jobs 
get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobGetCall), [*evaluation jobs list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobListCall), [*evaluation jobs patch*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobPatchCall), [*evaluation jobs pause*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobPauseCall), [*evaluation jobs resume*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectEvaluationJobResumeCall), [*evaluations search*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectEvaluationSearchCall), [*instructions create*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectInstructionCreateCall), [*instructions delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectInstructionDeleteCall), [*instructions get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectInstructionGetCall), [*instructions list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectInstructionListCall), [*operations cancel*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectOperationCancelCall), [*operations delete*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectOperationDeleteCall), [*operations get*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectOperationGetCall) and [*operations list*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/api::ProjectOperationListCall) @@ -23,17 
+23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/DataLabeling)** +* **[Hub](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/DataLabeling)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::CallBuilder)** * 
operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datalabeling1_beta1/5.0.2-beta-1+20221205/google_datalabeling1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datalabeling1_beta1/5.0.2+20221205/google_datalabeling1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/datalabeling1_beta1/src/api.rs b/gen/datalabeling1_beta1/src/api.rs index ce50f430e4..28e1690e47 100644 --- a/gen/datalabeling1_beta1/src/api.rs +++ b/gen/datalabeling1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> DataLabeling { DataLabeling { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datalabeling.googleapis.com/".to_string(), _root_url: "https://datalabeling.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> DataLabeling { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datalabeling1_beta1/src/client.rs b/gen/datalabeling1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datalabeling1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datalabeling1_beta1/src/lib.rs b/gen/datalabeling1_beta1/src/lib.rs index 507cc0d37d..7ae1c13cfc 100644 --- a/gen/datalabeling1_beta1/src/lib.rs +++ b/gen/datalabeling1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Data Labeling* crate version *5.0.2-beta-1+20221205*, where *20221205* is the exact revision of the *datalabeling:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Data Labeling* crate version *5.0.2+20221205*, where *20221205* is the exact revision of the *datalabeling:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Data Labeling* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/data-labeling/docs/). diff --git a/gen/datamigration1-cli/Cargo.toml b/gen/datamigration1-cli/Cargo.toml index 9b8ed23cba..412bb5a7d9 100644 --- a/gen/datamigration1-cli/Cargo.toml +++ b/gen/datamigration1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datamigration1-cli" -version = "4.0.1+20220216" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Database Migration Service (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datamigration1-cli" @@ -20,13 +20,13 @@ name = "datamigration1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datamigration1] path = "../datamigration1" -version = "4.0.1+20220216" +version = "5.0.2+20230105" + diff --git a/gen/datamigration1-cli/README.md b/gen/datamigration1-cli/README.md index faef9c11f7..34b845c276 100644 --- a/gen/datamigration1-cli/README.md +++ b/gen/datamigration1-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Database Migration Service* API at revision *20220216*. The CLI is at version *4.0.1*. +This documentation was generated from the *Database Migration Service* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash datamigration1 [options] @@ -38,6 +38,20 @@ datamigration1 [options] locations-connection-profiles-patch (-r )... [-p ]... [-o ] locations-connection-profiles-set-iam-policy (-r )... [-p ]... [-o ] locations-connection-profiles-test-iam-permissions (-r )... [-p ]... [-o ] + locations-conversion-workspaces-apply (-r )... [-p ]... [-o ] + locations-conversion-workspaces-commit (-r )... [-p ]... [-o ] + locations-conversion-workspaces-convert (-r )... [-p ]... [-o ] + locations-conversion-workspaces-create (-r )... [-p ]... [-o ] + locations-conversion-workspaces-delete [-p ]... [-o ] + locations-conversion-workspaces-describe-conversion-workspace-revisions [-p ]... [-o ] + locations-conversion-workspaces-describe-database-entities [-p ]... [-o ] + locations-conversion-workspaces-get [-p ]... [-o ] + locations-conversion-workspaces-list [-p ]... [-o ] + locations-conversion-workspaces-mapping-rules-import (-r )... [-p ]... [-o ] + locations-conversion-workspaces-patch (-r )... [-p ]... [-o ] + locations-conversion-workspaces-rollback (-r )... [-p ]... [-o ] + locations-conversion-workspaces-search-background-jobs [-p ]... [-o ] + locations-conversion-workspaces-seed (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-list [-p ]... [-o ] locations-migration-jobs-create (-r )... [-p ]... [-o ] @@ -59,6 +73,10 @@ datamigration1 [options] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] + locations-private-connections-create (-r )... [-p ]... [-o ] + locations-private-connections-delete [-p ]... 
[-o ] + locations-private-connections-get [-p ]... [-o ] + locations-private-connections-list [-p ]... [-o ] datamigration1 --help Configuration: diff --git a/gen/datamigration1-cli/mkdocs.yml b/gen/datamigration1-cli/mkdocs.yml index 5b8cab38e0..fc8d79eaa9 100644 --- a/gen/datamigration1-cli/mkdocs.yml +++ b/gen/datamigration1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Database Migration Service v4.0.1+20220216 +site_name: Database Migration Service v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-datamigration1-cli site_description: A complete library to interact with Database Migration Service (protocol v1) @@ -7,37 +7,56 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datamigration1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-connection-profiles-create.md', 'Projects', 'Locations Connection Profiles Create'] -- ['projects_locations-connection-profiles-delete.md', 'Projects', 'Locations Connection Profiles Delete'] -- ['projects_locations-connection-profiles-get.md', 'Projects', 'Locations Connection Profiles Get'] -- ['projects_locations-connection-profiles-get-iam-policy.md', 'Projects', 'Locations Connection Profiles Get Iam Policy'] -- ['projects_locations-connection-profiles-list.md', 'Projects', 'Locations Connection Profiles List'] -- ['projects_locations-connection-profiles-patch.md', 'Projects', 'Locations Connection Profiles Patch'] -- ['projects_locations-connection-profiles-set-iam-policy.md', 'Projects', 'Locations Connection Profiles Set Iam Policy'] -- ['projects_locations-connection-profiles-test-iam-permissions.md', 'Projects', 'Locations Connection Profiles Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-migration-jobs-create.md', 'Projects', 'Locations Migration Jobs Create'] -- ['projects_locations-migration-jobs-delete.md', 
'Projects', 'Locations Migration Jobs Delete'] -- ['projects_locations-migration-jobs-generate-ssh-script.md', 'Projects', 'Locations Migration Jobs Generate Ssh Script'] -- ['projects_locations-migration-jobs-get.md', 'Projects', 'Locations Migration Jobs Get'] -- ['projects_locations-migration-jobs-get-iam-policy.md', 'Projects', 'Locations Migration Jobs Get Iam Policy'] -- ['projects_locations-migration-jobs-list.md', 'Projects', 'Locations Migration Jobs List'] -- ['projects_locations-migration-jobs-patch.md', 'Projects', 'Locations Migration Jobs Patch'] -- ['projects_locations-migration-jobs-promote.md', 'Projects', 'Locations Migration Jobs Promote'] -- ['projects_locations-migration-jobs-restart.md', 'Projects', 'Locations Migration Jobs Restart'] -- ['projects_locations-migration-jobs-resume.md', 'Projects', 'Locations Migration Jobs Resume'] -- ['projects_locations-migration-jobs-set-iam-policy.md', 'Projects', 'Locations Migration Jobs Set Iam Policy'] -- ['projects_locations-migration-jobs-start.md', 'Projects', 'Locations Migration Jobs Start'] -- ['projects_locations-migration-jobs-stop.md', 'Projects', 'Locations Migration Jobs Stop'] -- ['projects_locations-migration-jobs-test-iam-permissions.md', 'Projects', 'Locations Migration Jobs Test Iam Permissions'] -- ['projects_locations-migration-jobs-verify.md', 'Projects', 'Locations Migration Jobs Verify'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Connection Profiles Create': 'projects_locations-connection-profiles-create.md' + - 'Locations Connection Profiles Delete': 'projects_locations-connection-profiles-delete.md' + - 'Locations Connection 
Profiles Get': 'projects_locations-connection-profiles-get.md' + - 'Locations Connection Profiles Get Iam Policy': 'projects_locations-connection-profiles-get-iam-policy.md' + - 'Locations Connection Profiles List': 'projects_locations-connection-profiles-list.md' + - 'Locations Connection Profiles Patch': 'projects_locations-connection-profiles-patch.md' + - 'Locations Connection Profiles Set Iam Policy': 'projects_locations-connection-profiles-set-iam-policy.md' + - 'Locations Connection Profiles Test Iam Permissions': 'projects_locations-connection-profiles-test-iam-permissions.md' + - 'Locations Conversion Workspaces Apply': 'projects_locations-conversion-workspaces-apply.md' + - 'Locations Conversion Workspaces Commit': 'projects_locations-conversion-workspaces-commit.md' + - 'Locations Conversion Workspaces Convert': 'projects_locations-conversion-workspaces-convert.md' + - 'Locations Conversion Workspaces Create': 'projects_locations-conversion-workspaces-create.md' + - 'Locations Conversion Workspaces Delete': 'projects_locations-conversion-workspaces-delete.md' + - 'Locations Conversion Workspaces Describe Conversion Workspace Revisions': 'projects_locations-conversion-workspaces-describe-conversion-workspace-revisions.md' + - 'Locations Conversion Workspaces Describe Database Entities': 'projects_locations-conversion-workspaces-describe-database-entities.md' + - 'Locations Conversion Workspaces Get': 'projects_locations-conversion-workspaces-get.md' + - 'Locations Conversion Workspaces List': 'projects_locations-conversion-workspaces-list.md' + - 'Locations Conversion Workspaces Mapping Rules Import': 'projects_locations-conversion-workspaces-mapping-rules-import.md' + - 'Locations Conversion Workspaces Patch': 'projects_locations-conversion-workspaces-patch.md' + - 'Locations Conversion Workspaces Rollback': 'projects_locations-conversion-workspaces-rollback.md' + - 'Locations Conversion Workspaces Search Background Jobs': 
'projects_locations-conversion-workspaces-search-background-jobs.md' + - 'Locations Conversion Workspaces Seed': 'projects_locations-conversion-workspaces-seed.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Migration Jobs Create': 'projects_locations-migration-jobs-create.md' + - 'Locations Migration Jobs Delete': 'projects_locations-migration-jobs-delete.md' + - 'Locations Migration Jobs Generate Ssh Script': 'projects_locations-migration-jobs-generate-ssh-script.md' + - 'Locations Migration Jobs Get': 'projects_locations-migration-jobs-get.md' + - 'Locations Migration Jobs Get Iam Policy': 'projects_locations-migration-jobs-get-iam-policy.md' + - 'Locations Migration Jobs List': 'projects_locations-migration-jobs-list.md' + - 'Locations Migration Jobs Patch': 'projects_locations-migration-jobs-patch.md' + - 'Locations Migration Jobs Promote': 'projects_locations-migration-jobs-promote.md' + - 'Locations Migration Jobs Restart': 'projects_locations-migration-jobs-restart.md' + - 'Locations Migration Jobs Resume': 'projects_locations-migration-jobs-resume.md' + - 'Locations Migration Jobs Set Iam Policy': 'projects_locations-migration-jobs-set-iam-policy.md' + - 'Locations Migration Jobs Start': 'projects_locations-migration-jobs-start.md' + - 'Locations Migration Jobs Stop': 'projects_locations-migration-jobs-stop.md' + - 'Locations Migration Jobs Test Iam Permissions': 'projects_locations-migration-jobs-test-iam-permissions.md' + - 'Locations Migration Jobs Verify': 'projects_locations-migration-jobs-verify.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Private Connections Create': 
'projects_locations-private-connections-create.md' + - 'Locations Private Connections Delete': 'projects_locations-private-connections-delete.md' + - 'Locations Private Connections Get': 'projects_locations-private-connections-get.md' + - 'Locations Private Connections List': 'projects_locations-private-connections-list.md' theme: readthedocs diff --git a/gen/datamigration1-cli/src/client.rs b/gen/datamigration1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datamigration1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datamigration1-cli/src/main.rs b/gen/datamigration1-cli/src/main.rs index a94a0d1b7a..c75e721745 100644 --- a/gen/datamigration1-cli/src/main.rs +++ b/gen/datamigration1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datamigration1::{api, Error, oauth2}; +use google_datamigration1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -74,11 +73,24 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "alloydb.cluster-id" => Some(("alloydb.clusterId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.initial-user.password" => Some(("alloydb.settings.initialUser.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.initial-user.password-set" => Some(("alloydb.settings.initialUser.passwordSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "alloydb.settings.initial-user.user" => Some(("alloydb.settings.initialUser.user", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.labels" => Some(("alloydb.settings.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "alloydb.settings.primary-instance-settings.database-flags" => Some(("alloydb.settings.primaryInstanceSettings.databaseFlags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "alloydb.settings.primary-instance-settings.id" => Some(("alloydb.settings.primaryInstanceSettings.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.primary-instance-settings.labels" => Some(("alloydb.settings.primaryInstanceSettings.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "alloydb.settings.primary-instance-settings.machine-config.cpu-count" => Some(("alloydb.settings.primaryInstanceSettings.machineConfig.cpuCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "alloydb.settings.primary-instance-settings.private-ip" => Some(("alloydb.settings.primaryInstanceSettings.privateIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.vpc-network" => Some(("alloydb.settings.vpcNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cloudsql.additional-public-ip" => Some(("cloudsql.additionalPublicIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"cloudsql.cloud-sql-id" => Some(("cloudsql.cloudSqlId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.private-ip" => Some(("cloudsql.privateIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.public-ip" => Some(("cloudsql.publicIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.activation-policy" => Some(("cloudsql.settings.activationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.auto-storage-increase" => Some(("cloudsql.settings.autoStorageIncrease", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cloudsql.settings.availability-type" => Some(("cloudsql.settings.availabilityType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.cmek-key-name" => Some(("cloudsql.settings.cmekKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.collation" => Some(("cloudsql.settings.collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.data-disk-size-gb" => Some(("cloudsql.settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -90,6 +102,7 @@ where "cloudsql.settings.ip-config.require-ssl" => Some(("cloudsql.settings.ipConfig.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cloudsql.settings.root-password" => Some(("cloudsql.settings.rootPassword", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.root-password-set" => Some(("cloudsql.settings.rootPasswordSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cloudsql.settings.secondary-zone" => Some(("cloudsql.settings.secondaryZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.source-id" => Some(("cloudsql.settings.sourceId", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.storage-auto-resize-limit" => Some(("cloudsql.settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.tier" => Some(("cloudsql.settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -111,11 +124,25 @@ where "mysql.ssl.type" => Some(("mysql.ssl.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mysql.username" => Some(("mysql.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.database-service" => Some(("oracle.databaseService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.hostname" => Some(("oracle.forwardSshConnectivity.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.password" => Some(("oracle.forwardSshConnectivity.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.port" => Some(("oracle.forwardSshConnectivity.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.private-key" => Some(("oracle.forwardSshConnectivity.privateKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.username" => Some(("oracle.forwardSshConnectivity.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.host" => Some(("oracle.host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.password" => Some(("oracle.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.password-set" => Some(("oracle.passwordSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
+ "oracle.port" => Some(("oracle.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "oracle.private-connectivity.private-connection" => Some(("oracle.privateConnectivity.privateConnection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.username" => Some(("oracle.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.cloud-sql-id" => Some(("postgresql.cloudSqlId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.host" => Some(("postgresql.host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql.network-architecture" => Some(("postgresql.networkArchitecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.password" => Some(("postgresql.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.password-set" => Some(("postgresql.passwordSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "postgresql.port" => Some(("postgresql.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "postgresql.private-service-connect-connectivity.service-attachment" => Some(("postgresql.privateServiceConnectConnectivity.serviceAttachment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.ssl.ca-certificate" => Some(("postgresql.ssl.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.ssl.client-certificate" => Some(("postgresql.ssl.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.ssl.client-key" => Some(("postgresql.ssl.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -125,7 +152,7 @@ where "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "auto-storage-increase", "ca-certificate", "client-certificate", "client-key", "cloud-sql-id", "cloudsql", "cmek-key-name", "code", "collation", "create-time", "data-disk-size-gb", "data-disk-type", "database-flags", "database-version", "display-name", "enable-ipv4", "error", "host", "ip-config", "labels", "message", "mysql", "name", "password", "password-set", "port", "postgresql", "private-ip", "private-network", "provider", "public-ip", "require-ssl", "root-password", "root-password-set", "settings", "source-id", "ssl", "state", "storage-auto-resize-limit", "tier", "type", "update-time", "user-labels", "username", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "additional-public-ip", "alloydb", "auto-storage-increase", "availability-type", "ca-certificate", "client-certificate", "client-key", "cloud-sql-id", "cloudsql", "cluster-id", "cmek-key-name", "code", "collation", "cpu-count", "create-time", "data-disk-size-gb", "data-disk-type", "database-flags", "database-service", "database-version", "display-name", "enable-ipv4", "error", "forward-ssh-connectivity", "host", "hostname", "id", "initial-user", "ip-config", "labels", "machine-config", "message", "mysql", "name", "network-architecture", "oracle", "password", "password-set", "port", "postgresql", "primary-instance-settings", "private-connection", "private-connectivity", "private-ip", "private-key", "private-network", "private-service-connect-connectivity", "provider", "public-ip", "require-ssl", "root-password", "root-password-set", "secondary-zone", "service-attachment", "settings", "source-id", "ssl", "state", "storage-auto-resize-limit", "tier", "type", "update-time", "user", "user-labels", "username", "vpc-network", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } 
@@ -139,6 +166,12 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "skip-validation" => { + call = call.skip_validation( value.map(|v| arg_from_str(v, err, "skip-validation", "boolean")).unwrap_or(false)); + }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, @@ -158,7 +191,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["connection-profile-id", "request-id"].iter().map(|v|*v)); + v.extend(["connection-profile-id", "request-id", "skip-validation", "validate-only"].iter().map(|v|*v)); v } )); } } @@ -202,7 +235,7 @@ where call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -310,7 +343,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -369,7 +402,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -447,11 +480,24 @@ where let type_info: 
Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "alloydb.cluster-id" => Some(("alloydb.clusterId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.initial-user.password" => Some(("alloydb.settings.initialUser.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.initial-user.password-set" => Some(("alloydb.settings.initialUser.passwordSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "alloydb.settings.initial-user.user" => Some(("alloydb.settings.initialUser.user", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.labels" => Some(("alloydb.settings.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "alloydb.settings.primary-instance-settings.database-flags" => Some(("alloydb.settings.primaryInstanceSettings.databaseFlags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "alloydb.settings.primary-instance-settings.id" => Some(("alloydb.settings.primaryInstanceSettings.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.primary-instance-settings.labels" => Some(("alloydb.settings.primaryInstanceSettings.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "alloydb.settings.primary-instance-settings.machine-config.cpu-count" => Some(("alloydb.settings.primaryInstanceSettings.machineConfig.cpuCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "alloydb.settings.primary-instance-settings.private-ip" => Some(("alloydb.settings.primaryInstanceSettings.privateIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "alloydb.settings.vpc-network" => Some(("alloydb.settings.vpcNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cloudsql.additional-public-ip" => Some(("cloudsql.additionalPublicIp", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.cloud-sql-id" => Some(("cloudsql.cloudSqlId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.private-ip" => Some(("cloudsql.privateIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.public-ip" => Some(("cloudsql.publicIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.activation-policy" => Some(("cloudsql.settings.activationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.auto-storage-increase" => Some(("cloudsql.settings.autoStorageIncrease", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cloudsql.settings.availability-type" => Some(("cloudsql.settings.availabilityType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.cmek-key-name" => Some(("cloudsql.settings.cmekKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.collation" => Some(("cloudsql.settings.collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.data-disk-size-gb" => Some(("cloudsql.settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -463,6 +509,7 @@ where "cloudsql.settings.ip-config.require-ssl" => Some(("cloudsql.settings.ipConfig.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cloudsql.settings.root-password" => Some(("cloudsql.settings.rootPassword", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.root-password-set" => Some(("cloudsql.settings.rootPasswordSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cloudsql.settings.secondary-zone" => Some(("cloudsql.settings.secondaryZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"cloudsql.settings.source-id" => Some(("cloudsql.settings.sourceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.storage-auto-resize-limit" => Some(("cloudsql.settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloudsql.settings.tier" => Some(("cloudsql.settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -484,11 +531,25 @@ where "mysql.ssl.type" => Some(("mysql.ssl.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mysql.username" => Some(("mysql.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.database-service" => Some(("oracle.databaseService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.hostname" => Some(("oracle.forwardSshConnectivity.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.password" => Some(("oracle.forwardSshConnectivity.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.port" => Some(("oracle.forwardSshConnectivity.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.private-key" => Some(("oracle.forwardSshConnectivity.privateKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.forward-ssh-connectivity.username" => Some(("oracle.forwardSshConnectivity.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.host" => Some(("oracle.host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.password" => Some(("oracle.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.password-set" => 
Some(("oracle.passwordSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "oracle.port" => Some(("oracle.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "oracle.private-connectivity.private-connection" => Some(("oracle.privateConnectivity.privateConnection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oracle.username" => Some(("oracle.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.cloud-sql-id" => Some(("postgresql.cloudSqlId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.host" => Some(("postgresql.host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql.network-architecture" => Some(("postgresql.networkArchitecture", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.password" => Some(("postgresql.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.password-set" => Some(("postgresql.passwordSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "postgresql.port" => Some(("postgresql.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "postgresql.private-service-connect-connectivity.service-attachment" => Some(("postgresql.privateServiceConnectConnectivity.serviceAttachment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.ssl.ca-certificate" => Some(("postgresql.ssl.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.ssl.client-certificate" => Some(("postgresql.ssl.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "postgresql.ssl.client-key" => Some(("postgresql.ssl.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -498,7 +559,7 @@ where "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "auto-storage-increase", "ca-certificate", "client-certificate", "client-key", "cloud-sql-id", "cloudsql", "cmek-key-name", "code", "collation", "create-time", "data-disk-size-gb", "data-disk-type", "database-flags", "database-version", "display-name", "enable-ipv4", "error", "host", "ip-config", "labels", "message", "mysql", "name", "password", "password-set", "port", "postgresql", "private-ip", "private-network", "provider", "public-ip", "require-ssl", "root-password", "root-password-set", "settings", "source-id", "ssl", "state", "storage-auto-resize-limit", "tier", "type", "update-time", "user-labels", "username", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "additional-public-ip", "alloydb", "auto-storage-increase", "availability-type", "ca-certificate", "client-certificate", "client-key", "cloud-sql-id", "cloudsql", "cluster-id", "cmek-key-name", "code", "collation", "cpu-count", "create-time", "data-disk-size-gb", "data-disk-type", "database-flags", "database-service", "database-version", "display-name", "enable-ipv4", "error", "forward-ssh-connectivity", "host", "hostname", "id", "initial-user", "ip-config", "labels", "machine-config", "message", "mysql", "name", "network-architecture", "oracle", "password", "password-set", "port", "postgresql", "primary-instance-settings", "private-connection", "private-connectivity", "private-ip", "private-key", "private-network", "private-service-connect-connectivity", "provider", "public-ip", "require-ssl", "root-password", "root-password-set", "secondary-zone", "service-attachment", "settings", "source-id", "ssl", "state", "storage-auto-resize-limit", "tier", "type", "update-time", "user", "user-labels", "username", "vpc-network", "zone"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -512,8 +573,14 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "skip-validation" => { + call = call.skip_validation( value.map(|v| arg_from_str(v, err, "skip-validation", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -531,7 +598,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["request-id", "update-mask"].iter().map(|v|*v)); + v.extend(["request-id", "skip-validation", "update-mask", "validate-only"].iter().map(|v|*v)); v } )); } } @@ -737,6 +804,1085 @@ where } } + async fn _projects_locations_conversion_workspaces_apply(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, 
JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "connection-profile" => Some(("connectionProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["connection-profile", "filter"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ApplyConversionWorkspaceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversion_workspaces_apply(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) 
=> Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_commit(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "commit-name" => Some(("commitName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["commit-name"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CommitConversionWorkspaceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversion_workspaces_commit(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_convert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "auto-commit" => Some(("autoCommit", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-commit", "filter"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ConvertConversionWorkspaceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversion_workspaces_convert(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + 
let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: 
Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.engine" => Some(("destination.engine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.version" => Some(("destination.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "global-settings" => Some(("globalSettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "has-uncommitted-changes" => Some(("hasUncommittedChanges", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-commit-id" => Some(("latestCommitId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "latest-commit-time" => Some(("latestCommitTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source.engine" => Some(("source.engine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source.version" => Some(("source.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "destination", "display-name", "engine", "global-settings", "has-uncommitted-changes", "latest-commit-id", "latest-commit-time", "name", "source", "update-time", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), 
type_info, err, &temp_cursor); + } + } + let mut request: api::ConversionWorkspace = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversion_workspaces_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + "conversion-workspace-id" => { + call = call.conversion_workspace_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["conversion-workspace-id", "request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: 
&mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_conversion_workspaces_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_describe_conversion_workspace_revisions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.projects().locations_conversion_workspaces_describe_conversion_workspace_revisions(opt.value_of("conversion-workspace").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "commit-id" => { + call = call.commit_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["commit-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_describe_database_entities(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_conversion_workspaces_describe_database_entities(opt.value_of("conversion-workspace").unwrap_or("")); + for parg 
in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "uncommitted" => { + call = call.uncommitted( value.map(|v| arg_from_str(v, err, "uncommitted", "boolean")).unwrap_or(false)); + }, + "tree" => { + call = call.tree(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + "commit-id" => { + call = call.commit_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["commit-id", "filter", "page-size", "page-token", "tree", "uncommitted"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + 
} + } + } + + async fn _projects_locations_conversion_workspaces_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_conversion_workspaces_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_conversion_workspaces_list(opt.value_of("parent").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_mapping_rules_import(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = 
json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "auto-commit" => Some(("autoCommit", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "rules-format" => Some(("rulesFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-commit", "rules-format"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ImportMappingRulesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversion_workspaces_mapping_rules_import(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } 
)); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.engine" => Some(("destination.engine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.version" => Some(("destination.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "global-settings" => Some(("globalSettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "has-uncommitted-changes" => Some(("hasUncommittedChanges", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "latest-commit-id" => Some(("latestCommitId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "latest-commit-time" => Some(("latestCommitTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source.engine" => Some(("source.engine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source.version" => Some(("source.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "destination", "display-name", "engine", "global-settings", "has-uncommitted-changes", "latest-commit-id", "latest-commit-time", "name", "source", "update-time", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: 
api::ConversionWorkspace = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversion_workspaces_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_rollback(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RollbackConversionWorkspaceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversion_workspaces_rollback(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for 
scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_search_background_jobs(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_conversion_workspaces_search_background_jobs(opt.value_of("conversion-workspace").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "return-most-recent-per-job-type" => { + call = call.return_most_recent_per_job_type( value.map(|v| arg_from_str(v, err, "return-most-recent-per-job-type", "boolean")).unwrap_or(false)); + }, + "max-size" => { + call = call.max_size( value.map(|v| arg_from_str(v, err, "max-size", "int32")).unwrap_or(-0)); + }, + "completed-until-time" => { + call = call.completed_until_time( value.map(|v| arg_from_str(v, err, "completed-until-time", "google-datetime")).unwrap_or(chrono::Utc::now())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["completed-until-time", "max-size", "return-most-recent-per-job-type"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_conversion_workspaces_seed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "auto-commit" => Some(("autoCommit", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "destination-connection-profile" => Some(("destinationConnectionProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-connection-profile" => Some(("sourceConnectionProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-commit", "destination-connection-profile", "source-connection-profile"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SeedConversionWorkspaceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversion_workspaces_seed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or("")); @@ -799,7 +1945,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -874,6 +2020,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "conversion-workspace.commit-id" => Some(("conversionWorkspace.commitId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "conversion-workspace.name" => Some(("conversionWorkspace.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination" => Some(("destination", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-database.engine" => Some(("destinationDatabase.engine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -884,6 +2032,7 @@ where "end-time" => Some(("endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "error.message" => Some(("error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "phase" => Some(("phase", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -899,7 +2048,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vpc-peering-connectivity.vpc" => Some(("vpcPeeringConnectivity.vpc", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "create-time", "destination", "destination-database", "display-name", "dump-path", "duration", "end-time", "engine", "error", "labels", "message", "name", "phase", "provider", "reverse-ssh-connectivity", "source", "source-database", "state", "type", "update-time", "vm", "vm-ip", "vm-port", "vpc", 
"vpc-peering-connectivity"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "commit-id", "conversion-workspace", "create-time", "destination", "destination-database", "display-name", "dump-path", "duration", "end-time", "engine", "error", "filter", "labels", "message", "name", "phase", "provider", "reverse-ssh-connectivity", "source", "source-database", "state", "type", "update-time", "vm", "vm-ip", "vm-port", "vpc", "vpc-peering-connectivity"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -976,7 +2125,7 @@ where call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1174,7 +2323,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1233,7 +2382,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1311,6 +2460,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "conversion-workspace.commit-id" => Some(("conversionWorkspace.commitId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "conversion-workspace.name" => Some(("conversionWorkspace.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination" => Some(("destination", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-database.engine" => Some(("destinationDatabase.engine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1321,6 +2472,7 @@ where "end-time" => Some(("endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "error.message" => Some(("error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "phase" => Some(("phase", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1336,7 +2488,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vpc-peering-connectivity.vpc" => Some(("vpcPeeringConnectivity.vpc", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "create-time", "destination", "destination-database", "display-name", "dump-path", "duration", "end-time", "engine", "error", "labels", "message", "name", "phase", "provider", "reverse-ssh-connectivity", "source", "source-database", "state", "type", "update-time", "vm", "vm-ip", "vm-port", 
"vpc", "vpc-peering-connectivity"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "commit-id", "conversion-workspace", "create-time", "destination", "destination-database", "display-name", "dump-path", "duration", "end-time", "engine", "error", "filter", "labels", "message", "name", "phase", "provider", "reverse-ssh-connectivity", "source", "source-database", "state", "type", "update-time", "vm", "vm-ip", "vm-port", "vpc", "vpc-peering-connectivity"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1351,7 +2503,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -2277,7 +3429,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2329,6 +3481,283 @@ where } } + async fn _projects_locations_private_connections_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if 
err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "error.message" => Some(("error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vpc-peering-config.subnet" => Some(("vpcPeeringConfig.subnet", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "vpc-peering-config.vpc-name" => Some(("vpcPeeringConfig.vpcName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "create-time", "display-name", "error", "labels", "message", "name", "state", "subnet", "update-time", "vpc-name", "vpc-peering-config"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::PrivateConnection = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_private_connections_create(request, 
opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "skip-validation" => { + call = call.skip_validation( value.map(|v| arg_from_str(v, err, "skip-validation", "boolean")).unwrap_or(false)); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + "private-connection-id" => { + call = call.private_connection_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["private-connection-id", "request-id", "skip-validation"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_private_connections_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + 
let mut call = self.hub.projects().locations_private_connections_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_private_connections_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_private_connections_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = 
parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_private_connections_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_private_connections_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call 
= call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -2360,6 +3789,48 @@ where ("locations-connection-profiles-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_connection_profiles_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-conversion-workspaces-apply", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_apply(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-commit", Some(opt)) => { + call_result = 
self._projects_locations_conversion_workspaces_commit(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-convert", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_convert(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-create", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_create(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-delete", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_delete(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-describe-conversion-workspace-revisions", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_describe_conversion_workspace_revisions(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-describe-database-entities", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_describe_database_entities(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-get", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_get(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-list", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_list(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-mapping-rules-import", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_mapping_rules_import(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-patch", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_patch(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-rollback", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_rollback(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-search-background-jobs", Some(opt)) => { + call_result = 
self._projects_locations_conversion_workspaces_search_background_jobs(opt, dry_run, &mut err).await; + }, + ("locations-conversion-workspaces-seed", Some(opt)) => { + call_result = self._projects_locations_conversion_workspaces_seed(opt, dry_run, &mut err).await; + }, ("locations-get", Some(opt)) => { call_result = self._projects_locations_get(opt, dry_run, &mut err).await; }, @@ -2423,6 +3894,18 @@ where ("locations-operations-list", Some(opt)) => { call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; }, + ("locations-private-connections-create", Some(opt)) => { + call_result = self._projects_locations_private_connections_create(opt, dry_run, &mut err).await; + }, + ("locations-private-connections-delete", Some(opt)) => { + call_result = self._projects_locations_private_connections_delete(opt, dry_run, &mut err).await; + }, + ("locations-private-connections-get", Some(opt)) => { + call_result = self._projects_locations_private_connections_get(opt, dry_run, &mut err).await; + }, + ("locations-private-connections-list", Some(opt)) => { + call_result = self._projects_locations_private_connections_list(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2502,7 +3985,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-connection-profiles-create', 'locations-connection-profiles-delete', 'locations-connection-profiles-get', 'locations-connection-profiles-get-iam-policy', 'locations-connection-profiles-list', 'locations-connection-profiles-patch', 'locations-connection-profiles-set-iam-policy', 'locations-connection-profiles-test-iam-permissions', 'locations-get', 'locations-list', 'locations-migration-jobs-create', 'locations-migration-jobs-delete', 'locations-migration-jobs-generate-ssh-script', 'locations-migration-jobs-get', 
'locations-migration-jobs-get-iam-policy', 'locations-migration-jobs-list', 'locations-migration-jobs-patch', 'locations-migration-jobs-promote', 'locations-migration-jobs-restart', 'locations-migration-jobs-resume', 'locations-migration-jobs-set-iam-policy', 'locations-migration-jobs-start', 'locations-migration-jobs-stop', 'locations-migration-jobs-test-iam-permissions', 'locations-migration-jobs-verify', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-connection-profiles-create', 'locations-connection-profiles-delete', 'locations-connection-profiles-get', 'locations-connection-profiles-get-iam-policy', 'locations-connection-profiles-list', 'locations-connection-profiles-patch', 'locations-connection-profiles-set-iam-policy', 'locations-connection-profiles-test-iam-permissions', 'locations-conversion-workspaces-apply', 'locations-conversion-workspaces-commit', 'locations-conversion-workspaces-convert', 'locations-conversion-workspaces-create', 'locations-conversion-workspaces-delete', 'locations-conversion-workspaces-describe-conversion-workspace-revisions', 'locations-conversion-workspaces-describe-database-entities', 'locations-conversion-workspaces-get', 'locations-conversion-workspaces-list', 'locations-conversion-workspaces-mapping-rules-import', 'locations-conversion-workspaces-patch', 'locations-conversion-workspaces-rollback', 'locations-conversion-workspaces-search-background-jobs', 'locations-conversion-workspaces-seed', 'locations-get', 'locations-list', 'locations-migration-jobs-create', 'locations-migration-jobs-delete', 'locations-migration-jobs-generate-ssh-script', 'locations-migration-jobs-get', 'locations-migration-jobs-get-iam-policy', 'locations-migration-jobs-list', 'locations-migration-jobs-patch', 'locations-migration-jobs-promote', 'locations-migration-jobs-restart', 'locations-migration-jobs-resume', 
'locations-migration-jobs-set-iam-policy', 'locations-migration-jobs-start', 'locations-migration-jobs-stop', 'locations-migration-jobs-test-iam-permissions', 'locations-migration-jobs-verify', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-private-connections-create', 'locations-private-connections-delete', 'locations-private-connections-get' and 'locations-private-connections-list'", vec![ ("locations-connection-profiles-create", Some(r##"Creates a new connection profile in a given project and location."##), "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-connection-profiles-create", @@ -2581,7 +4064,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2653,7 +4136,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2681,7 +4164,363 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-apply", + Some(r##"Apply draft tree onto a specific destination database"##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-apply", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the conversion workspace resource to apply draft to destination for. in the form of: projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-commit", + Some(r##"Marks all the data in the conversion workspace as committed."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-commit", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the conversion workspace resource to commit."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-convert", + Some(r##"Creates a draft tree schema for the destination database."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-convert", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the conversion workspace resource to convert in the form of: projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-create", + Some(r##"Creates a new conversion workspace in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent, which owns this collection of conversion workspaces."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-delete", + Some(r##"Deletes a single conversion workspace."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the conversion workspace resource to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-describe-conversion-workspace-revisions", + Some(r##"Retrieves a list of committed revisions of a specific conversion workspace."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-describe-conversion-workspace-revisions", + vec![ + (Some(r##"conversion-workspace"##), + None, + Some(r##"Required. Name of the conversion workspace resource whose revisions are listed. 
in the form of: projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-describe-database-entities", + Some(r##"Use this method to describe the database entities tree for a specific conversion workspace and a specific tree type. The DB Entities are not a resource like conversion workspace or mapping rule, and they can not be created, updated or deleted like one. Instead they are simple data objects describing the structure of the client database."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-describe-database-entities", + vec![ + (Some(r##"conversion-workspace"##), + None, + Some(r##"Required. Name of the conversion workspace resource whose DB entities are described in the form of: projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-get", + Some(r##"Gets details of a single conversion workspace."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the conversion workspace resource to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-list", + Some(r##"Lists conversion workspaces in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent, which owns this collection of conversion workspaces."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-mapping-rules-import", + Some(r##"Imports the mapping rules for a given conversion workspace. Supports various formats of external rules files."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-mapping-rules-import", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
Name of the conversion workspace resource to import the rules to in the form of: projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-patch", + Some(r##"Updates the parameters of a single conversion workspace."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Full name of the workspace resource, in the form of: projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-rollback", + Some(r##"Rollbacks a conversion workspace to the last committed spanshot."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-rollback", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the conversion workspace resource to rollback to."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-search-background-jobs", + Some(r##"Use this method to search/list the background jobs for a specific conversion workspace. The background jobs are not a resource like conversion workspace or mapping rule, and they can not be created, updated or deleted like one. Instead they are a way to expose the data plane jobs log."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-search-background-jobs", + vec![ + (Some(r##"conversion-workspace"##), + None, + Some(r##"Required. Name of the conversion workspace resource whos jobs are listed. 
in the form of: projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversion-workspaces-seed", + Some(r##"Imports a snapshot of the source database into the conversion workspace."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-conversion-workspaces-seed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the conversion workspace resource to seed with new database structure. in the form of: projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}."##), Some(true), Some(false)), @@ -2853,7 +4692,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3009,7 +4848,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3093,7 +4932,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3231,6 +5070,100 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-private-connections-create", + Some(r##"Creates a new private connection in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-private-connections-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent that owns the collection of PrivateConnections."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-private-connections-delete", + Some(r##"Deletes a single Database Migration Service private connection."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-private-connections-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the private connection to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-private-connections-get", + Some(r##"Gets details of a single private connection."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-private-connections-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The name of the private connection to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-private-connections-list", + Some(r##"Retrieves a list of private connections in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_datamigration1_cli/projects_locations-private-connections-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent that owns the collection of private connections."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3243,7 +5176,7 @@ async fn main() { let mut app = App::new("datamigration1") .author("Sebastian Thiel ") - .version("4.0.1+20220216") + .version("5.0.2+20230105") .about("Manage Cloud Database Migration Service resources on Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datamigration1_cli") .arg(Arg::with_name("url") diff --git a/gen/datamigration1/Cargo.toml b/gen/datamigration1/Cargo.toml index 3b4bac50d7..8d61f1b0ec 100644 --- a/gen/datamigration1/Cargo.toml +++ b/gen/datamigration1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datamigration1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Database Migration Service (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datamigration1" homepage = 
"https://cloud.google.com/database-migration/" -documentation = "https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-datamigration1/5.0.2+20230105" license = "MIT" keywords = ["datamigration", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datamigration1/README.md b/gen/datamigration1/README.md index 578f2f5cee..48506154c9 100644 --- a/gen/datamigration1/README.md +++ b/gen/datamigration1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-datamigration1` library allows access to all features of the *Google Database Migration Service* service. -This documentation was generated from *Database Migration Service* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *datamigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Database Migration Service* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *datamigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Database Migration Service* *v1* API can be found at the [official documentation site](https://cloud.google.com/database-migration/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/DatabaseMigrationService) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/DatabaseMigrationService) ... 
* projects - * [*locations connection profiles create*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConnectionProfileCreateCall), [*locations connection profiles delete*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConnectionProfileDeleteCall), [*locations connection profiles get*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConnectionProfileGetCall), [*locations connection profiles get iam policy*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConnectionProfileGetIamPolicyCall), [*locations connection profiles list*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConnectionProfileListCall), [*locations connection profiles patch*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConnectionProfilePatchCall), [*locations connection profiles set iam policy*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConnectionProfileSetIamPolicyCall), [*locations connection profiles test iam permissions*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConnectionProfileTestIamPermissionCall), [*locations conversion workspaces apply*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceApplyCall), [*locations conversion workspaces commit*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceCommitCall), [*locations conversion workspaces convert*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceConvertCall), [*locations conversion 
workspaces create*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceCreateCall), [*locations conversion workspaces delete*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceDeleteCall), [*locations conversion workspaces describe conversion workspace revisions*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceDescribeConversionWorkspaceRevisionCall), [*locations conversion workspaces describe database entities*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceDescribeDatabaseEntityCall), [*locations conversion workspaces get*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceGetCall), [*locations conversion workspaces list*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceListCall), [*locations conversion workspaces mapping rules import*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceMappingRuleImportCall), [*locations conversion workspaces patch*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspacePatchCall), [*locations conversion workspaces rollback*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceRollbackCall), [*locations conversion workspaces search background jobs*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceSearchBackgroundJobCall), [*locations conversion workspaces 
seed*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceSeedCall), [*locations get*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationListCall), [*locations migration jobs create*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobCreateCall), [*locations migration jobs delete*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobDeleteCall), [*locations migration jobs generate ssh script*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobGenerateSshScriptCall), [*locations migration jobs get*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobGetCall), [*locations migration jobs get iam policy*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobGetIamPolicyCall), [*locations migration jobs list*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobListCall), [*locations migration jobs patch*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobPatchCall), [*locations migration jobs promote*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobPromoteCall), [*locations migration jobs restart*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobRestartCall), [*locations migration jobs 
resume*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobResumeCall), [*locations migration jobs set iam policy*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobSetIamPolicyCall), [*locations migration jobs start*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobStartCall), [*locations migration jobs stop*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobStopCall), [*locations migration jobs test iam permissions*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobTestIamPermissionCall), [*locations migration jobs verify*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationMigrationJobVerifyCall), [*locations operations cancel*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationOperationListCall), [*locations private connections create*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationPrivateConnectionCreateCall), [*locations private connections delete*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationPrivateConnectionDeleteCall), [*locations private connections 
get*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationPrivateConnectionGetCall) and [*locations private connections list*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/api::ProjectLocationPrivateConnectionListCall) + * [*locations connection profiles create*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConnectionProfileCreateCall), [*locations connection profiles delete*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConnectionProfileDeleteCall), [*locations connection profiles get*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConnectionProfileGetCall), [*locations connection profiles get iam policy*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConnectionProfileGetIamPolicyCall), [*locations connection profiles list*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConnectionProfileListCall), [*locations connection profiles patch*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConnectionProfilePatchCall), [*locations connection profiles set iam policy*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConnectionProfileSetIamPolicyCall), [*locations connection profiles test iam permissions*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConnectionProfileTestIamPermissionCall), [*locations conversion workspaces apply*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceApplyCall), [*locations conversion workspaces 
commit*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceCommitCall), [*locations conversion workspaces convert*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceConvertCall), [*locations conversion workspaces create*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceCreateCall), [*locations conversion workspaces delete*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceDeleteCall), [*locations conversion workspaces describe conversion workspace revisions*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceDescribeConversionWorkspaceRevisionCall), [*locations conversion workspaces describe database entities*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceDescribeDatabaseEntityCall), [*locations conversion workspaces get*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceGetCall), [*locations conversion workspaces list*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceListCall), [*locations conversion workspaces mapping rules import*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceMappingRuleImportCall), [*locations conversion workspaces patch*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspacePatchCall), [*locations conversion workspaces rollback*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceRollbackCall), [*locations conversion workspaces search 
background jobs*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceSearchBackgroundJobCall), [*locations conversion workspaces seed*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationConversionWorkspaceSeedCall), [*locations get*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationListCall), [*locations migration jobs create*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobCreateCall), [*locations migration jobs delete*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobDeleteCall), [*locations migration jobs generate ssh script*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobGenerateSshScriptCall), [*locations migration jobs get*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobGetCall), [*locations migration jobs get iam policy*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobGetIamPolicyCall), [*locations migration jobs list*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobListCall), [*locations migration jobs patch*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobPatchCall), [*locations migration jobs promote*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobPromoteCall), [*locations migration jobs 
restart*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobRestartCall), [*locations migration jobs resume*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobResumeCall), [*locations migration jobs set iam policy*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobSetIamPolicyCall), [*locations migration jobs start*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobStartCall), [*locations migration jobs stop*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobStopCall), [*locations migration jobs test iam permissions*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobTestIamPermissionCall), [*locations migration jobs verify*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationMigrationJobVerifyCall), [*locations operations cancel*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationOperationListCall), [*locations private connections create*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationPrivateConnectionCreateCall), [*locations private connections 
delete*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationPrivateConnectionDeleteCall), [*locations private connections get*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationPrivateConnectionGetCall) and [*locations private connections list*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/api::ProjectLocationPrivateConnectionListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/DatabaseMigrationService)** +* **[Hub](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/DatabaseMigrationService)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::CallBuilder) -* **[Resources](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::Part)** + * 
**[Parts](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -151,17 +151,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -171,29 +171,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::RequestValue) and -[decodable](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::RequestValue) and +[decodable](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datamigration1/5.0.2-beta-1+20230105/google_datamigration1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datamigration1/5.0.2+20230105/google_datamigration1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/datamigration1/src/api.rs b/gen/datamigration1/src/api.rs index 3c0d44f4f0..f06b8a4111 100644 --- a/gen/datamigration1/src/api.rs +++ b/gen/datamigration1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> DatabaseMigrationService { DatabaseMigrationService { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datamigration.googleapis.com/".to_string(), _root_url: "https://datamigration.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> DatabaseMigrationService { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datamigration1/src/client.rs b/gen/datamigration1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datamigration1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datamigration1/src/lib.rs b/gen/datamigration1/src/lib.rs index a97c6094b0..eb64f9e723 100644 --- a/gen/datamigration1/src/lib.rs +++ b/gen/datamigration1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Database Migration Service* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *datamigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Database Migration Service* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *datamigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Database Migration Service* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/database-migration/). diff --git a/gen/datapipelines1-cli/Cargo.toml b/gen/datapipelines1-cli/Cargo.toml index 92dd53c268..9d37454d89 100644 --- a/gen/datapipelines1-cli/Cargo.toml +++ b/gen/datapipelines1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datapipelines1-cli" -version = "4.0.1+20220218" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Datapipelines (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datapipelines1-cli" @@ -20,13 +20,13 @@ name = "datapipelines1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datapipelines1] path = "../datapipelines1" -version = "4.0.1+20220218" +version = "5.0.2+20230115" + diff --git a/gen/datapipelines1-cli/README.md b/gen/datapipelines1-cli/README.md index 9171c17e49..7e07a4313b 100644 --- a/gen/datapipelines1-cli/README.md +++ 
b/gen/datapipelines1-cli/README.md @@ -25,16 +25,16 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Datapipelines* API at revision *20220218*. The CLI is at version *4.0.1*. +This documentation was generated from the *Datapipelines* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash datapipelines1 [options] projects - locations-list-pipelines [-p ]... [-o ] locations-pipelines-create (-r )... [-p ]... [-o ] locations-pipelines-delete [-p ]... [-o ] locations-pipelines-get [-p ]... [-o ] locations-pipelines-jobs-list [-p ]... [-o ] + locations-pipelines-list [-p ]... [-o ] locations-pipelines-patch (-r )... [-p ]... [-o ] locations-pipelines-run (-r )... [-p ]... [-o ] locations-pipelines-stop (-r )... [-p ]... [-o ] diff --git a/gen/datapipelines1-cli/mkdocs.yml b/gen/datapipelines1-cli/mkdocs.yml index 188b5c612a..d9c7a4531b 100644 --- a/gen/datapipelines1-cli/mkdocs.yml +++ b/gen/datapipelines1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Datapipelines v4.0.1+20220218 +site_name: Datapipelines v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-datapipelines1-cli site_description: A complete library to interact with Datapipelines (protocol v1) @@ -7,16 +7,17 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datapipelines1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-list-pipelines.md', 'Projects', 'Locations List Pipelines'] -- ['projects_locations-pipelines-create.md', 'Projects', 'Locations Pipelines Create'] -- ['projects_locations-pipelines-delete.md', 'Projects', 'Locations Pipelines Delete'] -- ['projects_locations-pipelines-get.md', 'Projects', 'Locations Pipelines Get'] -- ['projects_locations-pipelines-jobs-list.md', 'Projects', 'Locations Pipelines Jobs List'] -- ['projects_locations-pipelines-patch.md', 'Projects', 'Locations Pipelines Patch'] -- 
['projects_locations-pipelines-run.md', 'Projects', 'Locations Pipelines Run'] -- ['projects_locations-pipelines-stop.md', 'Projects', 'Locations Pipelines Stop'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Pipelines Create': 'projects_locations-pipelines-create.md' + - 'Locations Pipelines Delete': 'projects_locations-pipelines-delete.md' + - 'Locations Pipelines Get': 'projects_locations-pipelines-get.md' + - 'Locations Pipelines Jobs List': 'projects_locations-pipelines-jobs-list.md' + - 'Locations Pipelines List': 'projects_locations-pipelines-list.md' + - 'Locations Pipelines Patch': 'projects_locations-pipelines-patch.md' + - 'Locations Pipelines Run': 'projects_locations-pipelines-run.md' + - 'Locations Pipelines Stop': 'projects_locations-pipelines-stop.md' theme: readthedocs diff --git a/gen/datapipelines1-cli/src/client.rs b/gen/datapipelines1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datapipelines1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datapipelines1-cli/src/main.rs b/gen/datapipelines1-cli/src/main.rs index ba4f637acc..e83897f292 100644 --- a/gen/datapipelines1-cli/src/main.rs +++ b/gen/datapipelines1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datapipelines1::{api, Error, oauth2}; +use google_datapipelines1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,68 +50,6 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { - async fn _projects_locations_list_pipelines(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_list_pipelines(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - 
v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _projects_locations_pipelines_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -372,7 +309,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -421,6 +358,68 @@ where } } + async fn _projects_locations_pipelines_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_pipelines_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( 
value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_pipelines_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -521,7 +520,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -745,9 +744,6 @@ where match self.opt.subcommand() { ("projects", 
Some(opt)) => { match opt.subcommand() { - ("locations-list-pipelines", Some(opt)) => { - call_result = self._projects_locations_list_pipelines(opt, dry_run, &mut err).await; - }, ("locations-pipelines-create", Some(opt)) => { call_result = self._projects_locations_pipelines_create(opt, dry_run, &mut err).await; }, @@ -760,6 +756,9 @@ where ("locations-pipelines-jobs-list", Some(opt)) => { call_result = self._projects_locations_pipelines_jobs_list(opt, dry_run, &mut err).await; }, + ("locations-pipelines-list", Some(opt)) => { + call_result = self._projects_locations_pipelines_list(opt, dry_run, &mut err).await; + }, ("locations-pipelines-patch", Some(opt)) => { call_result = self._projects_locations_pipelines_patch(opt, dry_run, &mut err).await; }, @@ -848,29 +847,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-list-pipelines', 'locations-pipelines-create', 'locations-pipelines-delete', 'locations-pipelines-get', 'locations-pipelines-jobs-list', 'locations-pipelines-patch', 'locations-pipelines-run' and 'locations-pipelines-stop'", vec![ - ("locations-list-pipelines", - Some(r##"Lists pipelines. Returns a "FORBIDDEN" error if the caller doesn't have permission to access it."##), - "Details at http://byron.github.io/google-apis-rs/google_datapipelines1_cli/projects_locations-list-pipelines", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. The location name. 
For example: `projects/PROJECT_ID/locations/LOCATION_ID`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), + ("projects", "methods: 'locations-pipelines-create', 'locations-pipelines-delete', 'locations-pipelines-get', 'locations-pipelines-jobs-list', 'locations-pipelines-list', 'locations-pipelines-patch', 'locations-pipelines-run' and 'locations-pipelines-stop'", vec![ ("locations-pipelines-create", Some(r##"Creates a pipeline. For a batch pipeline, you can pass scheduler information. Data Pipelines uses the scheduler information to create an internal scheduler that runs jobs periodically. If the internal scheduler is not configured, you can use RunPipeline to run jobs."##), "Details at http://byron.github.io/google-apis-rs/google_datapipelines1_cli/projects_locations-pipelines-create", @@ -959,6 +936,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-pipelines-list", + Some(r##"Lists pipelines. Returns a "FORBIDDEN" error if the caller doesn't have permission to access it."##), + "Details at http://byron.github.io/google-apis-rs/google_datapipelines1_cli/projects_locations-pipelines-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The location name. 
For example: `projects/PROJECT_ID/locations/LOCATION_ID`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1055,7 +1054,7 @@ async fn main() { let mut app = App::new("datapipelines1") .author("Sebastian Thiel ") - .version("4.0.1+20220218") + .version("5.0.2+20230115") .about("Data Pipelines provides an interface for creating, updating, and managing recurring Data Analytics jobs.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datapipelines1_cli") .arg(Arg::with_name("url") diff --git a/gen/datapipelines1/Cargo.toml b/gen/datapipelines1/Cargo.toml index 3865621a1d..667c18b4ec 100644 --- a/gen/datapipelines1/Cargo.toml +++ b/gen/datapipelines1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datapipelines1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Datapipelines (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datapipelines1" homepage = "https://cloud.google.com/dataflow/docs/guides/data-pipelines" -documentation = "https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-datapipelines1/5.0.2+20230115" license = "MIT" keywords = ["datapipelines", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datapipelines1/README.md b/gen/datapipelines1/README.md index 0c02e64924..55a40f0f66 100644 --- a/gen/datapipelines1/README.md +++ b/gen/datapipelines1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-datapipelines1` library allows access to all features of the *Google Datapipelines* service. 
-This documentation was generated from *Datapipelines* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *datapipelines:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Datapipelines* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *datapipelines:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Datapipelines* *v1* API can be found at the [official documentation site](https://cloud.google.com/dataflow/docs/guides/data-pipelines). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/Datapipelines) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/Datapipelines) ... * projects - * [*locations pipelines create*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/api::ProjectLocationPipelineCreateCall), [*locations pipelines delete*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/api::ProjectLocationPipelineDeleteCall), [*locations pipelines get*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/api::ProjectLocationPipelineGetCall), [*locations pipelines jobs list*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/api::ProjectLocationPipelineJobListCall), [*locations pipelines list*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/api::ProjectLocationPipelineListCall), [*locations pipelines patch*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/api::ProjectLocationPipelinePatchCall), [*locations pipelines 
run*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/api::ProjectLocationPipelineRunCall) and [*locations pipelines stop*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/api::ProjectLocationPipelineStopCall) + * [*locations pipelines create*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/api::ProjectLocationPipelineCreateCall), [*locations pipelines delete*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/api::ProjectLocationPipelineDeleteCall), [*locations pipelines get*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/api::ProjectLocationPipelineGetCall), [*locations pipelines jobs list*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/api::ProjectLocationPipelineJobListCall), [*locations pipelines list*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/api::ProjectLocationPipelineListCall), [*locations pipelines patch*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/api::ProjectLocationPipelinePatchCall), [*locations pipelines run*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/api::ProjectLocationPipelineRunCall) and [*locations pipelines stop*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/api::ProjectLocationPipelineStopCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/Datapipelines)** +* **[Hub](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/Datapipelines)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::CallBuilder) -* **[Resources](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::Part)** + * **[Parts](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::RequestValue) and -[decodable](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::RequestValue) and +[decodable](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datapipelines1/5.0.2-beta-1+20230115/google_datapipelines1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datapipelines1/5.0.2+20230115/google_datapipelines1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/datapipelines1/src/api.rs b/gen/datapipelines1/src/api.rs index 8ccea72bda..f41f255d17 100644 --- a/gen/datapipelines1/src/api.rs +++ b/gen/datapipelines1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Datapipelines { Datapipelines { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datapipelines.googleapis.com/".to_string(), _root_url: "https://datapipelines.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Datapipelines { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datapipelines1/src/client.rs b/gen/datapipelines1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datapipelines1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datapipelines1/src/lib.rs b/gen/datapipelines1/src/lib.rs index e4ad841080..8ecc1fe5de 100644 --- a/gen/datapipelines1/src/lib.rs +++ b/gen/datapipelines1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Datapipelines* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *datapipelines:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Datapipelines* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *datapipelines:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Datapipelines* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/dataflow/docs/guides/data-pipelines). diff --git a/gen/dataplex1-cli/Cargo.toml b/gen/dataplex1-cli/Cargo.toml index c8def8ceb7..8d4d5c8653 100644 --- a/gen/dataplex1-cli/Cargo.toml +++ b/gen/dataplex1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dataplex1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Dataplex (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dataplex1-cli" @@ -20,13 +20,13 @@ name = "dataplex1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dataplex1] path = "../dataplex1" -version = "4.0.1+20220223" +version = "5.0.2+20230120" + diff --git a/gen/dataplex1-cli/README.md b/gen/dataplex1-cli/README.md index 9ded032b6c..791cb0723b 100644 --- a/gen/dataplex1-cli/README.md +++ b/gen/dataplex1-cli/README.md @@ -25,21 +25,64 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Dataplex* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Dataplex* API at revision *20230120*. The CLI is at version *5.0.2*. ```bash dataplex1 [options] projects + locations-data-attribute-bindings-create (-r )... [-p ]... [-o ] + locations-data-attribute-bindings-delete [-p ]... [-o ] + locations-data-attribute-bindings-get [-p ]... [-o ] + locations-data-attribute-bindings-get-iam-policy [-p ]... [-o ] + locations-data-attribute-bindings-list [-p ]... [-o ] + locations-data-attribute-bindings-patch (-r )... [-p ]... [-o ] + locations-data-attribute-bindings-set-iam-policy (-r )... [-p ]... [-o ] + locations-data-attribute-bindings-test-iam-permissions (-r )... [-p ]... [-o ] + locations-data-scans-create (-r )... [-p ]... [-o ] + locations-data-scans-delete [-p ]... [-o ] + locations-data-scans-get [-p ]... [-o ] + locations-data-scans-get-iam-policy [-p ]... [-o ] + locations-data-scans-jobs-get [-p ]... [-o ] + locations-data-scans-jobs-list [-p ]... [-o ] + locations-data-scans-list [-p ]... [-o ] + locations-data-scans-patch (-r )... [-p ]... [-o ] + locations-data-scans-run (-r )... [-p ]... [-o ] + locations-data-scans-set-iam-policy (-r )... [-p ]... [-o ] + locations-data-scans-test-iam-permissions (-r )... [-p ]... [-o ] + locations-data-taxonomies-attributes-create (-r )... [-p ]... [-o ] + locations-data-taxonomies-attributes-delete [-p ]... [-o ] + locations-data-taxonomies-attributes-get [-p ]... [-o ] + locations-data-taxonomies-attributes-get-iam-policy [-p ]... [-o ] + locations-data-taxonomies-attributes-list [-p ]... [-o ] + locations-data-taxonomies-attributes-patch (-r )... [-p ]... [-o ] + locations-data-taxonomies-attributes-set-iam-policy (-r )... [-p ]... [-o ] + locations-data-taxonomies-attributes-test-iam-permissions (-r )... [-p ]... 
[-o ] + locations-data-taxonomies-create (-r )... [-p ]... [-o ] + locations-data-taxonomies-delete [-p ]... [-o ] + locations-data-taxonomies-get [-p ]... [-o ] + locations-data-taxonomies-get-iam-policy [-p ]... [-o ] + locations-data-taxonomies-list [-p ]... [-o ] + locations-data-taxonomies-patch (-r )... [-p ]... [-o ] + locations-data-taxonomies-set-iam-policy (-r )... [-p ]... [-o ] + locations-data-taxonomies-test-iam-permissions (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-lakes-actions-list [-p ]... [-o ] + locations-lakes-content-create (-r )... [-p ]... [-o ] + locations-lakes-content-delete [-p ]... [-o ] + locations-lakes-content-get [-p ]... [-o ] locations-lakes-content-get-iam-policy [-p ]... [-o ] + locations-lakes-content-list [-p ]... [-o ] + locations-lakes-content-patch (-r )... [-p ]... [-o ] locations-lakes-content-set-iam-policy (-r )... [-p ]... [-o ] locations-lakes-content-test-iam-permissions (-r )... [-p ]... [-o ] locations-lakes-contentitems-create (-r )... [-p ]... [-o ] locations-lakes-contentitems-delete [-p ]... [-o ] locations-lakes-contentitems-get [-p ]... [-o ] + locations-lakes-contentitems-get-iam-policy [-p ]... [-o ] locations-lakes-contentitems-list [-p ]... [-o ] locations-lakes-contentitems-patch (-r )... [-p ]... [-o ] + locations-lakes-contentitems-set-iam-policy (-r )... [-p ]... [-o ] + locations-lakes-contentitems-test-iam-permissions (-r )... [-p ]... [-o ] locations-lakes-create (-r )... [-p ]... [-o ] locations-lakes-delete [-p ]... [-o ] locations-lakes-environments-create (-r )... [-p ]... [-o ] @@ -65,6 +108,7 @@ dataplex1 [options] locations-lakes-tasks-jobs-list [-p ]... [-o ] locations-lakes-tasks-list [-p ]... [-o ] locations-lakes-tasks-patch (-r )... [-p ]... [-o ] + locations-lakes-tasks-run (-r )... [-p ]... [-o ] locations-lakes-tasks-set-iam-policy (-r )... [-p ]... [-o ] locations-lakes-tasks-test-iam-permissions (-r )... [-p ]... 
[-o ] locations-lakes-test-iam-permissions (-r )... [-p ]... [-o ] diff --git a/gen/dataplex1-cli/mkdocs.yml b/gen/dataplex1-cli/mkdocs.yml index 6135e392dd..23a4a661fd 100644 --- a/gen/dataplex1-cli/mkdocs.yml +++ b/gen/dataplex1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Dataplex v4.0.1+20220223 +site_name: Cloud Dataplex v5.0.2+20230120 site_url: http://byron.github.io/google-apis-rs/google-dataplex1-cli site_description: A complete library to interact with Cloud Dataplex (protocol v1) @@ -7,78 +7,123 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dataplex1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-lakes-actions-list.md', 'Projects', 'Locations Lakes Actions List'] -- ['projects_locations-lakes-content-get-iam-policy.md', 'Projects', 'Locations Lakes Content Get Iam Policy'] -- ['projects_locations-lakes-content-set-iam-policy.md', 'Projects', 'Locations Lakes Content Set Iam Policy'] -- ['projects_locations-lakes-content-test-iam-permissions.md', 'Projects', 'Locations Lakes Content Test Iam Permissions'] -- ['projects_locations-lakes-contentitems-create.md', 'Projects', 'Locations Lakes Contentitems Create'] -- ['projects_locations-lakes-contentitems-delete.md', 'Projects', 'Locations Lakes Contentitems Delete'] -- ['projects_locations-lakes-contentitems-get.md', 'Projects', 'Locations Lakes Contentitems Get'] -- ['projects_locations-lakes-contentitems-list.md', 'Projects', 'Locations Lakes Contentitems List'] -- ['projects_locations-lakes-contentitems-patch.md', 'Projects', 'Locations Lakes Contentitems Patch'] -- ['projects_locations-lakes-create.md', 'Projects', 'Locations Lakes Create'] -- ['projects_locations-lakes-delete.md', 'Projects', 'Locations Lakes Delete'] -- ['projects_locations-lakes-environments-create.md', 'Projects', 'Locations Lakes Environments Create'] -- 
['projects_locations-lakes-environments-delete.md', 'Projects', 'Locations Lakes Environments Delete'] -- ['projects_locations-lakes-environments-get.md', 'Projects', 'Locations Lakes Environments Get'] -- ['projects_locations-lakes-environments-get-iam-policy.md', 'Projects', 'Locations Lakes Environments Get Iam Policy'] -- ['projects_locations-lakes-environments-list.md', 'Projects', 'Locations Lakes Environments List'] -- ['projects_locations-lakes-environments-patch.md', 'Projects', 'Locations Lakes Environments Patch'] -- ['projects_locations-lakes-environments-sessions-list.md', 'Projects', 'Locations Lakes Environments Sessions List'] -- ['projects_locations-lakes-environments-set-iam-policy.md', 'Projects', 'Locations Lakes Environments Set Iam Policy'] -- ['projects_locations-lakes-environments-test-iam-permissions.md', 'Projects', 'Locations Lakes Environments Test Iam Permissions'] -- ['projects_locations-lakes-get.md', 'Projects', 'Locations Lakes Get'] -- ['projects_locations-lakes-get-iam-policy.md', 'Projects', 'Locations Lakes Get Iam Policy'] -- ['projects_locations-lakes-list.md', 'Projects', 'Locations Lakes List'] -- ['projects_locations-lakes-patch.md', 'Projects', 'Locations Lakes Patch'] -- ['projects_locations-lakes-set-iam-policy.md', 'Projects', 'Locations Lakes Set Iam Policy'] -- ['projects_locations-lakes-tasks-create.md', 'Projects', 'Locations Lakes Tasks Create'] -- ['projects_locations-lakes-tasks-delete.md', 'Projects', 'Locations Lakes Tasks Delete'] -- ['projects_locations-lakes-tasks-get.md', 'Projects', 'Locations Lakes Tasks Get'] -- ['projects_locations-lakes-tasks-get-iam-policy.md', 'Projects', 'Locations Lakes Tasks Get Iam Policy'] -- ['projects_locations-lakes-tasks-jobs-cancel.md', 'Projects', 'Locations Lakes Tasks Jobs Cancel'] -- ['projects_locations-lakes-tasks-jobs-get.md', 'Projects', 'Locations Lakes Tasks Jobs Get'] -- ['projects_locations-lakes-tasks-jobs-list.md', 'Projects', 'Locations Lakes Tasks Jobs 
List'] -- ['projects_locations-lakes-tasks-list.md', 'Projects', 'Locations Lakes Tasks List'] -- ['projects_locations-lakes-tasks-patch.md', 'Projects', 'Locations Lakes Tasks Patch'] -- ['projects_locations-lakes-tasks-set-iam-policy.md', 'Projects', 'Locations Lakes Tasks Set Iam Policy'] -- ['projects_locations-lakes-tasks-test-iam-permissions.md', 'Projects', 'Locations Lakes Tasks Test Iam Permissions'] -- ['projects_locations-lakes-test-iam-permissions.md', 'Projects', 'Locations Lakes Test Iam Permissions'] -- ['projects_locations-lakes-zones-actions-list.md', 'Projects', 'Locations Lakes Zones Actions List'] -- ['projects_locations-lakes-zones-assets-actions-list.md', 'Projects', 'Locations Lakes Zones Assets Actions List'] -- ['projects_locations-lakes-zones-assets-create.md', 'Projects', 'Locations Lakes Zones Assets Create'] -- ['projects_locations-lakes-zones-assets-delete.md', 'Projects', 'Locations Lakes Zones Assets Delete'] -- ['projects_locations-lakes-zones-assets-get.md', 'Projects', 'Locations Lakes Zones Assets Get'] -- ['projects_locations-lakes-zones-assets-get-iam-policy.md', 'Projects', 'Locations Lakes Zones Assets Get Iam Policy'] -- ['projects_locations-lakes-zones-assets-list.md', 'Projects', 'Locations Lakes Zones Assets List'] -- ['projects_locations-lakes-zones-assets-patch.md', 'Projects', 'Locations Lakes Zones Assets Patch'] -- ['projects_locations-lakes-zones-assets-set-iam-policy.md', 'Projects', 'Locations Lakes Zones Assets Set Iam Policy'] -- ['projects_locations-lakes-zones-assets-test-iam-permissions.md', 'Projects', 'Locations Lakes Zones Assets Test Iam Permissions'] -- ['projects_locations-lakes-zones-create.md', 'Projects', 'Locations Lakes Zones Create'] -- ['projects_locations-lakes-zones-delete.md', 'Projects', 'Locations Lakes Zones Delete'] -- ['projects_locations-lakes-zones-entities-create.md', 'Projects', 'Locations Lakes Zones Entities Create'] -- ['projects_locations-lakes-zones-entities-delete.md', 
'Projects', 'Locations Lakes Zones Entities Delete'] -- ['projects_locations-lakes-zones-entities-get.md', 'Projects', 'Locations Lakes Zones Entities Get'] -- ['projects_locations-lakes-zones-entities-list.md', 'Projects', 'Locations Lakes Zones Entities List'] -- ['projects_locations-lakes-zones-entities-partitions-create.md', 'Projects', 'Locations Lakes Zones Entities Partitions Create'] -- ['projects_locations-lakes-zones-entities-partitions-delete.md', 'Projects', 'Locations Lakes Zones Entities Partitions Delete'] -- ['projects_locations-lakes-zones-entities-partitions-get.md', 'Projects', 'Locations Lakes Zones Entities Partitions Get'] -- ['projects_locations-lakes-zones-entities-partitions-list.md', 'Projects', 'Locations Lakes Zones Entities Partitions List'] -- ['projects_locations-lakes-zones-entities-update.md', 'Projects', 'Locations Lakes Zones Entities Update'] -- ['projects_locations-lakes-zones-get.md', 'Projects', 'Locations Lakes Zones Get'] -- ['projects_locations-lakes-zones-get-iam-policy.md', 'Projects', 'Locations Lakes Zones Get Iam Policy'] -- ['projects_locations-lakes-zones-list.md', 'Projects', 'Locations Lakes Zones List'] -- ['projects_locations-lakes-zones-patch.md', 'Projects', 'Locations Lakes Zones Patch'] -- ['projects_locations-lakes-zones-set-iam-policy.md', 'Projects', 'Locations Lakes Zones Set Iam Policy'] -- ['projects_locations-lakes-zones-test-iam-permissions.md', 'Projects', 'Locations Lakes Zones Test Iam Permissions'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Data Attribute Bindings Create': 
'projects_locations-data-attribute-bindings-create.md' + - 'Locations Data Attribute Bindings Delete': 'projects_locations-data-attribute-bindings-delete.md' + - 'Locations Data Attribute Bindings Get': 'projects_locations-data-attribute-bindings-get.md' + - 'Locations Data Attribute Bindings Get Iam Policy': 'projects_locations-data-attribute-bindings-get-iam-policy.md' + - 'Locations Data Attribute Bindings List': 'projects_locations-data-attribute-bindings-list.md' + - 'Locations Data Attribute Bindings Patch': 'projects_locations-data-attribute-bindings-patch.md' + - 'Locations Data Attribute Bindings Set Iam Policy': 'projects_locations-data-attribute-bindings-set-iam-policy.md' + - 'Locations Data Attribute Bindings Test Iam Permissions': 'projects_locations-data-attribute-bindings-test-iam-permissions.md' + - 'Locations Data Scans Create': 'projects_locations-data-scans-create.md' + - 'Locations Data Scans Delete': 'projects_locations-data-scans-delete.md' + - 'Locations Data Scans Get': 'projects_locations-data-scans-get.md' + - 'Locations Data Scans Get Iam Policy': 'projects_locations-data-scans-get-iam-policy.md' + - 'Locations Data Scans Jobs Get': 'projects_locations-data-scans-jobs-get.md' + - 'Locations Data Scans Jobs List': 'projects_locations-data-scans-jobs-list.md' + - 'Locations Data Scans List': 'projects_locations-data-scans-list.md' + - 'Locations Data Scans Patch': 'projects_locations-data-scans-patch.md' + - 'Locations Data Scans Run': 'projects_locations-data-scans-run.md' + - 'Locations Data Scans Set Iam Policy': 'projects_locations-data-scans-set-iam-policy.md' + - 'Locations Data Scans Test Iam Permissions': 'projects_locations-data-scans-test-iam-permissions.md' + - 'Locations Data Taxonomies Attributes Create': 'projects_locations-data-taxonomies-attributes-create.md' + - 'Locations Data Taxonomies Attributes Delete': 'projects_locations-data-taxonomies-attributes-delete.md' + - 'Locations Data Taxonomies Attributes Get': 
'projects_locations-data-taxonomies-attributes-get.md' + - 'Locations Data Taxonomies Attributes Get Iam Policy': 'projects_locations-data-taxonomies-attributes-get-iam-policy.md' + - 'Locations Data Taxonomies Attributes List': 'projects_locations-data-taxonomies-attributes-list.md' + - 'Locations Data Taxonomies Attributes Patch': 'projects_locations-data-taxonomies-attributes-patch.md' + - 'Locations Data Taxonomies Attributes Set Iam Policy': 'projects_locations-data-taxonomies-attributes-set-iam-policy.md' + - 'Locations Data Taxonomies Attributes Test Iam Permissions': 'projects_locations-data-taxonomies-attributes-test-iam-permissions.md' + - 'Locations Data Taxonomies Create': 'projects_locations-data-taxonomies-create.md' + - 'Locations Data Taxonomies Delete': 'projects_locations-data-taxonomies-delete.md' + - 'Locations Data Taxonomies Get': 'projects_locations-data-taxonomies-get.md' + - 'Locations Data Taxonomies Get Iam Policy': 'projects_locations-data-taxonomies-get-iam-policy.md' + - 'Locations Data Taxonomies List': 'projects_locations-data-taxonomies-list.md' + - 'Locations Data Taxonomies Patch': 'projects_locations-data-taxonomies-patch.md' + - 'Locations Data Taxonomies Set Iam Policy': 'projects_locations-data-taxonomies-set-iam-policy.md' + - 'Locations Data Taxonomies Test Iam Permissions': 'projects_locations-data-taxonomies-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Lakes Actions List': 'projects_locations-lakes-actions-list.md' + - 'Locations Lakes Content Create': 'projects_locations-lakes-content-create.md' + - 'Locations Lakes Content Delete': 'projects_locations-lakes-content-delete.md' + - 'Locations Lakes Content Get': 'projects_locations-lakes-content-get.md' + - 'Locations Lakes Content Get Iam Policy': 'projects_locations-lakes-content-get-iam-policy.md' + - 'Locations Lakes Content List': 'projects_locations-lakes-content-list.md' + - 'Locations Lakes Content Patch': 
'projects_locations-lakes-content-patch.md' + - 'Locations Lakes Content Set Iam Policy': 'projects_locations-lakes-content-set-iam-policy.md' + - 'Locations Lakes Content Test Iam Permissions': 'projects_locations-lakes-content-test-iam-permissions.md' + - 'Locations Lakes Contentitems Create': 'projects_locations-lakes-contentitems-create.md' + - 'Locations Lakes Contentitems Delete': 'projects_locations-lakes-contentitems-delete.md' + - 'Locations Lakes Contentitems Get': 'projects_locations-lakes-contentitems-get.md' + - 'Locations Lakes Contentitems Get Iam Policy': 'projects_locations-lakes-contentitems-get-iam-policy.md' + - 'Locations Lakes Contentitems List': 'projects_locations-lakes-contentitems-list.md' + - 'Locations Lakes Contentitems Patch': 'projects_locations-lakes-contentitems-patch.md' + - 'Locations Lakes Contentitems Set Iam Policy': 'projects_locations-lakes-contentitems-set-iam-policy.md' + - 'Locations Lakes Contentitems Test Iam Permissions': 'projects_locations-lakes-contentitems-test-iam-permissions.md' + - 'Locations Lakes Create': 'projects_locations-lakes-create.md' + - 'Locations Lakes Delete': 'projects_locations-lakes-delete.md' + - 'Locations Lakes Environments Create': 'projects_locations-lakes-environments-create.md' + - 'Locations Lakes Environments Delete': 'projects_locations-lakes-environments-delete.md' + - 'Locations Lakes Environments Get': 'projects_locations-lakes-environments-get.md' + - 'Locations Lakes Environments Get Iam Policy': 'projects_locations-lakes-environments-get-iam-policy.md' + - 'Locations Lakes Environments List': 'projects_locations-lakes-environments-list.md' + - 'Locations Lakes Environments Patch': 'projects_locations-lakes-environments-patch.md' + - 'Locations Lakes Environments Sessions List': 'projects_locations-lakes-environments-sessions-list.md' + - 'Locations Lakes Environments Set Iam Policy': 'projects_locations-lakes-environments-set-iam-policy.md' + - 'Locations Lakes Environments Test 
Iam Permissions': 'projects_locations-lakes-environments-test-iam-permissions.md' + - 'Locations Lakes Get': 'projects_locations-lakes-get.md' + - 'Locations Lakes Get Iam Policy': 'projects_locations-lakes-get-iam-policy.md' + - 'Locations Lakes List': 'projects_locations-lakes-list.md' + - 'Locations Lakes Patch': 'projects_locations-lakes-patch.md' + - 'Locations Lakes Set Iam Policy': 'projects_locations-lakes-set-iam-policy.md' + - 'Locations Lakes Tasks Create': 'projects_locations-lakes-tasks-create.md' + - 'Locations Lakes Tasks Delete': 'projects_locations-lakes-tasks-delete.md' + - 'Locations Lakes Tasks Get': 'projects_locations-lakes-tasks-get.md' + - 'Locations Lakes Tasks Get Iam Policy': 'projects_locations-lakes-tasks-get-iam-policy.md' + - 'Locations Lakes Tasks Jobs Cancel': 'projects_locations-lakes-tasks-jobs-cancel.md' + - 'Locations Lakes Tasks Jobs Get': 'projects_locations-lakes-tasks-jobs-get.md' + - 'Locations Lakes Tasks Jobs List': 'projects_locations-lakes-tasks-jobs-list.md' + - 'Locations Lakes Tasks List': 'projects_locations-lakes-tasks-list.md' + - 'Locations Lakes Tasks Patch': 'projects_locations-lakes-tasks-patch.md' + - 'Locations Lakes Tasks Run': 'projects_locations-lakes-tasks-run.md' + - 'Locations Lakes Tasks Set Iam Policy': 'projects_locations-lakes-tasks-set-iam-policy.md' + - 'Locations Lakes Tasks Test Iam Permissions': 'projects_locations-lakes-tasks-test-iam-permissions.md' + - 'Locations Lakes Test Iam Permissions': 'projects_locations-lakes-test-iam-permissions.md' + - 'Locations Lakes Zones Actions List': 'projects_locations-lakes-zones-actions-list.md' + - 'Locations Lakes Zones Assets Actions List': 'projects_locations-lakes-zones-assets-actions-list.md' + - 'Locations Lakes Zones Assets Create': 'projects_locations-lakes-zones-assets-create.md' + - 'Locations Lakes Zones Assets Delete': 'projects_locations-lakes-zones-assets-delete.md' + - 'Locations Lakes Zones Assets Get': 
'projects_locations-lakes-zones-assets-get.md' + - 'Locations Lakes Zones Assets Get Iam Policy': 'projects_locations-lakes-zones-assets-get-iam-policy.md' + - 'Locations Lakes Zones Assets List': 'projects_locations-lakes-zones-assets-list.md' + - 'Locations Lakes Zones Assets Patch': 'projects_locations-lakes-zones-assets-patch.md' + - 'Locations Lakes Zones Assets Set Iam Policy': 'projects_locations-lakes-zones-assets-set-iam-policy.md' + - 'Locations Lakes Zones Assets Test Iam Permissions': 'projects_locations-lakes-zones-assets-test-iam-permissions.md' + - 'Locations Lakes Zones Create': 'projects_locations-lakes-zones-create.md' + - 'Locations Lakes Zones Delete': 'projects_locations-lakes-zones-delete.md' + - 'Locations Lakes Zones Entities Create': 'projects_locations-lakes-zones-entities-create.md' + - 'Locations Lakes Zones Entities Delete': 'projects_locations-lakes-zones-entities-delete.md' + - 'Locations Lakes Zones Entities Get': 'projects_locations-lakes-zones-entities-get.md' + - 'Locations Lakes Zones Entities List': 'projects_locations-lakes-zones-entities-list.md' + - 'Locations Lakes Zones Entities Partitions Create': 'projects_locations-lakes-zones-entities-partitions-create.md' + - 'Locations Lakes Zones Entities Partitions Delete': 'projects_locations-lakes-zones-entities-partitions-delete.md' + - 'Locations Lakes Zones Entities Partitions Get': 'projects_locations-lakes-zones-entities-partitions-get.md' + - 'Locations Lakes Zones Entities Partitions List': 'projects_locations-lakes-zones-entities-partitions-list.md' + - 'Locations Lakes Zones Entities Update': 'projects_locations-lakes-zones-entities-update.md' + - 'Locations Lakes Zones Get': 'projects_locations-lakes-zones-get.md' + - 'Locations Lakes Zones Get Iam Policy': 'projects_locations-lakes-zones-get-iam-policy.md' + - 'Locations Lakes Zones List': 'projects_locations-lakes-zones-list.md' + - 'Locations Lakes Zones Patch': 'projects_locations-lakes-zones-patch.md' + - 'Locations 
Lakes Zones Set Iam Policy': 'projects_locations-lakes-zones-set-iam-policy.md' + - 'Locations Lakes Zones Test Iam Permissions': 'projects_locations-lakes-zones-test-iam-permissions.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/dataplex1-cli/src/client.rs b/gen/dataplex1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dataplex1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dataplex1-cli/src/main.rs b/gen/dataplex1-cli/src/main.rs index 7b0e87382a..bb357a22ed 100644 --- a/gen/dataplex1-cli/src/main.rs +++ b/gen/dataplex1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dataplex1::{api, Error, oauth2}; +use google_dataplex1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,2643 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_locations_data_attribute_bindings_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "attributes" => Some(("attributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource" => Some(("resource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["attributes", "create-time", "description", "display-name", "etag", "labels", "name", "resource", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1DataAttributeBinding = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_attribute_bindings_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| 
arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "data-attribute-binding-id" => { + call = call.data_attribute_binding_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["data-attribute-binding-id", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_attribute_bindings_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_attribute_bindings_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "etag" => { + call = call.etag(value.unwrap_or("")); + }, + _ => { + 
let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["etag"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_attribute_bindings_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_attribute_bindings_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + 
{let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_attribute_bindings_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_attribute_bindings_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + 
v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_attribute_bindings_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_attribute_bindings_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), 
+ {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_attribute_bindings_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "attributes" => Some(("attributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource" => Some(("resource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["attributes", "create-time", "description", "display-name", "etag", "labels", "name", "resource", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1DataAttributeBinding = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_attribute_bindings_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| 
arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_attribute_bindings_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, 
err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_attribute_bindings_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else 
{ + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_attribute_bindings_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_attribute_bindings_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to 
work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data.entity" => Some(("data.entity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-profile-result.row-count" => Some(("dataProfileResult.rowCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "data-profile-result.scanned-data.incremental-field.end" => Some(("dataProfileResult.scannedData.incrementalField.end", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-profile-result.scanned-data.incremental-field.field" => Some(("dataProfileResult.scannedData.incrementalField.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-profile-result.scanned-data.incremental-field.start" => Some(("dataProfileResult.scannedData.incrementalField.start", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-quality-result.passed" => Some(("dataQualityResult.passed", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "data-quality-result.row-count" => Some(("dataQualityResult.rowCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "data-quality-result.scanned-data.incremental-field.end" => Some(("dataQualityResult.scannedData.incrementalField.end", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-quality-result.scanned-data.incremental-field.field" => Some(("dataQualityResult.scannedData.incrementalField.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-quality-result.scanned-data.incremental-field.start" => Some(("dataQualityResult.scannedData.incrementalField.start", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-spec.field" => Some(("executionSpec.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-spec.trigger.schedule.cron" => Some(("executionSpec.trigger.schedule.cron", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job-end-time" => Some(("executionStatus.latestJobEndTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job-start-time" => Some(("executionStatus.latestJobStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "cron", "data", "data-profile-result", "data-quality-result", "description", "display-name", "end", "entity", "execution-spec", "execution-status", "field", "incremental-field", "labels", "latest-job-end-time", "latest-job-start-time", "name", "passed", "row-count", "scanned-data", "schedule", "start", "state", "trigger", "type", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1DataScan = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_scans_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "data-scan-id" => { + call = call.data_scan_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["data-scan-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_scans_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_scans_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "view" => { + call = call.view(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["view"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_scans_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut 
ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_jobs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_scans_jobs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "view" => { + call = call.view(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["view"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_jobs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let 
mut call = self.hub.projects().locations_data_scans_jobs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.projects().locations_data_scans_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data.entity" => Some(("data.entity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-profile-result.row-count" => Some(("dataProfileResult.rowCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "data-profile-result.scanned-data.incremental-field.end" => Some(("dataProfileResult.scannedData.incrementalField.end", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-profile-result.scanned-data.incremental-field.field" => Some(("dataProfileResult.scannedData.incrementalField.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-profile-result.scanned-data.incremental-field.start" => Some(("dataProfileResult.scannedData.incrementalField.start", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-quality-result.passed" => Some(("dataQualityResult.passed", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "data-quality-result.row-count" => Some(("dataQualityResult.rowCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "data-quality-result.scanned-data.incremental-field.end" 
=> Some(("dataQualityResult.scannedData.incrementalField.end", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-quality-result.scanned-data.incremental-field.field" => Some(("dataQualityResult.scannedData.incrementalField.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-quality-result.scanned-data.incremental-field.start" => Some(("dataQualityResult.scannedData.incrementalField.start", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-spec.field" => Some(("executionSpec.field", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-spec.trigger.schedule.cron" => Some(("executionSpec.trigger.schedule.cron", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job-end-time" => Some(("executionStatus.latestJobEndTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job-start-time" => Some(("executionStatus.latestJobStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, 
&vec!["create-time", "cron", "data", "data-profile-result", "data-quality-result", "description", "display-name", "end", "entity", "execution-spec", "execution-status", "field", "incremental-field", "labels", "latest-job-end-time", "latest-job-start-time", "name", "passed", "row-count", "scanned-data", "schedule", "start", "state", "trigger", "type", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1DataScan = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_scans_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_run(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1RunDataScanRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_scans_run(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } 
+ + async fn _projects_locations_data_scans_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_scans_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = 
false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_scans_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, 
JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_scans_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_attributes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "attribute-count" => Some(("attributeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-access-spec.readers" => Some(("dataAccessSpec.readers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent-id" => Some(("parentId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-access-spec.owners" => Some(("resourceAccessSpec.owners", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "resource-access-spec.readers" => Some(("resourceAccessSpec.readers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "resource-access-spec.writers" => Some(("resourceAccessSpec.writers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["attribute-count", "create-time", "data-access-spec", "description", "display-name", "etag", "labels", "name", "owners", "parent-id", "readers", "resource-access-spec", "uid", "update-time", "writers"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; 
+ if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1DataAttribute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_taxonomies_attributes_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "data-attribute-id" => { + call = call.data_attribute_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["data-attribute-id", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + 
json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_attributes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_taxonomies_attributes_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "etag" => { + call = call.etag(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["etag"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_attributes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, 
err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_taxonomies_attributes_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_attributes_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_taxonomies_attributes_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = 
parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_attributes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_taxonomies_attributes_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + 
"page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_attributes_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "attribute-count" => Some(("attributeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-access-spec.readers" => Some(("dataAccessSpec.readers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent-id" => Some(("parentId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-access-spec.owners" => Some(("resourceAccessSpec.owners", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "resource-access-spec.readers" => Some(("resourceAccessSpec.readers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "resource-access-spec.writers" => Some(("resourceAccessSpec.writers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["attribute-count", "create-time", "data-access-spec", "description", "display-name", "etag", "labels", "name", "owners", "parent-id", "readers", "resource-access-spec", "uid", "update-time", "writers"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1DataAttribute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_taxonomies_attributes_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_attributes_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_taxonomies_attributes_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_attributes_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_taxonomies_attributes_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to 
work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "attribute-count" => Some(("attributeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, 
&vec!["attribute-count", "create-time", "description", "display-name", "etag", "labels", "name", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1DataTaxonomy = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_taxonomies_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "data-taxonomy-id" => { + call = call.data_taxonomy_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["data-taxonomy-id", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ 
=> unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_taxonomies_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "etag" => { + call = call.etag(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["etag"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + 
json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_data_taxonomies_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.projects().locations_data_taxonomies_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.projects().locations_data_taxonomies_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "attribute-count" => Some(("attributeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["attribute-count", "create-time", "description", "display-name", "etag", "labels", "name", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let 
Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1DataTaxonomy = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_taxonomies_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_taxonomies_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_data_taxonomies_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if 
value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_data_taxonomies_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or("")); @@ -113,7 +2749,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -162,6 +2798,212 @@ where } } + async fn _projects_locations_lakes_content_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-text" => Some(("dataText", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.kernel-type" => Some(("notebook.kernelType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "path" => Some(("path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sql-script.engine" => Some(("sqlScript.engine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "data-text", "description", "engine", "kernel-type", "labels", "name", "notebook", "path", "sql-script", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1Content = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_lakes_content_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = 
call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_lakes_content_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_lakes_content_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_lakes_content_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_lakes_content_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "view" => { + call = call.view(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["view"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + 
if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_lakes_content_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_lakes_content_get_iam_policy(opt.value_of("resource").unwrap_or("")); @@ -169,7 +3011,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -218,6 +3060,169 @@ where } } + async fn _projects_locations_lakes_content_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_lakes_content_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + 
call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_lakes_content_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); 
+ let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-text" => Some(("dataText", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.kernel-type" => Some(("notebook.kernelType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "path" => Some(("path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sql-script.engine" => Some(("sqlScript.engine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "data-text", "description", "engine", "kernel-type", "labels", "name", "notebook", "path", "sql-script", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: 
api::GoogleCloudDataplexV1Content = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_lakes_content_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn 
_projects_locations_lakes_content_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -439,7 +3444,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -596,6 +3601,62 @@ where } } + async fn _projects_locations_lakes_contentitems_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_lakes_contentitems_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_lakes_contentitems_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_lakes_contentitems_list(opt.value_of("parent").unwrap_or("")); @@ -606,7 +3667,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -707,10 +3768,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -759,6 +3820,178 @@ where } } + async fn _projects_locations_lakes_contentitems_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + 
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_lakes_contentitems_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + 
{let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_lakes_contentitems_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_lakes_contentitems_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_lakes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -815,7 +4048,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "lake-id" => { call = call.lake_id(value.unwrap_or("")); @@ -978,7 +4211,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "environment-id" => { call = call.environment_id(value.unwrap_or("")); @@ -1141,7 +4374,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1200,7 +4433,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1314,10 +4547,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = 
call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1376,7 +4609,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); }, _ => { let mut found = false; @@ -1391,7 +4627,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -1656,7 +4892,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1715,7 +4951,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1826,10 +5062,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1992,14 +5228,39 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "execution-spec.args" => Some(("executionSpec.args", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "execution-spec.kms-key" => Some(("executionSpec.kmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "execution-spec.max-job-execution-lifetime" => Some(("executionSpec.maxJobExecutionLifetime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-spec.project" => Some(("executionSpec.project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "execution-spec.service-account" => Some(("executionSpec.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.end-time" => Some(("executionStatus.latestJob.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.message" => Some(("executionStatus.latestJob.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.name" => Some(("executionStatus.latestJob.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.retry-count" => Some(("executionStatus.latestJob.retryCount", JsonTypeInfo { jtype: JsonType::Int, ctype: 
ComplexType::Pod })), + "execution-status.latest-job.service" => Some(("executionStatus.latestJob.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.service-job" => Some(("executionStatus.latestJob.serviceJob", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.start-time" => Some(("executionStatus.latestJob.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.state" => Some(("executionStatus.latestJob.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.uid" => Some(("executionStatus.latestJob.uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.update-time" => Some(("executionStatus.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.archive-uris" => Some(("notebook.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.file-uris" => Some(("notebook.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.infrastructure-spec.batch.executors-count" => Some(("notebook.infrastructureSpec.batch.executorsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "notebook.infrastructure-spec.batch.max-executors-count" => Some(("notebook.infrastructureSpec.batch.maxExecutorsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "notebook.infrastructure-spec.container-image.image" => Some(("notebook.infrastructureSpec.containerImage.image", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.infrastructure-spec.container-image.java-jars" => 
Some(("notebook.infrastructureSpec.containerImage.javaJars", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.infrastructure-spec.container-image.properties" => Some(("notebook.infrastructureSpec.containerImage.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "notebook.infrastructure-spec.container-image.python-packages" => Some(("notebook.infrastructureSpec.containerImage.pythonPackages", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.infrastructure-spec.vpc-network.network" => Some(("notebook.infrastructureSpec.vpcNetwork.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.infrastructure-spec.vpc-network.network-tags" => Some(("notebook.infrastructureSpec.vpcNetwork.networkTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.infrastructure-spec.vpc-network.sub-network" => Some(("notebook.infrastructureSpec.vpcNetwork.subNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.notebook" => Some(("notebook.notebook", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spark.archive-uris" => Some(("spark.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "spark.file-uris" => Some(("spark.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "spark.infrastructure-spec.batch.executors-count" => Some(("spark.infrastructureSpec.batch.executorsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "spark.infrastructure-spec.batch.max-executors-count" => Some(("spark.infrastructureSpec.batch.maxExecutorsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "spark.infrastructure-spec.container-image.image" => Some(("spark.infrastructureSpec.containerImage.image", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"spark.infrastructure-spec.container-image.java-jars" => Some(("spark.infrastructureSpec.containerImage.javaJars", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "spark.infrastructure-spec.container-image.properties" => Some(("spark.infrastructureSpec.containerImage.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "spark.infrastructure-spec.container-image.python-packages" => Some(("spark.infrastructureSpec.containerImage.pythonPackages", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2020,7 +5281,7 @@ where "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "batch", "container-image", "create-time", "description", "disabled", "display-name", "execution-spec", "executors-count", "file-uris", "infrastructure-spec", "java-jars", "labels", "main-class", "main-jar-file-uri", "max-executors-count", "max-job-execution-lifetime", "max-retries", "name", "network", "network-tags", "properties", "python-packages", "python-script-file", "schedule", "service-account", "spark", "sql-script", "sql-script-file", "start-time", "state", "sub-network", "trigger-spec", "type", "uid", "update-time", "vpc-network"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "batch", "container-image", "create-time", "description", "disabled", "display-name", "end-time", "execution-spec", "execution-status", "executors-count", "file-uris", "image", "infrastructure-spec", "java-jars", "kms-key", "labels", "latest-job", "main-class", "main-jar-file-uri", "max-executors-count", "max-job-execution-lifetime", "max-retries", "message", "name", "network", "network-tags", "notebook", "project", "properties", "python-packages", "python-script-file", "retry-count", 
"schedule", "service", "service-account", "service-job", "spark", "sql-script", "sql-script-file", "start-time", "state", "sub-network", "trigger-spec", "type", "uid", "update-time", "vpc-network"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2035,7 +5296,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "task-id" => { call = call.task_id(value.unwrap_or("")); @@ -2198,7 +5459,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2393,7 +5654,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2452,7 +5713,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2534,14 +5795,39 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => 
Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "execution-spec.args" => Some(("executionSpec.args", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "execution-spec.kms-key" => Some(("executionSpec.kmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "execution-spec.max-job-execution-lifetime" => Some(("executionSpec.maxJobExecutionLifetime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-spec.project" => Some(("executionSpec.project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "execution-spec.service-account" => Some(("executionSpec.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.end-time" => Some(("executionStatus.latestJob.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.message" => Some(("executionStatus.latestJob.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.name" => Some(("executionStatus.latestJob.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.retry-count" => Some(("executionStatus.latestJob.retryCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "execution-status.latest-job.service" => Some(("executionStatus.latestJob.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.service-job" => Some(("executionStatus.latestJob.serviceJob", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.start-time" => Some(("executionStatus.latestJob.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.latest-job.state" => Some(("executionStatus.latestJob.state", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), + "execution-status.latest-job.uid" => Some(("executionStatus.latestJob.uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-status.update-time" => Some(("executionStatus.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.archive-uris" => Some(("notebook.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.file-uris" => Some(("notebook.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.infrastructure-spec.batch.executors-count" => Some(("notebook.infrastructureSpec.batch.executorsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "notebook.infrastructure-spec.batch.max-executors-count" => Some(("notebook.infrastructureSpec.batch.maxExecutorsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "notebook.infrastructure-spec.container-image.image" => Some(("notebook.infrastructureSpec.containerImage.image", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.infrastructure-spec.container-image.java-jars" => Some(("notebook.infrastructureSpec.containerImage.javaJars", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.infrastructure-spec.container-image.properties" => Some(("notebook.infrastructureSpec.containerImage.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "notebook.infrastructure-spec.container-image.python-packages" => Some(("notebook.infrastructureSpec.containerImage.pythonPackages", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.infrastructure-spec.vpc-network.network" => 
Some(("notebook.infrastructureSpec.vpcNetwork.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.infrastructure-spec.vpc-network.network-tags" => Some(("notebook.infrastructureSpec.vpcNetwork.networkTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "notebook.infrastructure-spec.vpc-network.sub-network" => Some(("notebook.infrastructureSpec.vpcNetwork.subNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "notebook.notebook" => Some(("notebook.notebook", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spark.archive-uris" => Some(("spark.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "spark.file-uris" => Some(("spark.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "spark.infrastructure-spec.batch.executors-count" => Some(("spark.infrastructureSpec.batch.executorsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "spark.infrastructure-spec.batch.max-executors-count" => Some(("spark.infrastructureSpec.batch.maxExecutorsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "spark.infrastructure-spec.container-image.image" => Some(("spark.infrastructureSpec.containerImage.image", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spark.infrastructure-spec.container-image.java-jars" => Some(("spark.infrastructureSpec.containerImage.javaJars", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "spark.infrastructure-spec.container-image.properties" => Some(("spark.infrastructureSpec.containerImage.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "spark.infrastructure-spec.container-image.python-packages" => Some(("spark.infrastructureSpec.containerImage.pythonPackages", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2562,7 +5848,7 @@ where "uid" => 
Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "batch", "container-image", "create-time", "description", "disabled", "display-name", "execution-spec", "executors-count", "file-uris", "infrastructure-spec", "java-jars", "labels", "main-class", "main-jar-file-uri", "max-executors-count", "max-job-execution-lifetime", "max-retries", "name", "network", "network-tags", "properties", "python-packages", "python-script-file", "schedule", "service-account", "spark", "sql-script", "sql-script-file", "start-time", "state", "sub-network", "trigger-spec", "type", "uid", "update-time", "vpc-network"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "batch", "container-image", "create-time", "description", "disabled", "display-name", "end-time", "execution-spec", "execution-status", "executors-count", "file-uris", "image", "infrastructure-spec", "java-jars", "kms-key", "labels", "latest-job", "main-class", "main-jar-file-uri", "max-executors-count", "max-job-execution-lifetime", "max-retries", "message", "name", "network", "network-tags", "notebook", "project", "properties", "python-packages", "python-script-file", "retry-count", "schedule", "service", "service-account", "service-job", "spark", "sql-script", "sql-script-file", "start-time", "state", "sub-network", "trigger-spec", "type", "uid", "update-time", "vpc-network"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2577,10 +5863,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, 
err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2629,6 +5915,90 @@ where } } + async fn _projects_locations_lakes_tasks_run(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDataplexV1RunTaskRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_lakes_tasks_run(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + 
async fn _projects_locations_lakes_tasks_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2896,7 +6266,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2955,7 +6325,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3052,6 +6422,7 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-spec.name" => Some(("resourceSpec.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-spec.read-access-mode" => Some(("resourceSpec.readAccessMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-spec.type" => Some(("resourceSpec.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-status.message" => Some(("resourceStatus.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-status.state" => Some(("resourceStatus.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3063,7 +6434,7 @@ where "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "csv-options", "data-items", "data-size", 
"delimiter", "description", "disable-type-inference", "discovery-spec", "discovery-status", "display-name", "enabled", "encoding", "exclude-patterns", "filesets", "header-rows", "include-patterns", "json-options", "labels", "last-run-duration", "last-run-time", "message", "name", "resource-spec", "resource-status", "schedule", "security-status", "state", "stats", "tables", "type", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "csv-options", "data-items", "data-size", "delimiter", "description", "disable-type-inference", "discovery-spec", "discovery-status", "display-name", "enabled", "encoding", "exclude-patterns", "filesets", "header-rows", "include-patterns", "json-options", "labels", "last-run-duration", "last-run-time", "message", "name", "read-access-mode", "resource-spec", "resource-status", "schedule", "security-status", "state", "stats", "tables", "type", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3078,7 +6449,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "asset-id" => { call = call.asset_id(value.unwrap_or("")); @@ -3241,7 +6612,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3300,7 +6671,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3403,6 +6774,7 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-spec.name" => Some(("resourceSpec.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-spec.read-access-mode" => Some(("resourceSpec.readAccessMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-spec.type" => Some(("resourceSpec.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-status.message" => Some(("resourceStatus.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-status.state" => Some(("resourceStatus.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3414,7 +6786,7 @@ where "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "csv-options", "data-items", "data-size", "delimiter", "description", "disable-type-inference", "discovery-spec", "discovery-status", "display-name", "enabled", "encoding", "exclude-patterns", "filesets", "header-rows", "include-patterns", "json-options", "labels", "last-run-duration", "last-run-time", "message", "name", "resource-spec", "resource-status", "schedule", "security-status", "state", "stats", "tables", "type", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "csv-options", "data-items", "data-size", 
"delimiter", "description", "disable-type-inference", "discovery-spec", "discovery-status", "display-name", "enabled", "encoding", "exclude-patterns", "filesets", "header-rows", "include-patterns", "json-options", "labels", "last-run-duration", "last-run-time", "message", "name", "read-access-mode", "resource-spec", "resource-status", "schedule", "security-status", "state", "stats", "tables", "type", "uid", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3429,10 +6801,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3718,7 +7090,7 @@ where call = call.zone_id(value.unwrap_or("")); }, "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3842,6 +7214,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "access.read" => Some(("access.read", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "asset" => Some(("asset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "catalog-entry" => Some(("catalogEntry", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compatibility.bigquery.compatible" => Some(("compatibility.bigquery.compatible", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3860,6 +7233,7 @@ where "format.csv.header-rows" => Some(("format.csv.headerRows", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "format.csv.quote" => Some(("format.csv.quote", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "format.format" => Some(("format.format", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "format.iceberg.metadata-location" => Some(("format.iceberg.metadataLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "format.json.encoding" => Some(("format.json.encoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "format.mime-type" => Some(("format.mimeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3868,9 +7242,10 @@ where "schema.user-managed" => Some(("schema.userManaged", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "system" => Some(("system", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["asset", "bigquery", "catalog-entry", "compatibility", 
"compatible", "compression-format", "create-time", "csv", "data-path", "data-path-pattern", "delimiter", "description", "display-name", "encoding", "etag", "format", "header-rows", "hive-metastore", "id", "json", "mime-type", "name", "partition-style", "quote", "reason", "schema", "system", "type", "update-time", "user-managed"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "asset", "bigquery", "catalog-entry", "compatibility", "compatible", "compression-format", "create-time", "csv", "data-path", "data-path-pattern", "delimiter", "description", "display-name", "encoding", "etag", "format", "header-rows", "hive-metastore", "iceberg", "id", "json", "metadata-location", "mime-type", "name", "partition-style", "quote", "read", "reason", "schema", "system", "type", "uid", "update-time", "user-managed"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3885,7 +7260,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4059,7 +7434,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4154,7 +7529,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ 
=> { let mut found = false; @@ -4321,7 +7696,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4396,6 +7771,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "access.read" => Some(("access.read", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "asset" => Some(("asset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "catalog-entry" => Some(("catalogEntry", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compatibility.bigquery.compatible" => Some(("compatibility.bigquery.compatible", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -4414,6 +7790,7 @@ where "format.csv.header-rows" => Some(("format.csv.headerRows", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "format.csv.quote" => Some(("format.csv.quote", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "format.format" => Some(("format.format", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "format.iceberg.metadata-location" => Some(("format.iceberg.metadataLocation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "format.json.encoding" => Some(("format.json.encoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "format.mime-type" => Some(("format.mimeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4422,9 +7799,10 @@ where "schema.user-managed" => Some(("schema.userManaged", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "system" => Some(("system", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["asset", "bigquery", "catalog-entry", "compatibility", "compatible", "compression-format", "create-time", "csv", "data-path", "data-path-pattern", "delimiter", "description", "display-name", "encoding", "etag", "format", "header-rows", "hive-metastore", "id", "json", "mime-type", "name", "partition-style", "quote", "reason", "schema", "system", "type", "update-time", "user-managed"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "asset", "bigquery", "catalog-entry", "compatibility", "compatible", "compression-format", "create-time", "csv", "data-path", "data-path-pattern", "delimiter", "description", "display-name", "encoding", "etag", "format", "header-rows", "hive-metastore", "iceberg", "id", "json", "metadata-location", "mime-type", "name", "partition-style", "quote", "read", "reason", "schema", "system", "type", "uid", "update-time", "user-managed"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4439,7 +7817,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4547,7 +7925,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = 
call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4606,7 +7984,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4723,10 +8101,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4957,7 +8335,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5207,7 +8585,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5266,15 +8644,135 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match 
opt.subcommand() { + ("locations-data-attribute-bindings-create", Some(opt)) => { + call_result = self._projects_locations_data_attribute_bindings_create(opt, dry_run, &mut err).await; + }, + ("locations-data-attribute-bindings-delete", Some(opt)) => { + call_result = self._projects_locations_data_attribute_bindings_delete(opt, dry_run, &mut err).await; + }, + ("locations-data-attribute-bindings-get", Some(opt)) => { + call_result = self._projects_locations_data_attribute_bindings_get(opt, dry_run, &mut err).await; + }, + ("locations-data-attribute-bindings-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_data_attribute_bindings_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-data-attribute-bindings-list", Some(opt)) => { + call_result = self._projects_locations_data_attribute_bindings_list(opt, dry_run, &mut err).await; + }, + ("locations-data-attribute-bindings-patch", Some(opt)) => { + call_result = self._projects_locations_data_attribute_bindings_patch(opt, dry_run, &mut err).await; + }, + ("locations-data-attribute-bindings-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_data_attribute_bindings_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-data-attribute-bindings-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_data_attribute_bindings_test_iam_permissions(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-create", Some(opt)) => { + call_result = self._projects_locations_data_scans_create(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-delete", Some(opt)) => { + call_result = self._projects_locations_data_scans_delete(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-get", Some(opt)) => { + call_result = self._projects_locations_data_scans_get(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_data_scans_get_iam_policy(opt, 
dry_run, &mut err).await; + }, + ("locations-data-scans-jobs-get", Some(opt)) => { + call_result = self._projects_locations_data_scans_jobs_get(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-jobs-list", Some(opt)) => { + call_result = self._projects_locations_data_scans_jobs_list(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-list", Some(opt)) => { + call_result = self._projects_locations_data_scans_list(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-patch", Some(opt)) => { + call_result = self._projects_locations_data_scans_patch(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-run", Some(opt)) => { + call_result = self._projects_locations_data_scans_run(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_data_scans_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-data-scans-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_data_scans_test_iam_permissions(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-attributes-create", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_attributes_create(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-attributes-delete", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_attributes_delete(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-attributes-get", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_attributes_get(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-attributes-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_attributes_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-attributes-list", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_attributes_list(opt, dry_run, &mut err).await; + }, + 
("locations-data-taxonomies-attributes-patch", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_attributes_patch(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-attributes-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_attributes_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-attributes-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_attributes_test_iam_permissions(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-create", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_create(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-delete", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_delete(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-get", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_get(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-list", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_list(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-patch", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_patch(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-data-taxonomies-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_data_taxonomies_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-get", Some(opt)) => { call_result = self._projects_locations_get(opt, dry_run, &mut err).await; }, 
("locations-lakes-actions-list", Some(opt)) => { call_result = self._projects_locations_lakes_actions_list(opt, dry_run, &mut err).await; }, + ("locations-lakes-content-create", Some(opt)) => { + call_result = self._projects_locations_lakes_content_create(opt, dry_run, &mut err).await; + }, + ("locations-lakes-content-delete", Some(opt)) => { + call_result = self._projects_locations_lakes_content_delete(opt, dry_run, &mut err).await; + }, + ("locations-lakes-content-get", Some(opt)) => { + call_result = self._projects_locations_lakes_content_get(opt, dry_run, &mut err).await; + }, ("locations-lakes-content-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_lakes_content_get_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-lakes-content-list", Some(opt)) => { + call_result = self._projects_locations_lakes_content_list(opt, dry_run, &mut err).await; + }, + ("locations-lakes-content-patch", Some(opt)) => { + call_result = self._projects_locations_lakes_content_patch(opt, dry_run, &mut err).await; + }, ("locations-lakes-content-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_lakes_content_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -5290,12 +8788,21 @@ where ("locations-lakes-contentitems-get", Some(opt)) => { call_result = self._projects_locations_lakes_contentitems_get(opt, dry_run, &mut err).await; }, + ("locations-lakes-contentitems-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_lakes_contentitems_get_iam_policy(opt, dry_run, &mut err).await; + }, ("locations-lakes-contentitems-list", Some(opt)) => { call_result = self._projects_locations_lakes_contentitems_list(opt, dry_run, &mut err).await; }, ("locations-lakes-contentitems-patch", Some(opt)) => { call_result = self._projects_locations_lakes_contentitems_patch(opt, dry_run, &mut err).await; }, + ("locations-lakes-contentitems-set-iam-policy", Some(opt)) => { + call_result = 
self._projects_locations_lakes_contentitems_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-lakes-contentitems-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_lakes_contentitems_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-lakes-create", Some(opt)) => { call_result = self._projects_locations_lakes_create(opt, dry_run, &mut err).await; }, @@ -5371,6 +8878,9 @@ where ("locations-lakes-tasks-patch", Some(opt)) => { call_result = self._projects_locations_lakes_tasks_patch(opt, dry_run, &mut err).await; }, + ("locations-lakes-tasks-run", Some(opt)) => { + call_result = self._projects_locations_lakes_tasks_run(opt, dry_run, &mut err).await; + }, ("locations-lakes-tasks-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_lakes_tasks_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -5555,7 +9065,879 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-lakes-actions-list', 'locations-lakes-content-get-iam-policy', 'locations-lakes-content-set-iam-policy', 'locations-lakes-content-test-iam-permissions', 'locations-lakes-contentitems-create', 'locations-lakes-contentitems-delete', 'locations-lakes-contentitems-get', 'locations-lakes-contentitems-list', 'locations-lakes-contentitems-patch', 'locations-lakes-create', 'locations-lakes-delete', 'locations-lakes-environments-create', 'locations-lakes-environments-delete', 'locations-lakes-environments-get', 'locations-lakes-environments-get-iam-policy', 'locations-lakes-environments-list', 'locations-lakes-environments-patch', 'locations-lakes-environments-sessions-list', 'locations-lakes-environments-set-iam-policy', 'locations-lakes-environments-test-iam-permissions', 'locations-lakes-get', 'locations-lakes-get-iam-policy', 'locations-lakes-list', 'locations-lakes-patch', 'locations-lakes-set-iam-policy', 'locations-lakes-tasks-create', 
'locations-lakes-tasks-delete', 'locations-lakes-tasks-get', 'locations-lakes-tasks-get-iam-policy', 'locations-lakes-tasks-jobs-cancel', 'locations-lakes-tasks-jobs-get', 'locations-lakes-tasks-jobs-list', 'locations-lakes-tasks-list', 'locations-lakes-tasks-patch', 'locations-lakes-tasks-set-iam-policy', 'locations-lakes-tasks-test-iam-permissions', 'locations-lakes-test-iam-permissions', 'locations-lakes-zones-actions-list', 'locations-lakes-zones-assets-actions-list', 'locations-lakes-zones-assets-create', 'locations-lakes-zones-assets-delete', 'locations-lakes-zones-assets-get', 'locations-lakes-zones-assets-get-iam-policy', 'locations-lakes-zones-assets-list', 'locations-lakes-zones-assets-patch', 'locations-lakes-zones-assets-set-iam-policy', 'locations-lakes-zones-assets-test-iam-permissions', 'locations-lakes-zones-create', 'locations-lakes-zones-delete', 'locations-lakes-zones-entities-create', 'locations-lakes-zones-entities-delete', 'locations-lakes-zones-entities-get', 'locations-lakes-zones-entities-list', 'locations-lakes-zones-entities-partitions-create', 'locations-lakes-zones-entities-partitions-delete', 'locations-lakes-zones-entities-partitions-get', 'locations-lakes-zones-entities-partitions-list', 'locations-lakes-zones-entities-update', 'locations-lakes-zones-get', 'locations-lakes-zones-get-iam-policy', 'locations-lakes-zones-list', 'locations-lakes-zones-patch', 'locations-lakes-zones-set-iam-policy', 'locations-lakes-zones-test-iam-permissions', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-data-attribute-bindings-create', 'locations-data-attribute-bindings-delete', 'locations-data-attribute-bindings-get', 'locations-data-attribute-bindings-get-iam-policy', 'locations-data-attribute-bindings-list', 'locations-data-attribute-bindings-patch', 'locations-data-attribute-bindings-set-iam-policy', 
'locations-data-attribute-bindings-test-iam-permissions', 'locations-data-scans-create', 'locations-data-scans-delete', 'locations-data-scans-get', 'locations-data-scans-get-iam-policy', 'locations-data-scans-jobs-get', 'locations-data-scans-jobs-list', 'locations-data-scans-list', 'locations-data-scans-patch', 'locations-data-scans-run', 'locations-data-scans-set-iam-policy', 'locations-data-scans-test-iam-permissions', 'locations-data-taxonomies-attributes-create', 'locations-data-taxonomies-attributes-delete', 'locations-data-taxonomies-attributes-get', 'locations-data-taxonomies-attributes-get-iam-policy', 'locations-data-taxonomies-attributes-list', 'locations-data-taxonomies-attributes-patch', 'locations-data-taxonomies-attributes-set-iam-policy', 'locations-data-taxonomies-attributes-test-iam-permissions', 'locations-data-taxonomies-create', 'locations-data-taxonomies-delete', 'locations-data-taxonomies-get', 'locations-data-taxonomies-get-iam-policy', 'locations-data-taxonomies-list', 'locations-data-taxonomies-patch', 'locations-data-taxonomies-set-iam-policy', 'locations-data-taxonomies-test-iam-permissions', 'locations-get', 'locations-lakes-actions-list', 'locations-lakes-content-create', 'locations-lakes-content-delete', 'locations-lakes-content-get', 'locations-lakes-content-get-iam-policy', 'locations-lakes-content-list', 'locations-lakes-content-patch', 'locations-lakes-content-set-iam-policy', 'locations-lakes-content-test-iam-permissions', 'locations-lakes-contentitems-create', 'locations-lakes-contentitems-delete', 'locations-lakes-contentitems-get', 'locations-lakes-contentitems-get-iam-policy', 'locations-lakes-contentitems-list', 'locations-lakes-contentitems-patch', 'locations-lakes-contentitems-set-iam-policy', 'locations-lakes-contentitems-test-iam-permissions', 'locations-lakes-create', 'locations-lakes-delete', 'locations-lakes-environments-create', 'locations-lakes-environments-delete', 'locations-lakes-environments-get', 
'locations-lakes-environments-get-iam-policy', 'locations-lakes-environments-list', 'locations-lakes-environments-patch', 'locations-lakes-environments-sessions-list', 'locations-lakes-environments-set-iam-policy', 'locations-lakes-environments-test-iam-permissions', 'locations-lakes-get', 'locations-lakes-get-iam-policy', 'locations-lakes-list', 'locations-lakes-patch', 'locations-lakes-set-iam-policy', 'locations-lakes-tasks-create', 'locations-lakes-tasks-delete', 'locations-lakes-tasks-get', 'locations-lakes-tasks-get-iam-policy', 'locations-lakes-tasks-jobs-cancel', 'locations-lakes-tasks-jobs-get', 'locations-lakes-tasks-jobs-list', 'locations-lakes-tasks-list', 'locations-lakes-tasks-patch', 'locations-lakes-tasks-run', 'locations-lakes-tasks-set-iam-policy', 'locations-lakes-tasks-test-iam-permissions', 'locations-lakes-test-iam-permissions', 'locations-lakes-zones-actions-list', 'locations-lakes-zones-assets-actions-list', 'locations-lakes-zones-assets-create', 'locations-lakes-zones-assets-delete', 'locations-lakes-zones-assets-get', 'locations-lakes-zones-assets-get-iam-policy', 'locations-lakes-zones-assets-list', 'locations-lakes-zones-assets-patch', 'locations-lakes-zones-assets-set-iam-policy', 'locations-lakes-zones-assets-test-iam-permissions', 'locations-lakes-zones-create', 'locations-lakes-zones-delete', 'locations-lakes-zones-entities-create', 'locations-lakes-zones-entities-delete', 'locations-lakes-zones-entities-get', 'locations-lakes-zones-entities-list', 'locations-lakes-zones-entities-partitions-create', 'locations-lakes-zones-entities-partitions-delete', 'locations-lakes-zones-entities-partitions-get', 'locations-lakes-zones-entities-partitions-list', 'locations-lakes-zones-entities-update', 'locations-lakes-zones-get', 'locations-lakes-zones-get-iam-policy', 'locations-lakes-zones-list', 'locations-lakes-zones-patch', 'locations-lakes-zones-set-iam-policy', 'locations-lakes-zones-test-iam-permissions', 'locations-list', 
'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("locations-data-attribute-bindings-create", + Some(r##"Create a DataAttributeBinding resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-attribute-bindings-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the parent data taxonomy projects/{project_number}/locations/{location_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-attribute-bindings-delete", + Some(r##"Deletes a DataAttributeBinding resource. All attributes within the DataAttributeBinding must be deleted before the DataAttributeBinding can be deleted."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-attribute-bindings-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the DataAttributeBinding: projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-attribute-bindings-get", + Some(r##"Retrieves a DataAttributeBinding resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-attribute-bindings-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the DataAttributeBinding: projects/{project_number}/locations/{location_id}/dataAttributeBindings/{data_attribute_binding_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-attribute-bindings-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-attribute-bindings-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-attribute-bindings-list", + Some(r##"Lists DataAttributeBinding resources in a project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-attribute-bindings-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the Location: projects/{project_number}/locations/{location_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-attribute-bindings-patch", + Some(r##"Updates a DataAttributeBinding resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-attribute-bindings-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. 
The relative resource name of the Data Attribute Binding, of the form: projects/{project_number}/locations/{location}/dataAttributeBindings/{data_attribute_binding_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-attribute-bindings-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy.Can return NOT_FOUND, INVALID_ARGUMENT, and PERMISSION_DENIED errors."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-attribute-bindings-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-attribute-bindings-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this will return an empty set of permissions, not a NOT_FOUND error.Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-attribute-bindings-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-create", + Some(r##"Creates a DataScan resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The resource name of the parent location: projects/{project}/locations/{location_id} where project refers to a project_id or project_number and location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-delete", + Some(r##"Deletes a DataScan resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the dataScan: projects/{project}/locations/{location_id}/dataScans/{data_scan_id} where project refers to a project_id or project_number and location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-get", + Some(r##"Gets a DataScan resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the dataScan: projects/{project}/locations/{location_id}/dataScans/{data_scan_id} where project refers to a project_id or project_number and location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-jobs-get", + Some(r##"Gets a DataScanJob resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-jobs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the DataScanJob: projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/dataScanJobs/{data_scan_job_id} where project refers to a project_id or project_number and location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-jobs-list", + Some(r##"Lists DataScanJobs under the given DataScan."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-jobs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the parent environment: projects/{project}/locations/{location_id}/dataScans/{data_scan_id} where project refers to a project_id or project_number and location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-list", + Some(r##"Lists DataScans."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The resource name of the parent location: projects/{project}/locations/{location_id} where project refers to a project_id or project_number and location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-patch", + Some(r##"Updates a DataScan resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. The relative resource name of the scan, of the form: projects/{project}/locations/{location_id}/dataScans/{datascan_id}, where project refers to a project_id or project_number and location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-run", + Some(r##"Runs an on-demand execution of a DataScan"##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-run", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the DataScan: projects/{project}/locations/{location_id}/dataScans/{data_scan_id}. 
where project refers to a project_id or project_number and location_id refers to a GCP region.Only OnDemand data scans are allowed."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy.Can return NOT_FOUND, INVALID_ARGUMENT, and PERMISSION_DENIED errors."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-scans-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this will return an empty set of permissions, not a NOT_FOUND error.Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-scans-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-attributes-create", + Some(r##"Create a DataAttribute resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-attributes-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The resource name of the parent data taxonomy projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-attributes-delete", + Some(r##"Deletes a Data Attribute resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-attributes-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the DataAttribute: projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-attributes-get", + Some(r##"Retrieves a Data Attribute resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-attributes-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the dataAttribute: projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-attributes-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-attributes-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-attributes-list", + Some(r##"Lists Data Attribute resources in a DataTaxonomy."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-attributes-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The resource name of the DataTaxonomy: projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-attributes-patch", + Some(r##"Updates a DataAttribute resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-attributes-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. The relative resource name of the dataAttribute, of the form: projects/{project_number}/locations/{location_id}/dataTaxonomies/{dataTaxonomy}/attributes/{data_attribute_id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-attributes-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy.Can return NOT_FOUND, INVALID_ARGUMENT, and PERMISSION_DENIED errors."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-attributes-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-attributes-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a NOT_FOUND error.Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-attributes-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-create", + Some(r##"Create a DataTaxonomy resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the data taxonomy location, of the form: projects/{project_number}/locations/{location_id} where location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-delete", + Some(r##"Deletes a DataTaxonomy resource. All attributes within the DataTaxonomy must be deleted before the DataTaxonomy can be deleted."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the DataTaxonomy: projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-get", + Some(r##"Retrieves a DataTaxonomy resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the DataTaxonomy: projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-list", + Some(r##"Lists DataTaxonomy resources in a project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the DataTaxonomy location, of the form: projects/{project_number}/locations/{location_id} where location_id refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-patch", + Some(r##"Updates a DataTaxonomy resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. 
The relative resource name of the DataTaxonomy, of the form: projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy.Can return NOT_FOUND, INVALID_ARGUMENT, and PERMISSION_DENIED errors."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-data-taxonomies-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this will return an empty set of permissions, not a NOT_FOUND error.Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-data-taxonomies-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-get", @@ -5584,7 +9966,79 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. 
The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-content-create", + Some(r##"Create a content."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-content-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the parent lake: projects/{project_id}/locations/{location_id}/lakes/{lake_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-content-delete", + Some(r##"Delete a content."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-content-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the content: projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-content-get", + Some(r##"Get a content resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-content-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the content: projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id}"##), Some(true), Some(false)), @@ -5601,12 +10055,12 @@ async fn main() { Some(false)), ]), ("locations-lakes-content-get-iam-policy", - Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + Some(r##"Gets the access control policy for a contentitem resource. A NOT_FOUND error is returned if the resource does not exist. An empty policy is returned if the resource exists but does not have a policy set on it.Caller must have Google IAM dataplex.content.getIamPolicy permission on the resource."##), "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-content-get-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5616,6 +10070,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-content-list", + Some(r##"List content."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-content-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the parent lake: projects/{project_id}/locations/{location_id}/lakes/{lake_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-content-patch", + Some(r##"Update a content. Only supports full resource update."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-content-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. 
The relative resource name of the content, of the form: projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5623,12 +10127,12 @@ async fn main() { Some(false)), ]), ("locations-lakes-content-set-iam-policy", - Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy.Can return NOT_FOUND, INVALID_ARGUMENT, and PERMISSION_DENIED errors."##), + Some(r##"Sets the access control policy on the specified contentitem resource. Replaces any existing policy.Caller must have Google IAM dataplex.content.setIamPolicy permission on the resource."##), "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-content-set-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5651,12 +10155,12 @@ async fn main() { Some(false)), ]), ("locations-lakes-content-test-iam-permissions", - Some(r##"Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this will return an empty set of permissions, not a NOT_FOUND error.Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + Some(r##"Returns the caller's permissions on a resource. If the resource does not exist, an empty set of permissions is returned (a NOT_FOUND error is not returned).A caller is not required to have Google IAM permission to make this request.Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-content-test-iam-permissions", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5744,6 +10248,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-contentitems-get-iam-policy", + Some(r##"Gets the access control policy for a contentitem resource. A NOT_FOUND error is returned if the resource does not exist. 
An empty policy is returned if the resource exists but does not have a policy set on it.Caller must have Google IAM dataplex.content.getIamPolicy permission on the resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-contentitems-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5794,6 +10320,62 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-contentitems-set-iam-policy", + Some(r##"Sets the access control policy on the specified contentitem resource. Replaces any existing policy.Caller must have Google IAM dataplex.content.setIamPolicy permission on the resource."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-contentitems-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-contentitems-test-iam-permissions", + Some(r##"Returns the caller's permissions on a resource. If the resource does not exist, an empty set of permissions is returned (a NOT_FOUND error is not returned).A caller is not required to have Google IAM permission to make this request.Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-contentitems-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5834,7 +10416,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. The resource name of the lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}."##), Some(true), Some(false)), @@ -5856,7 +10438,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent lake: projects/{project_id}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. The resource name of the parent lake: projects/{project_id}/locations/{location_id}/lakes/{lake_id}."##), Some(true), Some(false)), @@ -5884,7 +10466,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the environment: projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}`"##), + Some(r##"Required. The resource name of the environment: projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}."##), Some(true), Some(false)), @@ -5906,7 +10488,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the environment: projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}"##), + Some(r##"Required. 
The resource name of the environment: projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}."##), Some(true), Some(false)), @@ -5928,7 +10510,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5950,7 +10532,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent lake: projects/{project_id}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. The resource name of the parent lake: projects/{project_id}/locations/{location_id}/lakes/{lake_id}."##), Some(true), Some(false)), @@ -6000,7 +10582,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent environment: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}"##), + Some(r##"Required. The resource name of the parent environment: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}."##), Some(true), Some(false)), @@ -6022,7 +10604,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6050,7 +10632,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6078,7 +10660,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. The resource name of the lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}."##), Some(true), Some(false)), @@ -6100,7 +10682,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6172,7 +10754,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6200,7 +10782,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. 
The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}."##), Some(true), Some(false)), @@ -6228,7 +10810,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the task: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /task/{task_id}`"##), + Some(r##"Required. The resource name of the task: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}."##), Some(true), Some(false)), @@ -6250,7 +10832,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the task: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /tasks/{tasks_id}"##), + Some(r##"Required. The resource name of the task: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{tasks_id}."##), Some(true), Some(false)), @@ -6272,7 +10854,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6294,7 +10876,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the job: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /task/{task_id}/job/{job_id}`"##), + Some(r##"Required. The resource name of the job: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/task/{task_id}/job/{job_id}."##), Some(true), Some(false)), @@ -6322,7 +10904,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. 
The resource name of the job: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /tasks/{task_id}/jobs/{job_id}"##), + Some(r##"Required. The resource name of the job: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}/jobs/{job_id}."##), Some(true), Some(false)), @@ -6344,7 +10926,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent environment: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}"##), + Some(r##"Required. The resource name of the parent environment: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}."##), Some(true), Some(false)), @@ -6366,7 +10948,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}."##), Some(true), Some(false)), @@ -6404,6 +10986,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-lakes-tasks-run", + Some(r##"Run an on demand execution of a Task."##), + "Details at http://byron.github.io/google-apis-rs/google_dataplex1_cli/projects_locations-lakes-tasks-run", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the task: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/tasks/{task_id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -6416,7 +11026,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6444,7 +11054,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6472,7 +11082,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6500,7 +11110,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}"##), + Some(r##"Required. The resource name of the parent zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}."##), Some(true), Some(false)), @@ -6522,7 +11132,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent asset: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}"##), + Some(r##"Required. The resource name of the parent asset: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}."##), Some(true), Some(false)), @@ -6544,7 +11154,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /zones/{zone_id}`"##), + Some(r##"Required. The resource name of the parent zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}."##), Some(true), Some(false)), @@ -6572,7 +11182,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the asset: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /zones/{zone_id}/assets/{asset_id}"##), + Some(r##"Required. The resource name of the asset: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}."##), Some(true), Some(false)), @@ -6594,7 +11204,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. 
The resource name of the asset: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /zones/{zone_id}/assets/{asset_id}"##), + Some(r##"Required. The resource name of the asset: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/assets/{asset_id}."##), Some(true), Some(false)), @@ -6616,7 +11226,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6638,7 +11248,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /zones/{zone_id}`"##), + Some(r##"Required. The resource name of the parent zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}."##), Some(true), Some(false)), @@ -6688,7 +11298,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6716,7 +11326,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6744,7 +11354,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}."##), Some(true), Some(false)), @@ -6772,7 +11382,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /zones/{zone_id}`"##), + Some(r##"Required. The resource name of the zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}."##), Some(true), Some(false)), @@ -7010,7 +11620,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id} /zones/{zone_id}"##), + Some(r##"Required. The resource name of the zone: projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}."##), Some(true), Some(false)), @@ -7032,7 +11642,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7054,7 +11664,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}"##), + Some(r##"Required. 
The resource name of the parent lake: projects/{project_number}/locations/{location_id}/lakes/{lake_id}."##), Some(true), Some(false)), @@ -7104,7 +11714,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7132,7 +11742,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7276,7 +11886,7 @@ async fn main() { let mut app = App::new("dataplex1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20230120") .about("Dataplex API is used to manage the lifecycle of data lakes.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dataplex1_cli") .arg(Arg::with_name("url") diff --git a/gen/dataplex1/Cargo.toml b/gen/dataplex1/Cargo.toml index ec61c77be9..da6da91ef9 100644 --- a/gen/dataplex1/Cargo.toml +++ b/gen/dataplex1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dataplex1" -version = "5.0.2-beta-1+20230120" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Dataplex (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dataplex1" homepage = "https://cloud.google.com/dataplex/docs" -documentation = 
"https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120" +documentation = "https://docs.rs/google-dataplex1/5.0.2+20230120" license = "MIT" keywords = ["dataplex", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dataplex1/README.md b/gen/dataplex1/README.md index 796564ead6..c4fc5f0c85 100644 --- a/gen/dataplex1/README.md +++ b/gen/dataplex1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-dataplex1` library allows access to all features of the *Google Cloud Dataplex* service. -This documentation was generated from *Cloud Dataplex* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *dataplex:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Dataplex* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *dataplex:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Dataplex* *v1* API can be found at the [official documentation site](https://cloud.google.com/dataplex/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/CloudDataplex) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/CloudDataplex) ... 
* projects - * [*locations data attribute bindings create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingCreateCall), [*locations data attribute bindings delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingDeleteCall), [*locations data attribute bindings get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingGetCall), [*locations data attribute bindings get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingGetIamPolicyCall), [*locations data attribute bindings list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingListCall), [*locations data attribute bindings patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingPatchCall), [*locations data attribute bindings set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingSetIamPolicyCall), [*locations data attribute bindings test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingTestIamPermissionCall), [*locations data scans create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanCreateCall), [*locations data scans delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanDeleteCall), [*locations data scans get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanGetCall), [*locations data scans get iam 
policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanGetIamPolicyCall), [*locations data scans jobs get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanJobGetCall), [*locations data scans jobs list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanJobListCall), [*locations data scans list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanListCall), [*locations data scans patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanPatchCall), [*locations data scans run*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanRunCall), [*locations data scans set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanSetIamPolicyCall), [*locations data scans test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataScanTestIamPermissionCall), [*locations data taxonomies attributes create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeCreateCall), [*locations data taxonomies attributes delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeDeleteCall), [*locations data taxonomies attributes get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeGetCall), [*locations data taxonomies attributes get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeGetIamPolicyCall), [*locations data taxonomies attributes 
list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeListCall), [*locations data taxonomies attributes patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributePatchCall), [*locations data taxonomies attributes set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeSetIamPolicyCall), [*locations data taxonomies attributes test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeTestIamPermissionCall), [*locations data taxonomies create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyCreateCall), [*locations data taxonomies delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyDeleteCall), [*locations data taxonomies get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyGetCall), [*locations data taxonomies get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyGetIamPolicyCall), [*locations data taxonomies list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyListCall), [*locations data taxonomies patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyPatchCall), [*locations data taxonomies set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomySetIamPolicyCall), [*locations data taxonomies test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyTestIamPermissionCall), [*locations 
get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationGetCall), [*locations lakes actions list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeActionListCall), [*locations lakes content create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentCreateCall), [*locations lakes content delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentDeleteCall), [*locations lakes content get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentGetCall), [*locations lakes content get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentGetIamPolicyCall), [*locations lakes content list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentListCall), [*locations lakes content patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentPatchCall), [*locations lakes content set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentSetIamPolicyCall), [*locations lakes content test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentTestIamPermissionCall), [*locations lakes contentitems create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentitemCreateCall), [*locations lakes contentitems delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentitemDeleteCall), [*locations lakes contentitems get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentitemGetCall), [*locations lakes 
contentitems get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentitemGetIamPolicyCall), [*locations lakes contentitems list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentitemListCall), [*locations lakes contentitems patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentitemPatchCall), [*locations lakes contentitems set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentitemSetIamPolicyCall), [*locations lakes contentitems test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeContentitemTestIamPermissionCall), [*locations lakes create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeCreateCall), [*locations lakes delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeDeleteCall), [*locations lakes environments create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentCreateCall), [*locations lakes environments delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentDeleteCall), [*locations lakes environments get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentGetCall), [*locations lakes environments get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentGetIamPolicyCall), [*locations lakes environments list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentListCall), [*locations lakes environments 
patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentPatchCall), [*locations lakes environments sessions list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentSessionListCall), [*locations lakes environments set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentSetIamPolicyCall), [*locations lakes environments test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentTestIamPermissionCall), [*locations lakes get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeGetCall), [*locations lakes get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeGetIamPolicyCall), [*locations lakes list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeListCall), [*locations lakes patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakePatchCall), [*locations lakes set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeSetIamPolicyCall), [*locations lakes tasks create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskCreateCall), [*locations lakes tasks delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskDeleteCall), [*locations lakes tasks get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskGetCall), [*locations lakes tasks get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskGetIamPolicyCall), [*locations lakes tasks jobs 
cancel*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskJobCancelCall), [*locations lakes tasks jobs get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskJobGetCall), [*locations lakes tasks jobs list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskJobListCall), [*locations lakes tasks list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskListCall), [*locations lakes tasks patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskPatchCall), [*locations lakes tasks run*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskRunCall), [*locations lakes tasks set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskSetIamPolicyCall), [*locations lakes tasks test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTaskTestIamPermissionCall), [*locations lakes test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeTestIamPermissionCall), [*locations lakes zones actions list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneActionListCall), [*locations lakes zones assets actions list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetActionListCall), [*locations lakes zones assets create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetCreateCall), [*locations lakes zones assets delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetDeleteCall), [*locations lakes 
zones assets get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetGetCall), [*locations lakes zones assets get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetGetIamPolicyCall), [*locations lakes zones assets list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetListCall), [*locations lakes zones assets patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetPatchCall), [*locations lakes zones assets set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetSetIamPolicyCall), [*locations lakes zones assets test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetTestIamPermissionCall), [*locations lakes zones create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneCreateCall), [*locations lakes zones delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneDeleteCall), [*locations lakes zones entities create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityCreateCall), [*locations lakes zones entities delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityDeleteCall), [*locations lakes zones entities get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityGetCall), [*locations lakes zones entities list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityListCall), [*locations lakes zones entities partitions 
create*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityPartitionCreateCall), [*locations lakes zones entities partitions delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityPartitionDeleteCall), [*locations lakes zones entities partitions get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityPartitionGetCall), [*locations lakes zones entities partitions list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityPartitionListCall), [*locations lakes zones entities update*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityUpdateCall), [*locations lakes zones get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneGetCall), [*locations lakes zones get iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneGetIamPolicyCall), [*locations lakes zones list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneListCall), [*locations lakes zones patch*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZonePatchCall), [*locations lakes zones set iam policy*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneSetIamPolicyCall), [*locations lakes zones test iam permissions*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationLakeZoneTestIamPermissionCall), [*locations list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationListCall), [*locations operations 
cancel*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/api::ProjectLocationOperationListCall) + * [*locations data attribute bindings create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingCreateCall), [*locations data attribute bindings delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingDeleteCall), [*locations data attribute bindings get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingGetCall), [*locations data attribute bindings get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingGetIamPolicyCall), [*locations data attribute bindings list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingListCall), [*locations data attribute bindings patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingPatchCall), [*locations data attribute bindings set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingSetIamPolicyCall), [*locations data attribute bindings test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataAttributeBindingTestIamPermissionCall), [*locations data scans 
create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanCreateCall), [*locations data scans delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanDeleteCall), [*locations data scans get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanGetCall), [*locations data scans get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanGetIamPolicyCall), [*locations data scans jobs get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanJobGetCall), [*locations data scans jobs list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanJobListCall), [*locations data scans list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanListCall), [*locations data scans patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanPatchCall), [*locations data scans run*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanRunCall), [*locations data scans set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanSetIamPolicyCall), [*locations data scans test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataScanTestIamPermissionCall), [*locations data taxonomies attributes create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeCreateCall), [*locations data taxonomies attributes delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeDeleteCall), [*locations data taxonomies attributes 
get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeGetCall), [*locations data taxonomies attributes get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeGetIamPolicyCall), [*locations data taxonomies attributes list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeListCall), [*locations data taxonomies attributes patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributePatchCall), [*locations data taxonomies attributes set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeSetIamPolicyCall), [*locations data taxonomies attributes test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyAttributeTestIamPermissionCall), [*locations data taxonomies create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyCreateCall), [*locations data taxonomies delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyDeleteCall), [*locations data taxonomies get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyGetCall), [*locations data taxonomies get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyGetIamPolicyCall), [*locations data taxonomies list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyListCall), [*locations data taxonomies patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyPatchCall), [*locations data taxonomies set iam 
policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomySetIamPolicyCall), [*locations data taxonomies test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationDataTaxonomyTestIamPermissionCall), [*locations get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationGetCall), [*locations lakes actions list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeActionListCall), [*locations lakes content create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentCreateCall), [*locations lakes content delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentDeleteCall), [*locations lakes content get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentGetCall), [*locations lakes content get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentGetIamPolicyCall), [*locations lakes content list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentListCall), [*locations lakes content patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentPatchCall), [*locations lakes content set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentSetIamPolicyCall), [*locations lakes content test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentTestIamPermissionCall), [*locations lakes contentitems create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentitemCreateCall), [*locations lakes contentitems 
delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentitemDeleteCall), [*locations lakes contentitems get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentitemGetCall), [*locations lakes contentitems get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentitemGetIamPolicyCall), [*locations lakes contentitems list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentitemListCall), [*locations lakes contentitems patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentitemPatchCall), [*locations lakes contentitems set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentitemSetIamPolicyCall), [*locations lakes contentitems test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeContentitemTestIamPermissionCall), [*locations lakes create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeCreateCall), [*locations lakes delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeDeleteCall), [*locations lakes environments create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentCreateCall), [*locations lakes environments delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentDeleteCall), [*locations lakes environments get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentGetCall), [*locations lakes environments get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentGetIamPolicyCall), [*locations lakes environments 
list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentListCall), [*locations lakes environments patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentPatchCall), [*locations lakes environments sessions list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentSessionListCall), [*locations lakes environments set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentSetIamPolicyCall), [*locations lakes environments test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeEnvironmentTestIamPermissionCall), [*locations lakes get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeGetCall), [*locations lakes get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeGetIamPolicyCall), [*locations lakes list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeListCall), [*locations lakes patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakePatchCall), [*locations lakes set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeSetIamPolicyCall), [*locations lakes tasks create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskCreateCall), [*locations lakes tasks delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskDeleteCall), [*locations lakes tasks get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskGetCall), [*locations lakes tasks get iam 
policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskGetIamPolicyCall), [*locations lakes tasks jobs cancel*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskJobCancelCall), [*locations lakes tasks jobs get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskJobGetCall), [*locations lakes tasks jobs list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskJobListCall), [*locations lakes tasks list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskListCall), [*locations lakes tasks patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskPatchCall), [*locations lakes tasks run*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskRunCall), [*locations lakes tasks set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskSetIamPolicyCall), [*locations lakes tasks test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTaskTestIamPermissionCall), [*locations lakes test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeTestIamPermissionCall), [*locations lakes zones actions list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneActionListCall), [*locations lakes zones assets actions list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetActionListCall), [*locations lakes zones assets create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetCreateCall), [*locations lakes zones assets 
delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetDeleteCall), [*locations lakes zones assets get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetGetCall), [*locations lakes zones assets get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetGetIamPolicyCall), [*locations lakes zones assets list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetListCall), [*locations lakes zones assets patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetPatchCall), [*locations lakes zones assets set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetSetIamPolicyCall), [*locations lakes zones assets test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneAssetTestIamPermissionCall), [*locations lakes zones create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneCreateCall), [*locations lakes zones delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneDeleteCall), [*locations lakes zones entities create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityCreateCall), [*locations lakes zones entities delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityDeleteCall), [*locations lakes zones entities get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityGetCall), [*locations lakes zones entities list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityListCall), [*locations lakes zones entities partitions 
create*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityPartitionCreateCall), [*locations lakes zones entities partitions delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityPartitionDeleteCall), [*locations lakes zones entities partitions get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityPartitionGetCall), [*locations lakes zones entities partitions list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityPartitionListCall), [*locations lakes zones entities update*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneEntityUpdateCall), [*locations lakes zones get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneGetCall), [*locations lakes zones get iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneGetIamPolicyCall), [*locations lakes zones list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneListCall), [*locations lakes zones patch*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZonePatchCall), [*locations lakes zones set iam policy*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneSetIamPolicyCall), [*locations lakes zones test iam permissions*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationLakeZoneTestIamPermissionCall), [*locations list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationOperationCancelCall), [*locations operations 
delete*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/CloudDataplex)** +* **[Hub](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/CloudDataplex)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::CallBuilder) -* **[Resources](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::CallBuilder) +* **[Resources](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::Part)** + * **[Parts](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -153,17 +153,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -173,29 +173,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::Delegate) to the -[Method Builder](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::Delegate) to the +[Method Builder](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::RequestValue) and -[decodable](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::RequestValue) and +[decodable](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dataplex1/5.0.2-beta-1+20230120/google_dataplex1/client::RequestValue) are moved +* [request values](https://docs.rs/google-dataplex1/5.0.2+20230120/google_dataplex1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/dataplex1/src/api.rs b/gen/dataplex1/src/api.rs index b9e50af4c2..2a2d81be4d 100644 --- a/gen/dataplex1/src/api.rs +++ b/gen/dataplex1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> CloudDataplex { CloudDataplex { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dataplex.googleapis.com/".to_string(), _root_url: "https://dataplex.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> CloudDataplex { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dataplex1/src/client.rs b/gen/dataplex1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dataplex1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dataplex1/src/lib.rs b/gen/dataplex1/src/lib.rs index bb8702caac..c003a8219a 100644 --- a/gen/dataplex1/src/lib.rs +++ b/gen/dataplex1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Dataplex* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *dataplex:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Dataplex* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *dataplex:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Dataplex* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/dataplex/docs). diff --git a/gen/dataproc1-cli/Cargo.toml b/gen/dataproc1-cli/Cargo.toml index 03e90a6109..836e20e3f6 100644 --- a/gen/dataproc1-cli/Cargo.toml +++ b/gen/dataproc1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dataproc1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dataproc (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dataproc1-cli" @@ -20,13 +20,13 @@ name = "dataproc1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dataproc1] path = "../dataproc1" -version = "4.0.1+20220224" +version = "5.0.2+20230103" + diff --git a/gen/dataproc1-cli/README.md b/gen/dataproc1-cli/README.md index 37a710e0ea..9b355c3d94 100644 --- a/gen/dataproc1-cli/README.md +++ b/gen/dataproc1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Dataproc* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Dataproc* API at revision *20230103*. The CLI is at version *5.0.2*. ```bash dataproc1 [options] @@ -42,6 +42,10 @@ dataproc1 [options] locations-batches-delete [-p ]... [-o ] locations-batches-get [-p ]... [-o ] locations-batches-list [-p ]... [-o ] + locations-operations-cancel [-p ]... [-o ] + locations-operations-delete [-p ]... [-o ] + locations-operations-get [-p ]... [-o ] + locations-operations-list [-p ]... [-o ] locations-workflow-templates-create (-r )... [-p ]... [-o ] locations-workflow-templates-delete [-p ]... [-o ] locations-workflow-templates-get [-p ]... [-o ] @@ -67,6 +71,9 @@ dataproc1 [options] regions-clusters-get-iam-policy (-r )... [-p ]... [-o ] regions-clusters-inject-credentials (-r )... [-p ]... [-o ] regions-clusters-list [-p ]... [-o ] + regions-clusters-node-groups-create (-r )... [-p ]... [-o ] + regions-clusters-node-groups-get [-p ]... [-o ] + regions-clusters-node-groups-resize (-r )... [-p ]... [-o ] regions-clusters-patch (-r )... [-p ]... [-o ] regions-clusters-repair (-r )... [-p ]... [-o ] regions-clusters-set-iam-policy (-r )... [-p ]... 
[-o ] diff --git a/gen/dataproc1-cli/mkdocs.yml b/gen/dataproc1-cli/mkdocs.yml index 3b6c2c34ac..adef214d31 100644 --- a/gen/dataproc1-cli/mkdocs.yml +++ b/gen/dataproc1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dataproc v4.0.1+20220224 +site_name: Dataproc v5.0.2+20230103 site_url: http://byron.github.io/google-apis-rs/google-dataproc1-cli site_description: A complete library to interact with Dataproc (protocol v1) @@ -7,78 +7,86 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dataproc1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-autoscaling-policies-create.md', 'Projects', 'Locations Autoscaling Policies Create'] -- ['projects_locations-autoscaling-policies-delete.md', 'Projects', 'Locations Autoscaling Policies Delete'] -- ['projects_locations-autoscaling-policies-get.md', 'Projects', 'Locations Autoscaling Policies Get'] -- ['projects_locations-autoscaling-policies-get-iam-policy.md', 'Projects', 'Locations Autoscaling Policies Get Iam Policy'] -- ['projects_locations-autoscaling-policies-list.md', 'Projects', 'Locations Autoscaling Policies List'] -- ['projects_locations-autoscaling-policies-set-iam-policy.md', 'Projects', 'Locations Autoscaling Policies Set Iam Policy'] -- ['projects_locations-autoscaling-policies-test-iam-permissions.md', 'Projects', 'Locations Autoscaling Policies Test Iam Permissions'] -- ['projects_locations-autoscaling-policies-update.md', 'Projects', 'Locations Autoscaling Policies Update'] -- ['projects_locations-batches-create.md', 'Projects', 'Locations Batches Create'] -- ['projects_locations-batches-delete.md', 'Projects', 'Locations Batches Delete'] -- ['projects_locations-batches-get.md', 'Projects', 'Locations Batches Get'] -- ['projects_locations-batches-list.md', 'Projects', 'Locations Batches List'] -- ['projects_locations-workflow-templates-create.md', 'Projects', 'Locations Workflow Templates Create'] -- 
['projects_locations-workflow-templates-delete.md', 'Projects', 'Locations Workflow Templates Delete'] -- ['projects_locations-workflow-templates-get.md', 'Projects', 'Locations Workflow Templates Get'] -- ['projects_locations-workflow-templates-get-iam-policy.md', 'Projects', 'Locations Workflow Templates Get Iam Policy'] -- ['projects_locations-workflow-templates-instantiate.md', 'Projects', 'Locations Workflow Templates Instantiate'] -- ['projects_locations-workflow-templates-instantiate-inline.md', 'Projects', 'Locations Workflow Templates Instantiate Inline'] -- ['projects_locations-workflow-templates-list.md', 'Projects', 'Locations Workflow Templates List'] -- ['projects_locations-workflow-templates-set-iam-policy.md', 'Projects', 'Locations Workflow Templates Set Iam Policy'] -- ['projects_locations-workflow-templates-test-iam-permissions.md', 'Projects', 'Locations Workflow Templates Test Iam Permissions'] -- ['projects_locations-workflow-templates-update.md', 'Projects', 'Locations Workflow Templates Update'] -- ['projects_regions-autoscaling-policies-create.md', 'Projects', 'Regions Autoscaling Policies Create'] -- ['projects_regions-autoscaling-policies-delete.md', 'Projects', 'Regions Autoscaling Policies Delete'] -- ['projects_regions-autoscaling-policies-get.md', 'Projects', 'Regions Autoscaling Policies Get'] -- ['projects_regions-autoscaling-policies-get-iam-policy.md', 'Projects', 'Regions Autoscaling Policies Get Iam Policy'] -- ['projects_regions-autoscaling-policies-list.md', 'Projects', 'Regions Autoscaling Policies List'] -- ['projects_regions-autoscaling-policies-set-iam-policy.md', 'Projects', 'Regions Autoscaling Policies Set Iam Policy'] -- ['projects_regions-autoscaling-policies-test-iam-permissions.md', 'Projects', 'Regions Autoscaling Policies Test Iam Permissions'] -- ['projects_regions-autoscaling-policies-update.md', 'Projects', 'Regions Autoscaling Policies Update'] -- ['projects_regions-clusters-create.md', 'Projects', 'Regions 
Clusters Create'] -- ['projects_regions-clusters-delete.md', 'Projects', 'Regions Clusters Delete'] -- ['projects_regions-clusters-diagnose.md', 'Projects', 'Regions Clusters Diagnose'] -- ['projects_regions-clusters-get.md', 'Projects', 'Regions Clusters Get'] -- ['projects_regions-clusters-get-iam-policy.md', 'Projects', 'Regions Clusters Get Iam Policy'] -- ['projects_regions-clusters-inject-credentials.md', 'Projects', 'Regions Clusters Inject Credentials'] -- ['projects_regions-clusters-list.md', 'Projects', 'Regions Clusters List'] -- ['projects_regions-clusters-patch.md', 'Projects', 'Regions Clusters Patch'] -- ['projects_regions-clusters-repair.md', 'Projects', 'Regions Clusters Repair'] -- ['projects_regions-clusters-set-iam-policy.md', 'Projects', 'Regions Clusters Set Iam Policy'] -- ['projects_regions-clusters-start.md', 'Projects', 'Regions Clusters Start'] -- ['projects_regions-clusters-stop.md', 'Projects', 'Regions Clusters Stop'] -- ['projects_regions-clusters-test-iam-permissions.md', 'Projects', 'Regions Clusters Test Iam Permissions'] -- ['projects_regions-jobs-cancel.md', 'Projects', 'Regions Jobs Cancel'] -- ['projects_regions-jobs-delete.md', 'Projects', 'Regions Jobs Delete'] -- ['projects_regions-jobs-get.md', 'Projects', 'Regions Jobs Get'] -- ['projects_regions-jobs-get-iam-policy.md', 'Projects', 'Regions Jobs Get Iam Policy'] -- ['projects_regions-jobs-list.md', 'Projects', 'Regions Jobs List'] -- ['projects_regions-jobs-patch.md', 'Projects', 'Regions Jobs Patch'] -- ['projects_regions-jobs-set-iam-policy.md', 'Projects', 'Regions Jobs Set Iam Policy'] -- ['projects_regions-jobs-submit.md', 'Projects', 'Regions Jobs Submit'] -- ['projects_regions-jobs-submit-as-operation.md', 'Projects', 'Regions Jobs Submit As Operation'] -- ['projects_regions-jobs-test-iam-permissions.md', 'Projects', 'Regions Jobs Test Iam Permissions'] -- ['projects_regions-operations-cancel.md', 'Projects', 'Regions Operations Cancel'] -- 
['projects_regions-operations-delete.md', 'Projects', 'Regions Operations Delete'] -- ['projects_regions-operations-get.md', 'Projects', 'Regions Operations Get'] -- ['projects_regions-operations-get-iam-policy.md', 'Projects', 'Regions Operations Get Iam Policy'] -- ['projects_regions-operations-list.md', 'Projects', 'Regions Operations List'] -- ['projects_regions-operations-set-iam-policy.md', 'Projects', 'Regions Operations Set Iam Policy'] -- ['projects_regions-operations-test-iam-permissions.md', 'Projects', 'Regions Operations Test Iam Permissions'] -- ['projects_regions-workflow-templates-create.md', 'Projects', 'Regions Workflow Templates Create'] -- ['projects_regions-workflow-templates-delete.md', 'Projects', 'Regions Workflow Templates Delete'] -- ['projects_regions-workflow-templates-get.md', 'Projects', 'Regions Workflow Templates Get'] -- ['projects_regions-workflow-templates-get-iam-policy.md', 'Projects', 'Regions Workflow Templates Get Iam Policy'] -- ['projects_regions-workflow-templates-instantiate.md', 'Projects', 'Regions Workflow Templates Instantiate'] -- ['projects_regions-workflow-templates-instantiate-inline.md', 'Projects', 'Regions Workflow Templates Instantiate Inline'] -- ['projects_regions-workflow-templates-list.md', 'Projects', 'Regions Workflow Templates List'] -- ['projects_regions-workflow-templates-set-iam-policy.md', 'Projects', 'Regions Workflow Templates Set Iam Policy'] -- ['projects_regions-workflow-templates-test-iam-permissions.md', 'Projects', 'Regions Workflow Templates Test Iam Permissions'] -- ['projects_regions-workflow-templates-update.md', 'Projects', 'Regions Workflow Templates Update'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Autoscaling Policies Create': 'projects_locations-autoscaling-policies-create.md' + - 'Locations Autoscaling Policies Delete': 'projects_locations-autoscaling-policies-delete.md' + - 'Locations Autoscaling Policies Get': 'projects_locations-autoscaling-policies-get.md' + - 
'Locations Autoscaling Policies Get Iam Policy': 'projects_locations-autoscaling-policies-get-iam-policy.md' + - 'Locations Autoscaling Policies List': 'projects_locations-autoscaling-policies-list.md' + - 'Locations Autoscaling Policies Set Iam Policy': 'projects_locations-autoscaling-policies-set-iam-policy.md' + - 'Locations Autoscaling Policies Test Iam Permissions': 'projects_locations-autoscaling-policies-test-iam-permissions.md' + - 'Locations Autoscaling Policies Update': 'projects_locations-autoscaling-policies-update.md' + - 'Locations Batches Create': 'projects_locations-batches-create.md' + - 'Locations Batches Delete': 'projects_locations-batches-delete.md' + - 'Locations Batches Get': 'projects_locations-batches-get.md' + - 'Locations Batches List': 'projects_locations-batches-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Workflow Templates Create': 'projects_locations-workflow-templates-create.md' + - 'Locations Workflow Templates Delete': 'projects_locations-workflow-templates-delete.md' + - 'Locations Workflow Templates Get': 'projects_locations-workflow-templates-get.md' + - 'Locations Workflow Templates Get Iam Policy': 'projects_locations-workflow-templates-get-iam-policy.md' + - 'Locations Workflow Templates Instantiate': 'projects_locations-workflow-templates-instantiate.md' + - 'Locations Workflow Templates Instantiate Inline': 'projects_locations-workflow-templates-instantiate-inline.md' + - 'Locations Workflow Templates List': 'projects_locations-workflow-templates-list.md' + - 'Locations Workflow Templates Set Iam Policy': 'projects_locations-workflow-templates-set-iam-policy.md' + - 'Locations Workflow Templates Test Iam Permissions': 
'projects_locations-workflow-templates-test-iam-permissions.md' + - 'Locations Workflow Templates Update': 'projects_locations-workflow-templates-update.md' + - 'Regions Autoscaling Policies Create': 'projects_regions-autoscaling-policies-create.md' + - 'Regions Autoscaling Policies Delete': 'projects_regions-autoscaling-policies-delete.md' + - 'Regions Autoscaling Policies Get': 'projects_regions-autoscaling-policies-get.md' + - 'Regions Autoscaling Policies Get Iam Policy': 'projects_regions-autoscaling-policies-get-iam-policy.md' + - 'Regions Autoscaling Policies List': 'projects_regions-autoscaling-policies-list.md' + - 'Regions Autoscaling Policies Set Iam Policy': 'projects_regions-autoscaling-policies-set-iam-policy.md' + - 'Regions Autoscaling Policies Test Iam Permissions': 'projects_regions-autoscaling-policies-test-iam-permissions.md' + - 'Regions Autoscaling Policies Update': 'projects_regions-autoscaling-policies-update.md' + - 'Regions Clusters Create': 'projects_regions-clusters-create.md' + - 'Regions Clusters Delete': 'projects_regions-clusters-delete.md' + - 'Regions Clusters Diagnose': 'projects_regions-clusters-diagnose.md' + - 'Regions Clusters Get': 'projects_regions-clusters-get.md' + - 'Regions Clusters Get Iam Policy': 'projects_regions-clusters-get-iam-policy.md' + - 'Regions Clusters Inject Credentials': 'projects_regions-clusters-inject-credentials.md' + - 'Regions Clusters List': 'projects_regions-clusters-list.md' + - 'Regions Clusters Node Groups Create': 'projects_regions-clusters-node-groups-create.md' + - 'Regions Clusters Node Groups Get': 'projects_regions-clusters-node-groups-get.md' + - 'Regions Clusters Node Groups Resize': 'projects_regions-clusters-node-groups-resize.md' + - 'Regions Clusters Patch': 'projects_regions-clusters-patch.md' + - 'Regions Clusters Repair': 'projects_regions-clusters-repair.md' + - 'Regions Clusters Set Iam Policy': 'projects_regions-clusters-set-iam-policy.md' + - 'Regions Clusters Start': 
'projects_regions-clusters-start.md' + - 'Regions Clusters Stop': 'projects_regions-clusters-stop.md' + - 'Regions Clusters Test Iam Permissions': 'projects_regions-clusters-test-iam-permissions.md' + - 'Regions Jobs Cancel': 'projects_regions-jobs-cancel.md' + - 'Regions Jobs Delete': 'projects_regions-jobs-delete.md' + - 'Regions Jobs Get': 'projects_regions-jobs-get.md' + - 'Regions Jobs Get Iam Policy': 'projects_regions-jobs-get-iam-policy.md' + - 'Regions Jobs List': 'projects_regions-jobs-list.md' + - 'Regions Jobs Patch': 'projects_regions-jobs-patch.md' + - 'Regions Jobs Set Iam Policy': 'projects_regions-jobs-set-iam-policy.md' + - 'Regions Jobs Submit': 'projects_regions-jobs-submit.md' + - 'Regions Jobs Submit As Operation': 'projects_regions-jobs-submit-as-operation.md' + - 'Regions Jobs Test Iam Permissions': 'projects_regions-jobs-test-iam-permissions.md' + - 'Regions Operations Cancel': 'projects_regions-operations-cancel.md' + - 'Regions Operations Delete': 'projects_regions-operations-delete.md' + - 'Regions Operations Get': 'projects_regions-operations-get.md' + - 'Regions Operations Get Iam Policy': 'projects_regions-operations-get-iam-policy.md' + - 'Regions Operations List': 'projects_regions-operations-list.md' + - 'Regions Operations Set Iam Policy': 'projects_regions-operations-set-iam-policy.md' + - 'Regions Operations Test Iam Permissions': 'projects_regions-operations-test-iam-permissions.md' + - 'Regions Workflow Templates Create': 'projects_regions-workflow-templates-create.md' + - 'Regions Workflow Templates Delete': 'projects_regions-workflow-templates-delete.md' + - 'Regions Workflow Templates Get': 'projects_regions-workflow-templates-get.md' + - 'Regions Workflow Templates Get Iam Policy': 'projects_regions-workflow-templates-get-iam-policy.md' + - 'Regions Workflow Templates Instantiate': 'projects_regions-workflow-templates-instantiate.md' + - 'Regions Workflow Templates Instantiate Inline': 
'projects_regions-workflow-templates-instantiate-inline.md' + - 'Regions Workflow Templates List': 'projects_regions-workflow-templates-list.md' + - 'Regions Workflow Templates Set Iam Policy': 'projects_regions-workflow-templates-set-iam-policy.md' + - 'Regions Workflow Templates Test Iam Permissions': 'projects_regions-workflow-templates-test-iam-permissions.md' + - 'Regions Workflow Templates Update': 'projects_regions-workflow-templates-update.md' theme: readthedocs diff --git a/gen/dataproc1-cli/src/client.rs b/gen/dataproc1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dataproc1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dataproc1-cli/src/main.rs b/gen/dataproc1-cli/src/main.rs index b2804d5ff0..8e003886f0 100644 --- a/gen/dataproc1-cli/src/main.rs +++ b/gen/dataproc1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dataproc1::{api, Error, oauth2}; +use google_dataproc1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -354,7 +353,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -703,6 +702,7 @@ where match &temp_cursor.to_string()[..] 
{ "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "creator" => Some(("creator", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "environment-config.execution-config.idle-ttl" => Some(("environmentConfig.executionConfig.idleTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "environment-config.execution-config.kms-key" => Some(("environmentConfig.executionConfig.kmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "environment-config.execution-config.network-tags" => Some(("environmentConfig.executionConfig.networkTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "environment-config.execution-config.network-uri" => Some(("environmentConfig.executionConfig.networkUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -722,6 +722,11 @@ where "runtime-config.container-image" => Some(("runtimeConfig.containerImage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "runtime-config.properties" => Some(("runtimeConfig.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "runtime-config.version" => Some(("runtimeConfig.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "runtime-info.approximate-usage.milli-dcu-seconds" => Some(("runtimeInfo.approximateUsage.milliDcuSeconds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "runtime-info.approximate-usage.shuffle-storage-gb-seconds" => Some(("runtimeInfo.approximateUsage.shuffleStorageGbSeconds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "runtime-info.current-usage.milli-dcu" => Some(("runtimeInfo.currentUsage.milliDcu", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "runtime-info.current-usage.shuffle-storage-gb" => Some(("runtimeInfo.currentUsage.shuffleStorageGb", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "runtime-info.current-usage.snapshot-time" => Some(("runtimeInfo.currentUsage.snapshotTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "runtime-info.diagnostic-output-uri" => Some(("runtimeInfo.diagnosticOutputUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "runtime-info.endpoints" => Some(("runtimeInfo.endpoints", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "runtime-info.output-uri" => Some(("runtimeInfo.outputUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -743,7 +748,7 @@ where "state-time" => Some(("stateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uuid" => Some(("uuid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "container-image", "create-time", "creator", "dataproc-cluster", "diagnostic-output-uri", "endpoints", "environment-config", "execution-config", "file-uris", "jar-file-uris", "kms-key", "labels", "main-class", "main-jar-file-uri", "main-python-file-uri", "main-r-file-uri", "metastore-service", "name", "network-tags", "network-uri", "operation", "output-uri", "peripherals-config", "properties", "pyspark-batch", "python-file-uris", "query-file-uri", "query-variables", "runtime-config", "runtime-info", "service-account", "spark-batch", "spark-history-server-config", "spark-r-batch", "spark-sql-batch", "state", "state-message", "state-time", "subnetwork-uri", "uuid", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["approximate-usage", "archive-uris", "args", "container-image", "create-time", "creator", "current-usage", "dataproc-cluster", "diagnostic-output-uri", "endpoints", "environment-config", "execution-config", "file-uris", "idle-ttl", "jar-file-uris", "kms-key", "labels", "main-class", "main-jar-file-uri", "main-python-file-uri", 
"main-r-file-uri", "metastore-service", "milli-dcu", "milli-dcu-seconds", "name", "network-tags", "network-uri", "operation", "output-uri", "peripherals-config", "properties", "pyspark-batch", "python-file-uris", "query-file-uri", "query-variables", "runtime-config", "runtime-info", "service-account", "shuffle-storage-gb", "shuffle-storage-gb-seconds", "snapshot-time", "spark-batch", "spark-history-server-config", "spark-r-batch", "spark-sql-batch", "state", "state-message", "state-time", "subnetwork-uri", "uuid", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -924,7 +929,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -973,6 +978,224 @@ where } } + async fn _projects_locations_operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_cancel(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_list(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); 
+ Ok(()) + } + } + } + } + async fn _projects_locations_workflow_templates_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1026,6 +1249,7 @@ where "placement.managed-cluster.config.gce-cluster-config.subnetwork-uri" => Some(("placement.managedCluster.config.gceClusterConfig.subnetworkUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gce-cluster-config.tags" => Some(("placement.managedCluster.config.gceClusterConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "placement.managed-cluster.config.gce-cluster-config.zone-uri" => Some(("placement.managedCluster.config.gceClusterConfig.zoneUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "placement.managed-cluster.config.gke-cluster-config.gke-cluster-target" => Some(("placement.managedCluster.config.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.lifecycle-config.auto-delete-time" => Some(("placement.managedCluster.config.lifecycleConfig.autoDeleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1096,7 +1320,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { 
jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", 
"version", "worker-config", "zone", "zone-uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "gke-cluster-target", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", 
"values", "version", "worker-config", "zone", "zone-uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1163,7 +1387,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "version" => { - call = call.version(arg_from_str(value.unwrap_or("-0"), err, "version", "integer")); + call = call.version( value.map(|v| arg_from_str(v, err, "version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1219,7 +1443,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "version" => { - call = call.version(arg_from_str(value.unwrap_or("-0"), err, "version", "integer")); + call = call.version( value.map(|v| arg_from_str(v, err, "version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1493,6 +1717,7 @@ where "placement.managed-cluster.config.gce-cluster-config.subnetwork-uri" => Some(("placement.managedCluster.config.gceClusterConfig.subnetworkUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gce-cluster-config.tags" => Some(("placement.managedCluster.config.gceClusterConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "placement.managed-cluster.config.gce-cluster-config.zone-uri" => Some(("placement.managedCluster.config.gceClusterConfig.zoneUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "placement.managed-cluster.config.gke-cluster-config.gke-cluster-target" => Some(("placement.managedCluster.config.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.lifecycle-config.auto-delete-time" => Some(("placement.managedCluster.config.lifecycleConfig.autoDeleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1563,7 +1788,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", 
"metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "gke-cluster-target", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", 
"metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1637,7 +1862,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1910,6 +2135,7 @@ where "placement.managed-cluster.config.gce-cluster-config.subnetwork-uri" => Some(("placement.managedCluster.config.gceClusterConfig.subnetworkUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gce-cluster-config.tags" => Some(("placement.managedCluster.config.gceClusterConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "placement.managed-cluster.config.gce-cluster-config.zone-uri" => Some(("placement.managedCluster.config.gceClusterConfig.zoneUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "placement.managed-cluster.config.gke-cluster-config.gke-cluster-target" => Some(("placement.managedCluster.config.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.lifecycle-config.auto-delete-time" => Some(("placement.managedCluster.config.lifecycleConfig.autoDeleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1980,7 +2206,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", 
"instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "gke-cluster-target", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", 
"instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2343,7 +2569,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2714,6 +2940,7 @@ where "config.gce-cluster-config.subnetwork-uri" => Some(("config.gceClusterConfig.subnetworkUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.gce-cluster-config.tags" => Some(("config.gceClusterConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.gce-cluster-config.zone-uri" => Some(("config.gceClusterConfig.zoneUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "config.gke-cluster-config.gke-cluster-target" => Some(("config.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("config.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("config.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.lifecycle-config.auto-delete-time" => Some(("config.lifecycleConfig.autoDeleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2788,8 +3015,17 @@ where "status.state" => Some(("status.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.state-start-time" => Some(("status.stateStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.substate" => Some(("status.substate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.auxiliary-services-config.metastore-config.dataproc-metastore-service" => Some(("virtualClusterConfig.auxiliaryServicesConfig.metastoreConfig.dataprocMetastoreService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.auxiliary-services-config.spark-history-server-config.dataproc-cluster" => Some(("virtualClusterConfig.auxiliaryServicesConfig.sparkHistoryServerConfig.dataprocCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.gke-cluster-config.gke-cluster-target" => Some(("virtualClusterConfig.kubernetesClusterConfig.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"virtual-cluster-config.kubernetes-cluster-config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("virtualClusterConfig.kubernetesClusterConfig.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("virtualClusterConfig.kubernetesClusterConfig.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.kubernetes-namespace" => Some(("virtualClusterConfig.kubernetesClusterConfig.kubernetesNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.kubernetes-software-config.component-version" => Some(("virtualClusterConfig.kubernetesClusterConfig.kubernetesSoftwareConfig.componentVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "virtual-cluster-config.kubernetes-cluster-config.kubernetes-software-config.properties" => Some(("virtualClusterConfig.kubernetesClusterConfig.kubernetesSoftwareConfig.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "virtual-cluster-config.staging-bucket" => Some(("virtualClusterConfig.stagingBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-name", "cluster-namespace", "cluster-uuid", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dataproc-metastore-service", "detail", 
"disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "hdfs-metrics", "http-ports", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-group-config", "master-config", "metadata", "metastore-config", "metrics", "min-cpu-platform", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "policy-uri", "preemptibility", "private-ipv6-google-access", "project-id", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "state", "state-start-time", "status", "subnetwork-uri", "substate", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "user-service-account-mapping", "values", "worker-config", "yarn-metrics", "zone-uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "auxiliary-services-config", "boot-disk-size-gb", "boot-disk-type", "cluster-name", "cluster-namespace", "cluster-uuid", "component-version", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dataproc-cluster", 
"dataproc-metastore-service", "detail", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "gke-cluster-target", "hdfs-metrics", "http-ports", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "kubernetes-cluster-config", "kubernetes-namespace", "kubernetes-software-config", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-group-config", "master-config", "metadata", "metastore-config", "metrics", "min-cpu-platform", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "policy-uri", "preemptibility", "private-ipv6-google-access", "project-id", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "spark-history-server-config", "staging-bucket", "state", "state-start-time", "status", "subnetwork-uri", "substate", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "user-service-account-mapping", "values", "virtual-cluster-config", "worker-config", "yarn-metrics", "zone-uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3232,7 +3468,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3284,6 +3520,252 @@ where } } + async fn _projects_regions_clusters_node_groups_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-group-config.disk-config.boot-disk-size-gb" => Some(("nodeGroupConfig.diskConfig.bootDiskSizeGb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-group-config.disk-config.boot-disk-type" => Some(("nodeGroupConfig.diskConfig.bootDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-group-config.disk-config.local-ssd-interface" => Some(("nodeGroupConfig.diskConfig.localSsdInterface", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-group-config.disk-config.num-local-ssds" => Some(("nodeGroupConfig.diskConfig.numLocalSsds", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-group-config.image-uri" => Some(("nodeGroupConfig.imageUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-group-config.instance-names" => Some(("nodeGroupConfig.instanceNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "node-group-config.is-preemptible" => Some(("nodeGroupConfig.isPreemptible", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "node-group-config.machine-type-uri" => Some(("nodeGroupConfig.machineTypeUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-group-config.managed-group-config.instance-group-manager-name" => Some(("nodeGroupConfig.managedGroupConfig.instanceGroupManagerName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-group-config.managed-group-config.instance-template-name" => Some(("nodeGroupConfig.managedGroupConfig.instanceTemplateName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "node-group-config.min-cpu-platform" => Some(("nodeGroupConfig.minCpuPlatform", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "node-group-config.num-instances" => Some(("nodeGroupConfig.numInstances", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "node-group-config.preemptibility" => Some(("nodeGroupConfig.preemptibility", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "roles" => Some(("roles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["boot-disk-size-gb", "boot-disk-type", "disk-config", "image-uri", "instance-group-manager-name", "instance-names", "instance-template-name", "is-preemptible", "labels", "local-ssd-interface", "machine-type-uri", "managed-group-config", "min-cpu-platform", "name", "node-group-config", "num-instances", "num-local-ssds", "preemptibility", "roles"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NodeGroup = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().regions_clusters_node_groups_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + "node-group-id" => { + call = call.node_group_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); 
+ v.extend(self.gp.iter().map(|v|*v)); + v.extend(["node-group-id", "request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_regions_clusters_node_groups_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().regions_clusters_node_groups_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = 
call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_regions_clusters_node_groups_resize(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "graceful-decommission-timeout" => Some(("gracefulDecommissionTimeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "size" => Some(("size", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["graceful-decommission-timeout", "request-id", "size"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ResizeNodeGroupRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().regions_clusters_node_groups_resize(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_regions_clusters_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3331,6 +3813,7 @@ where "config.gce-cluster-config.subnetwork-uri" => Some(("config.gceClusterConfig.subnetworkUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.gce-cluster-config.tags" => Some(("config.gceClusterConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.gce-cluster-config.zone-uri" => Some(("config.gceClusterConfig.zoneUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.gke-cluster-config.gke-cluster-target" => Some(("config.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("config.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("config.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.lifecycle-config.auto-delete-time" => Some(("config.lifecycleConfig.autoDeleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3405,8 +3888,17 @@ where "status.state" => Some(("status.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.state-start-time" => 
Some(("status.stateStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.substate" => Some(("status.substate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.auxiliary-services-config.metastore-config.dataproc-metastore-service" => Some(("virtualClusterConfig.auxiliaryServicesConfig.metastoreConfig.dataprocMetastoreService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.auxiliary-services-config.spark-history-server-config.dataproc-cluster" => Some(("virtualClusterConfig.auxiliaryServicesConfig.sparkHistoryServerConfig.dataprocCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.gke-cluster-config.gke-cluster-target" => Some(("virtualClusterConfig.kubernetesClusterConfig.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("virtualClusterConfig.kubernetesClusterConfig.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("virtualClusterConfig.kubernetesClusterConfig.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.kubernetes-namespace" => Some(("virtualClusterConfig.kubernetesClusterConfig.kubernetesNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-cluster-config.kubernetes-cluster-config.kubernetes-software-config.component-version" => 
Some(("virtualClusterConfig.kubernetesClusterConfig.kubernetesSoftwareConfig.componentVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "virtual-cluster-config.kubernetes-cluster-config.kubernetes-software-config.properties" => Some(("virtualClusterConfig.kubernetesClusterConfig.kubernetesSoftwareConfig.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "virtual-cluster-config.staging-bucket" => Some(("virtualClusterConfig.stagingBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-name", "cluster-namespace", "cluster-uuid", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dataproc-metastore-service", "detail", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "hdfs-metrics", "http-ports", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-group-config", "master-config", "metadata", "metastore-config", "metrics", "min-cpu-platform", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", 
"policy-uri", "preemptibility", "private-ipv6-google-access", "project-id", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "state", "state-start-time", "status", "subnetwork-uri", "substate", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "user-service-account-mapping", "values", "worker-config", "yarn-metrics", "zone-uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "auxiliary-services-config", "boot-disk-size-gb", "boot-disk-type", "cluster-name", "cluster-namespace", "cluster-uuid", "component-version", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dataproc-cluster", "dataproc-metastore-service", "detail", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "gke-cluster-target", "hdfs-metrics", "http-ports", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "kubernetes-cluster-config", "kubernetes-namespace", "kubernetes-software-config", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-group-config", "master-config", "metadata", "metastore-config", "metrics", "min-cpu-platform", 
"namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "policy-uri", "preemptibility", "private-ipv6-google-access", "project-id", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "spark-history-server-config", "staging-bucket", "state", "state-start-time", "status", "subnetwork-uri", "substate", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "user-service-account-mapping", "values", "virtual-cluster-config", "worker-config", "yarn-metrics", "zone-uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3421,13 +3913,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, "graceful-decommission-timeout" => { - call = call.graceful_decommission_timeout(value.unwrap_or("")); + call = call.graceful_decommission_timeout( value.map(|v| arg_from_str(v, err, "graceful-decommission-timeout", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -3500,9 +3992,11 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "cluster-uuid" => Some(("clusterUuid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "graceful-decommission-timeout" => Some(("gracefulDecommissionTimeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent-operation-id" => Some(("parentOperationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cluster-uuid", "request-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cluster-uuid", "graceful-decommission-timeout", "parent-operation-id", "request-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4188,7 +4682,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "job-state-matcher" => { call = call.job_state_matcher(value.unwrap_or("")); @@ -4272,6 +4766,8 @@ where "done" => Some(("done", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "driver-control-files-uri" => Some(("driverControlFilesUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "driver-output-resource-uri" => Some(("driverOutputResourceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "driver-scheduling-config.memory-mb" => Some(("driverSchedulingConfig.memoryMb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "driver-scheduling-config.vcores" => Some(("driverSchedulingConfig.vcores", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "hadoop-job.archive-uris" => Some(("hadoopJob.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), "hadoop-job.args" => Some(("hadoopJob.args", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "hadoop-job.file-uris" => Some(("hadoopJob.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -4341,8 +4837,15 @@ where "status.state" => Some(("status.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.state-start-time" => Some(("status.stateStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.substate" => Some(("status.substate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "trino-job.client-tags" => Some(("trinoJob.clientTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "trino-job.continue-on-failure" => Some(("trinoJob.continueOnFailure", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "trino-job.logging-config.driver-log-levels" => Some(("trinoJob.loggingConfig.driverLogLevels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "trino-job.output-format" => Some(("trinoJob.outputFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "trino-job.properties" => Some(("trinoJob.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "trino-job.query-file-uri" => Some(("trinoJob.queryFileUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "trino-job.query-list.queries" => Some(("trinoJob.queryList.queries", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "client-tags", "cluster-labels", "cluster-name", "cluster-uuid", "continue-on-failure", "details", "done", "driver-control-files-uri", "driver-log-levels", "driver-output-resource-uri", "file-uris", "hadoop-job", "hive-job", "jar-file-uris", "job-id", "job-uuid", "labels", "logging-config", 
"main-class", "main-jar-file-uri", "main-python-file-uri", "main-r-file-uri", "max-failures-per-hour", "max-failures-total", "output-format", "pig-job", "placement", "presto-job", "project-id", "properties", "pyspark-job", "python-file-uris", "queries", "query-file-uri", "query-list", "reference", "scheduling", "script-variables", "spark-job", "spark-r-job", "spark-sql-job", "state", "state-start-time", "status", "substate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "client-tags", "cluster-labels", "cluster-name", "cluster-uuid", "continue-on-failure", "details", "done", "driver-control-files-uri", "driver-log-levels", "driver-output-resource-uri", "driver-scheduling-config", "file-uris", "hadoop-job", "hive-job", "jar-file-uris", "job-id", "job-uuid", "labels", "logging-config", "main-class", "main-jar-file-uri", "main-python-file-uri", "main-r-file-uri", "max-failures-per-hour", "max-failures-total", "memory-mb", "output-format", "pig-job", "placement", "presto-job", "project-id", "properties", "pyspark-job", "python-file-uris", "queries", "query-file-uri", "query-list", "reference", "scheduling", "script-variables", "spark-job", "spark-r-job", "spark-sql-job", "state", "state-start-time", "status", "substate", "trino-job", "vcores"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4357,7 +4860,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4518,6 +5021,8 @@ where "job.done" => Some(("job.done", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "job.driver-control-files-uri" => Some(("job.driverControlFilesUri", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "job.driver-output-resource-uri" => Some(("job.driverOutputResourceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job.driver-scheduling-config.memory-mb" => Some(("job.driverSchedulingConfig.memoryMb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "job.driver-scheduling-config.vcores" => Some(("job.driverSchedulingConfig.vcores", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "job.hadoop-job.archive-uris" => Some(("job.hadoopJob.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "job.hadoop-job.args" => Some(("job.hadoopJob.args", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "job.hadoop-job.file-uris" => Some(("job.hadoopJob.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -4587,9 +5092,16 @@ where "job.status.state" => Some(("job.status.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "job.status.state-start-time" => Some(("job.status.stateStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "job.status.substate" => Some(("job.status.substate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job.trino-job.client-tags" => Some(("job.trinoJob.clientTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "job.trino-job.continue-on-failure" => Some(("job.trinoJob.continueOnFailure", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "job.trino-job.logging-config.driver-log-levels" => Some(("job.trinoJob.loggingConfig.driverLogLevels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "job.trino-job.output-format" => Some(("job.trinoJob.outputFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job.trino-job.properties" => Some(("job.trinoJob.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map 
})), + "job.trino-job.query-file-uri" => Some(("job.trinoJob.queryFileUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job.trino-job.query-list.queries" => Some(("job.trinoJob.queryList.queries", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "client-tags", "cluster-labels", "cluster-name", "cluster-uuid", "continue-on-failure", "details", "done", "driver-control-files-uri", "driver-log-levels", "driver-output-resource-uri", "file-uris", "hadoop-job", "hive-job", "jar-file-uris", "job", "job-id", "job-uuid", "labels", "logging-config", "main-class", "main-jar-file-uri", "main-python-file-uri", "main-r-file-uri", "max-failures-per-hour", "max-failures-total", "output-format", "pig-job", "placement", "presto-job", "project-id", "properties", "pyspark-job", "python-file-uris", "queries", "query-file-uri", "query-list", "reference", "request-id", "scheduling", "script-variables", "spark-job", "spark-r-job", "spark-sql-job", "state", "state-start-time", "status", "substate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "client-tags", "cluster-labels", "cluster-name", "cluster-uuid", "continue-on-failure", "details", "done", "driver-control-files-uri", "driver-log-levels", "driver-output-resource-uri", "driver-scheduling-config", "file-uris", "hadoop-job", "hive-job", "jar-file-uris", "job", "job-id", "job-uuid", "labels", "logging-config", "main-class", "main-jar-file-uri", "main-python-file-uri", "main-r-file-uri", "max-failures-per-hour", "max-failures-total", "memory-mb", "output-format", "pig-job", "placement", "presto-job", "project-id", "properties", "pyspark-job", "python-file-uris", "queries", "query-file-uri", "query-list", "reference", "request-id", "scheduling", "script-variables", 
"spark-job", "spark-r-job", "spark-sql-job", "state", "state-start-time", "status", "substate", "trino-job", "vcores"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4675,6 +5187,8 @@ where "job.done" => Some(("job.done", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "job.driver-control-files-uri" => Some(("job.driverControlFilesUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "job.driver-output-resource-uri" => Some(("job.driverOutputResourceUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job.driver-scheduling-config.memory-mb" => Some(("job.driverSchedulingConfig.memoryMb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "job.driver-scheduling-config.vcores" => Some(("job.driverSchedulingConfig.vcores", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "job.hadoop-job.archive-uris" => Some(("job.hadoopJob.archiveUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "job.hadoop-job.args" => Some(("job.hadoopJob.args", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "job.hadoop-job.file-uris" => Some(("job.hadoopJob.fileUris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -4744,9 +5258,16 @@ where "job.status.state" => Some(("job.status.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "job.status.state-start-time" => Some(("job.status.stateStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "job.status.substate" => Some(("job.status.substate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job.trino-job.client-tags" => Some(("job.trinoJob.clientTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "job.trino-job.continue-on-failure" => Some(("job.trinoJob.continueOnFailure", JsonTypeInfo { 
jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "job.trino-job.logging-config.driver-log-levels" => Some(("job.trinoJob.loggingConfig.driverLogLevels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "job.trino-job.output-format" => Some(("job.trinoJob.outputFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job.trino-job.properties" => Some(("job.trinoJob.properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "job.trino-job.query-file-uri" => Some(("job.trinoJob.queryFileUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "job.trino-job.query-list.queries" => Some(("job.trinoJob.queryList.queries", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "client-tags", "cluster-labels", "cluster-name", "cluster-uuid", "continue-on-failure", "details", "done", "driver-control-files-uri", "driver-log-levels", "driver-output-resource-uri", "file-uris", "hadoop-job", "hive-job", "jar-file-uris", "job", "job-id", "job-uuid", "labels", "logging-config", "main-class", "main-jar-file-uri", "main-python-file-uri", "main-r-file-uri", "max-failures-per-hour", "max-failures-total", "output-format", "pig-job", "placement", "presto-job", "project-id", "properties", "pyspark-job", "python-file-uris", "queries", "query-file-uri", "query-list", "reference", "request-id", "scheduling", "script-variables", "spark-job", "spark-r-job", "spark-sql-job", "state", "state-start-time", "status", "substate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["archive-uris", "args", "client-tags", "cluster-labels", "cluster-name", "cluster-uuid", "continue-on-failure", "details", "done", "driver-control-files-uri", "driver-log-levels", "driver-output-resource-uri", 
"driver-scheduling-config", "file-uris", "hadoop-job", "hive-job", "jar-file-uris", "job", "job-id", "job-uuid", "labels", "logging-config", "main-class", "main-jar-file-uri", "main-python-file-uri", "main-r-file-uri", "max-failures-per-hour", "max-failures-total", "memory-mb", "output-format", "pig-job", "placement", "presto-job", "project-id", "properties", "pyspark-job", "python-file-uris", "queries", "query-file-uri", "query-list", "reference", "request-id", "scheduling", "script-variables", "spark-job", "spark-r-job", "spark-sql-job", "state", "state-start-time", "status", "substate", "trino-job", "vcores"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5142,7 +5663,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5418,6 +5939,7 @@ where "placement.managed-cluster.config.gce-cluster-config.subnetwork-uri" => Some(("placement.managedCluster.config.gceClusterConfig.subnetworkUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gce-cluster-config.tags" => Some(("placement.managedCluster.config.gceClusterConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "placement.managed-cluster.config.gce-cluster-config.zone-uri" => Some(("placement.managedCluster.config.gceClusterConfig.zoneUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "placement.managed-cluster.config.gke-cluster-config.gke-cluster-target" => Some(("placement.managedCluster.config.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.lifecycle-config.auto-delete-time" => Some(("placement.managedCluster.config.lifecycleConfig.autoDeleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5488,7 +6010,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", 
"is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "gke-cluster-target", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", 
"internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5555,7 +6077,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "version" => { - call = call.version(arg_from_str(value.unwrap_or("-0"), err, "version", "integer")); + call = call.version( value.map(|v| arg_from_str(v, err, "version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5611,7 +6133,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "version" => { - call = call.version(arg_from_str(value.unwrap_or("-0"), err, "version", "integer")); + call = call.version( value.map(|v| arg_from_str(v, err, "version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5885,6 +6407,7 @@ where "placement.managed-cluster.config.gce-cluster-config.subnetwork-uri" => Some(("placement.managedCluster.config.gceClusterConfig.subnetworkUri", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gce-cluster-config.tags" => Some(("placement.managedCluster.config.gceClusterConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "placement.managed-cluster.config.gce-cluster-config.zone-uri" => Some(("placement.managedCluster.config.gceClusterConfig.zoneUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "placement.managed-cluster.config.gke-cluster-config.gke-cluster-target" => Some(("placement.managedCluster.config.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.lifecycle-config.auto-delete-time" => Some(("placement.managedCluster.config.lifecycleConfig.autoDeleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5955,7 +6478,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", 
"create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", 
"cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "gke-cluster-target", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6029,7 +6552,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6302,6 +6825,7 @@ where "placement.managed-cluster.config.gce-cluster-config.subnetwork-uri" => Some(("placement.managedCluster.config.gceClusterConfig.subnetworkUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gce-cluster-config.tags" => Some(("placement.managedCluster.config.gceClusterConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "placement.managed-cluster.config.gce-cluster-config.zone-uri" => Some(("placement.managedCluster.config.gceClusterConfig.zoneUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "placement.managed-cluster.config.gke-cluster-config.gke-cluster-target" => Some(("placement.managedCluster.config.gkeClusterConfig.gkeClusterTarget", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.cluster-namespace" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.clusterNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.gke-cluster-config.namespaced-gke-deployment-target.target-gke-cluster" => Some(("placement.managedCluster.config.gkeClusterConfig.namespacedGkeDeploymentTarget.targetGkeCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "placement.managed-cluster.config.lifecycle-config.auto-delete-time" => Some(("placement.managedCluster.config.lifecycleConfig.autoDeleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -6372,7 +6896,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - 
let suggestion = FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); + let suggestion = 
FieldCursor::did_you_mean(key, &vec!["auto-delete-time", "auto-delete-ttl", "autoscaling-config", "boot-disk-size-gb", "boot-disk-type", "cluster-labels", "cluster-name", "cluster-namespace", "cluster-selector", "confidential-instance-config", "config", "config-bucket", "consume-reservation-type", "create-time", "cross-realm-trust-admin-server", "cross-realm-trust-kdc", "cross-realm-trust-realm", "cross-realm-trust-shared-password-uri", "dag-timeout", "dataproc-metastore-service", "disk-config", "enable-confidential-compute", "enable-http-port-access", "enable-integrity-monitoring", "enable-kerberos", "enable-secure-boot", "enable-vtpm", "encryption-config", "endpoint-config", "gce-cluster-config", "gce-pd-kms-key-name", "gke-cluster-config", "gke-cluster-target", "http-ports", "id", "identity-config", "idle-delete-ttl", "idle-start-time", "image-uri", "image-version", "instance-group-manager-name", "instance-names", "instance-template-name", "internal-ip-only", "is-preemptible", "kdc-db-key-uri", "kerberos-config", "key", "key-password-uri", "keystore-password-uri", "keystore-uri", "kms-key-uri", "labels", "lifecycle-config", "local-ssd-interface", "machine-type-uri", "managed-cluster", "managed-group-config", "master-config", "metadata", "metastore-config", "min-cpu-platform", "name", "namespaced-gke-deployment-target", "network-uri", "node-group-affinity", "node-group-uri", "num-instances", "num-local-ssds", "optional-components", "placement", "policy-uri", "preemptibility", "private-ipv6-google-access", "properties", "realm", "reservation-affinity", "root-principal-password-uri", "secondary-worker-config", "security-config", "service-account", "service-account-scopes", "shielded-instance-config", "software-config", "subnetwork-uri", "tags", "target-gke-cluster", "temp-bucket", "tgt-lifetime-hours", "truststore-password-uri", "truststore-uri", "update-time", "user-service-account-mapping", "values", "version", "worker-config", "zone", "zone-uri"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6475,6 +6999,18 @@ where ("locations-batches-list", Some(opt)) => { call_result = self._projects_locations_batches_list(opt, dry_run, &mut err).await; }, + ("locations-operations-cancel", Some(opt)) => { + call_result = self._projects_locations_operations_cancel(opt, dry_run, &mut err).await; + }, + ("locations-operations-delete", Some(opt)) => { + call_result = self._projects_locations_operations_delete(opt, dry_run, &mut err).await; + }, + ("locations-operations-get", Some(opt)) => { + call_result = self._projects_locations_operations_get(opt, dry_run, &mut err).await; + }, + ("locations-operations-list", Some(opt)) => { + call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; + }, ("locations-workflow-templates-create", Some(opt)) => { call_result = self._projects_locations_workflow_templates_create(opt, dry_run, &mut err).await; }, @@ -6550,6 +7086,15 @@ where ("regions-clusters-list", Some(opt)) => { call_result = self._projects_regions_clusters_list(opt, dry_run, &mut err).await; }, + ("regions-clusters-node-groups-create", Some(opt)) => { + call_result = self._projects_regions_clusters_node_groups_create(opt, dry_run, &mut err).await; + }, + ("regions-clusters-node-groups-get", Some(opt)) => { + call_result = self._projects_regions_clusters_node_groups_get(opt, dry_run, &mut err).await; + }, + ("regions-clusters-node-groups-resize", Some(opt)) => { + call_result = self._projects_regions_clusters_node_groups_resize(opt, dry_run, &mut err).await; + }, ("regions-clusters-patch", Some(opt)) => { call_result = self._projects_regions_clusters_patch(opt, dry_run, &mut err).await; }, @@ -6728,7 +7273,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-autoscaling-policies-create', 'locations-autoscaling-policies-delete', 
'locations-autoscaling-policies-get', 'locations-autoscaling-policies-get-iam-policy', 'locations-autoscaling-policies-list', 'locations-autoscaling-policies-set-iam-policy', 'locations-autoscaling-policies-test-iam-permissions', 'locations-autoscaling-policies-update', 'locations-batches-create', 'locations-batches-delete', 'locations-batches-get', 'locations-batches-list', 'locations-workflow-templates-create', 'locations-workflow-templates-delete', 'locations-workflow-templates-get', 'locations-workflow-templates-get-iam-policy', 'locations-workflow-templates-instantiate', 'locations-workflow-templates-instantiate-inline', 'locations-workflow-templates-list', 'locations-workflow-templates-set-iam-policy', 'locations-workflow-templates-test-iam-permissions', 'locations-workflow-templates-update', 'regions-autoscaling-policies-create', 'regions-autoscaling-policies-delete', 'regions-autoscaling-policies-get', 'regions-autoscaling-policies-get-iam-policy', 'regions-autoscaling-policies-list', 'regions-autoscaling-policies-set-iam-policy', 'regions-autoscaling-policies-test-iam-permissions', 'regions-autoscaling-policies-update', 'regions-clusters-create', 'regions-clusters-delete', 'regions-clusters-diagnose', 'regions-clusters-get', 'regions-clusters-get-iam-policy', 'regions-clusters-inject-credentials', 'regions-clusters-list', 'regions-clusters-patch', 'regions-clusters-repair', 'regions-clusters-set-iam-policy', 'regions-clusters-start', 'regions-clusters-stop', 'regions-clusters-test-iam-permissions', 'regions-jobs-cancel', 'regions-jobs-delete', 'regions-jobs-get', 'regions-jobs-get-iam-policy', 'regions-jobs-list', 'regions-jobs-patch', 'regions-jobs-set-iam-policy', 'regions-jobs-submit', 'regions-jobs-submit-as-operation', 'regions-jobs-test-iam-permissions', 'regions-operations-cancel', 'regions-operations-delete', 'regions-operations-get', 'regions-operations-get-iam-policy', 'regions-operations-list', 'regions-operations-set-iam-policy', 
'regions-operations-test-iam-permissions', 'regions-workflow-templates-create', 'regions-workflow-templates-delete', 'regions-workflow-templates-get', 'regions-workflow-templates-get-iam-policy', 'regions-workflow-templates-instantiate', 'regions-workflow-templates-instantiate-inline', 'regions-workflow-templates-list', 'regions-workflow-templates-set-iam-policy', 'regions-workflow-templates-test-iam-permissions' and 'regions-workflow-templates-update'", vec![ + ("projects", "methods: 'locations-autoscaling-policies-create', 'locations-autoscaling-policies-delete', 'locations-autoscaling-policies-get', 'locations-autoscaling-policies-get-iam-policy', 'locations-autoscaling-policies-list', 'locations-autoscaling-policies-set-iam-policy', 'locations-autoscaling-policies-test-iam-permissions', 'locations-autoscaling-policies-update', 'locations-batches-create', 'locations-batches-delete', 'locations-batches-get', 'locations-batches-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-workflow-templates-create', 'locations-workflow-templates-delete', 'locations-workflow-templates-get', 'locations-workflow-templates-get-iam-policy', 'locations-workflow-templates-instantiate', 'locations-workflow-templates-instantiate-inline', 'locations-workflow-templates-list', 'locations-workflow-templates-set-iam-policy', 'locations-workflow-templates-test-iam-permissions', 'locations-workflow-templates-update', 'regions-autoscaling-policies-create', 'regions-autoscaling-policies-delete', 'regions-autoscaling-policies-get', 'regions-autoscaling-policies-get-iam-policy', 'regions-autoscaling-policies-list', 'regions-autoscaling-policies-set-iam-policy', 'regions-autoscaling-policies-test-iam-permissions', 'regions-autoscaling-policies-update', 'regions-clusters-create', 'regions-clusters-delete', 'regions-clusters-diagnose', 'regions-clusters-get', 'regions-clusters-get-iam-policy', 
'regions-clusters-inject-credentials', 'regions-clusters-list', 'regions-clusters-node-groups-create', 'regions-clusters-node-groups-get', 'regions-clusters-node-groups-resize', 'regions-clusters-patch', 'regions-clusters-repair', 'regions-clusters-set-iam-policy', 'regions-clusters-start', 'regions-clusters-stop', 'regions-clusters-test-iam-permissions', 'regions-jobs-cancel', 'regions-jobs-delete', 'regions-jobs-get', 'regions-jobs-get-iam-policy', 'regions-jobs-list', 'regions-jobs-patch', 'regions-jobs-set-iam-policy', 'regions-jobs-submit', 'regions-jobs-submit-as-operation', 'regions-jobs-test-iam-permissions', 'regions-operations-cancel', 'regions-operations-delete', 'regions-operations-get', 'regions-operations-get-iam-policy', 'regions-operations-list', 'regions-operations-set-iam-policy', 'regions-operations-test-iam-permissions', 'regions-workflow-templates-create', 'regions-workflow-templates-delete', 'regions-workflow-templates-get', 'regions-workflow-templates-get-iam-policy', 'regions-workflow-templates-instantiate', 'regions-workflow-templates-instantiate-inline', 'regions-workflow-templates-list', 'regions-workflow-templates-set-iam-policy', 'regions-workflow-templates-test-iam-permissions' and 'regions-workflow-templates-update'", vec![ ("locations-autoscaling-policies-create", Some(r##"Creates new autoscaling policy."##), "Details at http://byron.github.io/google-apis-rs/google_dataproc1_cli/projects_locations-autoscaling-policies-create", @@ -6807,7 +7352,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6857,7 +7402,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6885,7 +7430,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -6969,7 +7514,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the batch resource to delete."##), + Some(r##"Required. The fully qualified name of the batch to retrieve in the format "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID""##), Some(true), Some(false)), @@ -6991,7 +7536,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the batch to retrieve."##), + Some(r##"Required. 
The fully qualified name of the batch to retrieve in the format "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID""##), Some(true), Some(false)), @@ -7023,6 +7568,94 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-cancel", + Some(r##"Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns google.rpc.Code.UNIMPLEMENTED. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to Code.CANCELLED."##), + "Details at http://byron.github.io/google-apis-rs/google_dataproc1_cli/projects_locations-operations-cancel", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource to be cancelled."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-delete", + Some(r##"Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. 
If the server doesn't support this method, it returns google.rpc.Code.UNIMPLEMENTED."##), + "Details at http://byron.github.io/google-apis-rs/google_dataproc1_cli/projects_locations-operations-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource to be deleted."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-get", + Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##), + "Details at http://byron.github.io/google-apis-rs/google_dataproc1_cli/projects_locations-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-list", + Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns UNIMPLEMENTED.NOTE: the name binding allows API services to override the binding to use different resource name schemes, such as users/*/operations. To override the binding, API services can add a binding such as "/v1/{name=users/*}/operations" to their service configuration. 
For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##), + "Details at http://byron.github.io/google-apis-rs/google_dataproc1_cli/projects_locations-operations-list", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation's parent resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -7107,7 +7740,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7213,7 +7846,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7241,7 +7874,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7369,7 +8002,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7419,7 +8052,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7447,7 +8080,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7645,7 +8278,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7729,6 +8362,84 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("regions-clusters-node-groups-create", + Some(r##"Creates a node group in a cluster. The returned Operation.metadata is NodeGroupOperationMetadata (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#nodegroupoperationmetadata)."##), + "Details at http://byron.github.io/google-apis-rs/google_dataproc1_cli/projects_regions-clusters-node-groups-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource where this node group will be created. Format: projects/{project}/regions/{region}/clusters/{cluster}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("regions-clusters-node-groups-get", + Some(r##"Gets the resource representation for a node group in a cluster."##), + "Details at http://byron.github.io/google-apis-rs/google_dataproc1_cli/projects_regions-clusters-node-groups-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the node group to retrieve. 
Format: projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("regions-clusters-node-groups-resize", + Some(r##"Resizes a node group in a cluster. The returned Operation.metadata is NodeGroupOperationMetadata (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#nodegroupoperationmetadata)."##), + "Details at http://byron.github.io/google-apis-rs/google_dataproc1_cli/projects_regions-clusters-node-groups-resize", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the node group to resize. Format: projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -7821,7 +8532,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -7929,7 +8640,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8065,7 +8776,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8161,7 +8872,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8257,7 +8968,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8351,7 +9062,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8401,7 +9112,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8429,7 +9140,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8529,7 +9240,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8635,7 +9346,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8663,7 +9374,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8719,7 +9430,7 @@ async fn main() { let mut app = App::new("dataproc1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230103") .about("Manages Hadoop-based clusters and jobs on Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dataproc1_cli") .arg(Arg::with_name("url") diff --git a/gen/dataproc1/Cargo.toml b/gen/dataproc1/Cargo.toml index dc21e3a04e..3b4495e05c 100644 --- a/gen/dataproc1/Cargo.toml +++ b/gen/dataproc1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dataproc1" -version = "5.0.2-beta-1+20230103" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dataproc (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dataproc1" homepage = "https://cloud.google.com/dataproc/" -documentation = 
"https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103" +documentation = "https://docs.rs/google-dataproc1/5.0.2+20230103" license = "MIT" keywords = ["dataproc", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dataproc1/README.md b/gen/dataproc1/README.md index d12f2070ff..c16426e024 100644 --- a/gen/dataproc1/README.md +++ b/gen/dataproc1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-dataproc1` library allows access to all features of the *Google Dataproc* service. -This documentation was generated from *Dataproc* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *dataproc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Dataproc* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *dataproc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dataproc* *v1* API can be found at the [official documentation site](https://cloud.google.com/dataproc/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/Dataproc) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/Dataproc) ... 
* projects - * [*locations autoscaling policies create*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyCreateCall), [*locations autoscaling policies delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyDeleteCall), [*locations autoscaling policies get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyGetCall), [*locations autoscaling policies get iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyGetIamPolicyCall), [*locations autoscaling policies list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyListCall), [*locations autoscaling policies set iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicySetIamPolicyCall), [*locations autoscaling policies test iam permissions*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyTestIamPermissionCall), [*locations autoscaling policies update*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyUpdateCall), [*locations batches create*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationBatchCreateCall), [*locations batches delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationBatchDeleteCall), [*locations batches get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationBatchGetCall), [*locations batches list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationBatchListCall), [*locations operations 
cancel*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationOperationListCall), [*locations workflow templates create*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateCreateCall), [*locations workflow templates delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateDeleteCall), [*locations workflow templates get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateGetCall), [*locations workflow templates get iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateGetIamPolicyCall), [*locations workflow templates instantiate*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateInstantiateCall), [*locations workflow templates instantiate inline*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateInstantiateInlineCall), [*locations workflow templates list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateListCall), [*locations workflow templates set iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateSetIamPolicyCall), [*locations workflow templates test iam 
permissions*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateTestIamPermissionCall), [*locations workflow templates update*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateUpdateCall), [*regions autoscaling policies create*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyCreateCall), [*regions autoscaling policies delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyDeleteCall), [*regions autoscaling policies get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyGetCall), [*regions autoscaling policies get iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyGetIamPolicyCall), [*regions autoscaling policies list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyListCall), [*regions autoscaling policies set iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicySetIamPolicyCall), [*regions autoscaling policies test iam permissions*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyTestIamPermissionCall), [*regions autoscaling policies update*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyUpdateCall), [*regions clusters create*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterCreateCall), [*regions clusters delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterDeleteCall), [*regions clusters 
diagnose*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterDiagnoseCall), [*regions clusters get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterGetCall), [*regions clusters get iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterGetIamPolicyCall), [*regions clusters inject credentials*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterInjectCredentialCall), [*regions clusters list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterListCall), [*regions clusters node groups create*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterNodeGroupCreateCall), [*regions clusters node groups get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterNodeGroupGetCall), [*regions clusters node groups resize*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterNodeGroupResizeCall), [*regions clusters patch*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterPatchCall), [*regions clusters repair*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterRepairCall), [*regions clusters set iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterSetIamPolicyCall), [*regions clusters start*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterStartCall), [*regions clusters stop*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterStopCall), [*regions clusters test iam 
permissions*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionClusterTestIamPermissionCall), [*regions jobs cancel*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobCancelCall), [*regions jobs delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobDeleteCall), [*regions jobs get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobGetCall), [*regions jobs get iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobGetIamPolicyCall), [*regions jobs list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobListCall), [*regions jobs patch*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobPatchCall), [*regions jobs set iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobSetIamPolicyCall), [*regions jobs submit*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobSubmitCall), [*regions jobs submit as operation*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobSubmitAsOperationCall), [*regions jobs test iam permissions*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionJobTestIamPermissionCall), [*regions operations cancel*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionOperationCancelCall), [*regions operations delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionOperationDeleteCall), [*regions operations get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionOperationGetCall), [*regions operations get iam 
policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionOperationGetIamPolicyCall), [*regions operations list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionOperationListCall), [*regions operations set iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionOperationSetIamPolicyCall), [*regions operations test iam permissions*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionOperationTestIamPermissionCall), [*regions workflow templates create*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateCreateCall), [*regions workflow templates delete*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateDeleteCall), [*regions workflow templates get*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateGetCall), [*regions workflow templates get iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateGetIamPolicyCall), [*regions workflow templates instantiate*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateInstantiateCall), [*regions workflow templates instantiate inline*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateInstantiateInlineCall), [*regions workflow templates list*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateListCall), [*regions workflow templates set iam policy*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateSetIamPolicyCall), [*regions workflow templates test iam 
permissions*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateTestIamPermissionCall) and [*regions workflow templates update*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateUpdateCall) + * [*locations autoscaling policies create*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyCreateCall), [*locations autoscaling policies delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyDeleteCall), [*locations autoscaling policies get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyGetCall), [*locations autoscaling policies get iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyGetIamPolicyCall), [*locations autoscaling policies list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyListCall), [*locations autoscaling policies set iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicySetIamPolicyCall), [*locations autoscaling policies test iam permissions*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyTestIamPermissionCall), [*locations autoscaling policies update*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationAutoscalingPolicyUpdateCall), [*locations batches create*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationBatchCreateCall), [*locations batches delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationBatchDeleteCall), [*locations batches get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationBatchGetCall), 
[*locations batches list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationBatchListCall), [*locations operations cancel*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationOperationListCall), [*locations workflow templates create*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateCreateCall), [*locations workflow templates delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateDeleteCall), [*locations workflow templates get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateGetCall), [*locations workflow templates get iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateGetIamPolicyCall), [*locations workflow templates instantiate*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateInstantiateCall), [*locations workflow templates instantiate inline*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateInstantiateInlineCall), [*locations workflow templates list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateListCall), [*locations workflow templates set iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateSetIamPolicyCall), [*locations workflow templates test iam 
permissions*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateTestIamPermissionCall), [*locations workflow templates update*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectLocationWorkflowTemplateUpdateCall), [*regions autoscaling policies create*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyCreateCall), [*regions autoscaling policies delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyDeleteCall), [*regions autoscaling policies get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyGetCall), [*regions autoscaling policies get iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyGetIamPolicyCall), [*regions autoscaling policies list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyListCall), [*regions autoscaling policies set iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicySetIamPolicyCall), [*regions autoscaling policies test iam permissions*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyTestIamPermissionCall), [*regions autoscaling policies update*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionAutoscalingPolicyUpdateCall), [*regions clusters create*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterCreateCall), [*regions clusters delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterDeleteCall), [*regions clusters diagnose*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterDiagnoseCall), [*regions clusters 
get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterGetCall), [*regions clusters get iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterGetIamPolicyCall), [*regions clusters inject credentials*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterInjectCredentialCall), [*regions clusters list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterListCall), [*regions clusters node groups create*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterNodeGroupCreateCall), [*regions clusters node groups get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterNodeGroupGetCall), [*regions clusters node groups resize*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterNodeGroupResizeCall), [*regions clusters patch*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterPatchCall), [*regions clusters repair*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterRepairCall), [*regions clusters set iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterSetIamPolicyCall), [*regions clusters start*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterStartCall), [*regions clusters stop*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterStopCall), [*regions clusters test iam permissions*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionClusterTestIamPermissionCall), [*regions jobs cancel*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobCancelCall), [*regions jobs 
delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobDeleteCall), [*regions jobs get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobGetCall), [*regions jobs get iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobGetIamPolicyCall), [*regions jobs list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobListCall), [*regions jobs patch*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobPatchCall), [*regions jobs set iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobSetIamPolicyCall), [*regions jobs submit*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobSubmitCall), [*regions jobs submit as operation*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobSubmitAsOperationCall), [*regions jobs test iam permissions*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionJobTestIamPermissionCall), [*regions operations cancel*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionOperationCancelCall), [*regions operations delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionOperationDeleteCall), [*regions operations get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionOperationGetCall), [*regions operations get iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionOperationGetIamPolicyCall), [*regions operations list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionOperationListCall), [*regions operations set iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionOperationSetIamPolicyCall), [*regions 
operations test iam permissions*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionOperationTestIamPermissionCall), [*regions workflow templates create*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateCreateCall), [*regions workflow templates delete*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateDeleteCall), [*regions workflow templates get*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateGetCall), [*regions workflow templates get iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateGetIamPolicyCall), [*regions workflow templates instantiate*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateInstantiateCall), [*regions workflow templates instantiate inline*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateInstantiateInlineCall), [*regions workflow templates list*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateListCall), [*regions workflow templates set iam policy*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateSetIamPolicyCall), [*regions workflow templates test iam permissions*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateTestIamPermissionCall) and [*regions workflow templates update*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/api::ProjectRegionWorkflowTemplateUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/Dataproc)** +* 
**[Hub](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/Dataproc)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::CallBuilder) -* **[Resources](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::CallBuilder) +* **[Resources](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::Part)** + * **[Parts](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -144,17 +144,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -164,29 +164,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::Delegate) to the -[Method Builder](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::Delegate) to the +[Method Builder](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::RequestValue) and -[decodable](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::RequestValue) and +[decodable](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dataproc1/5.0.2-beta-1+20230103/google_dataproc1/client::RequestValue) are moved +* [request values](https://docs.rs/google-dataproc1/5.0.2+20230103/google_dataproc1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/dataproc1/src/api.rs b/gen/dataproc1/src/api.rs index 2b01fe4e04..a50cb6946e 100644 --- a/gen/dataproc1/src/api.rs +++ b/gen/dataproc1/src/api.rs @@ -128,7 +128,7 @@ impl<'a, S> Dataproc { Dataproc { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dataproc.googleapis.com/".to_string(), _root_url: "https://dataproc.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> Dataproc { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dataproc1/src/client.rs b/gen/dataproc1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dataproc1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dataproc1/src/lib.rs b/gen/dataproc1/src/lib.rs index 08ef0c9e8a..c22334d5f9 100644 --- a/gen/dataproc1/src/lib.rs +++ b/gen/dataproc1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dataproc* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *dataproc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dataproc* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *dataproc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dataproc* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/dataproc/). diff --git a/gen/datastore1-cli/Cargo.toml b/gen/datastore1-cli/Cargo.toml index cf361d9bad..a15e5f4424 100644 --- a/gen/datastore1-cli/Cargo.toml +++ b/gen/datastore1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datastore1-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with datastore (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datastore1-cli" @@ -20,13 +20,13 @@ name = "datastore1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datastore1] path = "../datastore1" -version = "4.0.1+20220221" +version = "5.0.2+20230118" + diff --git a/gen/datastore1-cli/README.md b/gen/datastore1-cli/README.md index 227fb4d558..2b7aa78fb4 100644 --- a/gen/datastore1-cli/README.md +++ b/gen/datastore1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation 
was generated from the *datastore* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *datastore* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash datastore1 [options] @@ -46,6 +46,7 @@ datastore1 [options] operations-list [-p ]... [-o ] reserve-ids (-r )... [-p ]... [-o ] rollback (-r )... [-p ]... [-o ] + run-aggregation-query (-r )... [-p ]... [-o ] run-query (-r )... [-p ]... [-o ] datastore1 --help diff --git a/gen/datastore1-cli/mkdocs.yml b/gen/datastore1-cli/mkdocs.yml index 3fd562965b..f5670dbb00 100644 --- a/gen/datastore1-cli/mkdocs.yml +++ b/gen/datastore1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: datastore v4.0.1+20220221 +site_name: datastore v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-datastore1-cli site_description: A complete library to interact with datastore (protocol v1) @@ -7,25 +7,27 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datastore1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_allocate-ids.md', 'Projects', 'Allocate Ids'] -- ['projects_begin-transaction.md', 'Projects', 'Begin Transaction'] -- ['projects_commit.md', 'Projects', 'Commit'] -- ['projects_export.md', 'Projects', 'Export'] -- ['projects_import.md', 'Projects', 'Import'] -- ['projects_indexes-create.md', 'Projects', 'Indexes Create'] -- ['projects_indexes-delete.md', 'Projects', 'Indexes Delete'] -- ['projects_indexes-get.md', 'Projects', 'Indexes Get'] -- ['projects_indexes-list.md', 'Projects', 'Indexes List'] -- ['projects_lookup.md', 'Projects', 'Lookup'] -- ['projects_operations-cancel.md', 'Projects', 'Operations Cancel'] -- ['projects_operations-delete.md', 'Projects', 'Operations Delete'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_operations-list.md', 'Projects', 'Operations List'] -- ['projects_reserve-ids.md', 'Projects', 'Reserve Ids'] -- ['projects_rollback.md', 'Projects', 
'Rollback'] -- ['projects_run-query.md', 'Projects', 'Run Query'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Allocate Ids': 'projects_allocate-ids.md' + - 'Begin Transaction': 'projects_begin-transaction.md' + - 'Commit': 'projects_commit.md' + - 'Export': 'projects_export.md' + - 'Import': 'projects_import.md' + - 'Indexes Create': 'projects_indexes-create.md' + - 'Indexes Delete': 'projects_indexes-delete.md' + - 'Indexes Get': 'projects_indexes-get.md' + - 'Indexes List': 'projects_indexes-list.md' + - 'Lookup': 'projects_lookup.md' + - 'Operations Cancel': 'projects_operations-cancel.md' + - 'Operations Delete': 'projects_operations-delete.md' + - 'Operations Get': 'projects_operations-get.md' + - 'Operations List': 'projects_operations-list.md' + - 'Reserve Ids': 'projects_reserve-ids.md' + - 'Rollback': 'projects_rollback.md' + - 'Run Aggregation Query': 'projects_run-aggregation-query.md' + - 'Run Query': 'projects_run-query.md' theme: readthedocs diff --git a/gen/datastore1-cli/src/client.rs b/gen/datastore1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datastore1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt 
user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datastore1-cli/src/main.rs b/gen/datastore1-cli/src/main.rs index 9d8a26da29..561d937ee0 100644 --- a/gen/datastore1-cli/src/main.rs +++ b/gen/datastore1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datastore1::{api, Error, oauth2}; +use google_datastore1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -74,8 +73,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "database-id" => Some(("databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["database-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -158,9 +158,11 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "database-id" => Some(("databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transaction-options.read-only.read-time" => Some(("transactionOptions.readOnly.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transaction-options.read-write.previous-transaction" => Some(("transactionOptions.readWrite.previousTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["previous-transaction", "read-write", "transaction-options"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["database-id", "previous-transaction", "read-only", "read-time", "read-write", "transaction-options"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -243,10 +245,13 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "database-id" => Some(("databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mode" => Some(("mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "single-use-transaction.read-only.read-time" => Some(("singleUseTransaction.readOnly.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "single-use-transaction.read-write.previous-transaction" => Some(("singleUseTransaction.readWrite.previousTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transaction" => Some(("transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["mode", "transaction"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["database-id", "mode", "previous-transaction", "read-only", "read-time", "read-write", "single-use-transaction", "transaction"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -685,7 +690,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -760,10 +765,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "database-id" => Some(("databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.new-transaction.read-only.read-time" => Some(("readOptions.newTransaction.readOnly.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.new-transaction.read-write.previous-transaction" => Some(("readOptions.newTransaction.readWrite.previousTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "read-options.read-consistency" => Some(("readOptions.readConsistency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.read-time" => Some(("readOptions.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "read-options.transaction" => Some(("readOptions.transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["read-consistency", "read-options", "transaction"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["database-id", "new-transaction", "previous-transaction", "read-consistency", "read-only", "read-options", "read-time", "read-write", "transaction"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -989,7 +998,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1149,9 +1158,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "database-id" => Some(("databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transaction" => Some(("transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["transaction"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["database-id", "transaction"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1211,6 +1221,125 @@ where } } + async fn _projects_run_aggregation_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "aggregation-query.nested-query.end-cursor" => Some(("aggregationQuery.nestedQuery.endCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.composite-filter.op" => Some(("aggregationQuery.nestedQuery.filter.compositeFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.op" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.property.name" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.property.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.blob-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.blobValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.boolean-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.booleanValue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.double-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.doubleValue", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.entity-value.key.partition-id.database-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.entityValue.key.partitionId.databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.entity-value.key.partition-id.namespace-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.entityValue.key.partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.entity-value.key.partition-id.project-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.entityValue.key.partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.exclude-from-indexes" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.excludeFromIndexes", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.geo-point-value.latitude" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.geoPointValue.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.geo-point-value.longitude" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.geoPointValue.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.integer-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.integerValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.key-value.partition-id.database-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.keyValue.partitionId.databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.key-value.partition-id.namespace-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.keyValue.partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.key-value.partition-id.project-id" => 
Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.keyValue.partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.meaning" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.meaning", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.null-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.nullValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.string-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.stringValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.timestamp-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.timestampValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.limit" => Some(("aggregationQuery.nestedQuery.limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.offset" => Some(("aggregationQuery.nestedQuery.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.start-cursor" => Some(("aggregationQuery.nestedQuery.startCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database-id" => Some(("databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gql-query.allow-literals" => Some(("gqlQuery.allowLiterals", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gql-query.query-string" => Some(("gqlQuery.queryString", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "partition-id.database-id" => Some(("partitionId.databaseId", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), + "partition-id.namespace-id" => Some(("partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "partition-id.project-id" => Some(("partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.new-transaction.read-only.read-time" => Some(("readOptions.newTransaction.readOnly.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.new-transaction.read-write.previous-transaction" => Some(("readOptions.newTransaction.readWrite.previousTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.read-consistency" => Some(("readOptions.readConsistency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.read-time" => Some(("readOptions.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.transaction" => Some(("readOptions.transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["aggregation-query", "allow-literals", "blob-value", "boolean-value", "composite-filter", "database-id", "double-value", "end-cursor", "entity-value", "exclude-from-indexes", "filter", "geo-point-value", "gql-query", "integer-value", "key", "key-value", "latitude", "limit", "longitude", "meaning", "name", "namespace-id", "nested-query", "new-transaction", "null-value", "offset", "op", "partition-id", "previous-transaction", "project-id", "property", "property-filter", "query-string", "read-consistency", "read-only", "read-options", "read-time", "read-write", "start-cursor", "string-value", "timestamp-value", "transaction", "value"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) 
= type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RunAggregationQueryRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().run_aggregation_query(request, opt.value_of("project-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_run_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1234,8 +1363,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "database-id" => Some(("databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gql-query.allow-literals" => Some(("gqlQuery.allowLiterals", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "gql-query.query-string" => Some(("gqlQuery.queryString", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "partition-id.database-id" => Some(("partitionId.databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "partition-id.namespace-id" => Some(("partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "partition-id.project-id" => Some(("partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.end-cursor" => Some(("query.endCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1245,12 +1376,14 @@ where "query.filter.property-filter.value.blob-value" => Some(("query.filter.propertyFilter.value.blobValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.filter.property-filter.value.boolean-value" => Some(("query.filter.propertyFilter.value.booleanValue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query.filter.property-filter.value.double-value" => Some(("query.filter.propertyFilter.value.doubleValue", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "query.filter.property-filter.value.entity-value.key.partition-id.database-id" => Some(("query.filter.propertyFilter.value.entityValue.key.partitionId.databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.filter.property-filter.value.entity-value.key.partition-id.namespace-id" => Some(("query.filter.propertyFilter.value.entityValue.key.partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.filter.property-filter.value.entity-value.key.partition-id.project-id" => 
Some(("query.filter.propertyFilter.value.entityValue.key.partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.filter.property-filter.value.exclude-from-indexes" => Some(("query.filter.propertyFilter.value.excludeFromIndexes", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query.filter.property-filter.value.geo-point-value.latitude" => Some(("query.filter.propertyFilter.value.geoPointValue.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "query.filter.property-filter.value.geo-point-value.longitude" => Some(("query.filter.propertyFilter.value.geoPointValue.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "query.filter.property-filter.value.integer-value" => Some(("query.filter.propertyFilter.value.integerValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "query.filter.property-filter.value.key-value.partition-id.database-id" => Some(("query.filter.propertyFilter.value.keyValue.partitionId.databaseId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.filter.property-filter.value.key-value.partition-id.namespace-id" => Some(("query.filter.propertyFilter.value.keyValue.partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.filter.property-filter.value.key-value.partition-id.project-id" => Some(("query.filter.propertyFilter.value.keyValue.partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.filter.property-filter.value.meaning" => Some(("query.filter.propertyFilter.value.meaning", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -1260,10 +1393,13 @@ where "query.limit" => Some(("query.limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "query.offset" => Some(("query.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"query.start-cursor" => Some(("query.startCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.new-transaction.read-only.read-time" => Some(("readOptions.newTransaction.readOnly.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.new-transaction.read-write.previous-transaction" => Some(("readOptions.newTransaction.readWrite.previousTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "read-options.read-consistency" => Some(("readOptions.readConsistency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.read-time" => Some(("readOptions.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "read-options.transaction" => Some(("readOptions.transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-literals", "blob-value", "boolean-value", "composite-filter", "double-value", "end-cursor", "entity-value", "exclude-from-indexes", "filter", "geo-point-value", "gql-query", "integer-value", "key", "key-value", "latitude", "limit", "longitude", "meaning", "name", "namespace-id", "null-value", "offset", "op", "partition-id", "project-id", "property", "property-filter", "query", "query-string", "read-consistency", "read-options", "start-cursor", "string-value", "timestamp-value", "transaction", "value"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-literals", "blob-value", "boolean-value", "composite-filter", "database-id", "double-value", "end-cursor", "entity-value", "exclude-from-indexes", "filter", "geo-point-value", "gql-query", "integer-value", "key", "key-value", "latitude", "limit", "longitude", "meaning", "name", "namespace-id", "new-transaction", "null-value", "offset", "op", "partition-id", "previous-transaction", "project-id", "property", "property-filter", "query", "query-string", 
"read-consistency", "read-only", "read-options", "read-time", "read-write", "start-cursor", "string-value", "timestamp-value", "transaction", "value"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1378,6 +1514,9 @@ where ("rollback", Some(opt)) => { call_result = self._projects_rollback(opt, dry_run, &mut err).await; }, + ("run-aggregation-query", Some(opt)) => { + call_result = self._projects_run_aggregation_query(opt, dry_run, &mut err).await; + }, ("run-query", Some(opt)) => { call_result = self._projects_run_query(opt, dry_run, &mut err).await; }, @@ -1460,7 +1599,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'allocate-ids', 'begin-transaction', 'commit', 'export', 'import', 'indexes-create', 'indexes-delete', 'indexes-get', 'indexes-list', 'lookup', 'operations-cancel', 'operations-delete', 'operations-get', 'operations-list', 'reserve-ids', 'rollback' and 'run-query'", vec![ + ("projects", "methods: 'allocate-ids', 'begin-transaction', 'commit', 'export', 'import', 'indexes-create', 'indexes-delete', 'indexes-get', 'indexes-list', 'lookup', 'operations-cancel', 'operations-delete', 'operations-get', 'operations-list', 'reserve-ids', 'rollback', 'run-aggregation-query' and 'run-query'", vec![ ("allocate-ids", Some(r##"Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted."##), "Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_allocate-ids", @@ -1873,6 +2012,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("run-aggregation-query", + Some(r##"Runs an aggregation query."##), + "Details at http://byron.github.io/google-apis-rs/google_datastore1_cli/projects_run-aggregation-query", + vec![ + 
(Some(r##"project-id"##), + None, + Some(r##"Required. The ID of the project against which to make the request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1913,7 +2080,7 @@ async fn main() { let mut app = App::new("datastore1") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20230118") .about("Accesses the schemaless NoSQL database to provide fully managed, robust, scalable storage for your application. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datastore1_cli") .arg(Arg::with_name("url") diff --git a/gen/datastore1/Cargo.toml b/gen/datastore1/Cargo.toml index 73ac76e0c1..f1020124f4 100644 --- a/gen/datastore1/Cargo.toml +++ b/gen/datastore1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datastore1" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with datastore (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datastore1" homepage = "https://cloud.google.com/datastore/" -documentation = "https://docs.rs/google-datastore1/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-datastore1/5.0.2+20230118" license = "MIT" keywords = ["datastore", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datastore1/README.md b/gen/datastore1/README.md index dfc95385f9..d00ba64655 100644 --- a/gen/datastore1/README.md +++ b/gen/datastore1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-datastore1` library allows access to all features of the *Google datastore* service. -This documentation was generated from *datastore* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *datastore:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *datastore* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *datastore:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *datastore* *v1* API can be found at the [official documentation site](https://cloud.google.com/datastore/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/Datastore) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/Datastore) ... 
* projects - * [*allocate ids*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectAllocateIdCall), [*begin transaction*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectBeginTransactionCall), [*commit*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectCommitCall), [*export*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectExportCall), [*import*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectImportCall), [*indexes create*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectIndexCreateCall), [*indexes delete*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectIndexDeleteCall), [*indexes get*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectIndexGetCall), [*indexes list*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectIndexListCall), [*lookup*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectLookupCall), [*operations cancel*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectOperationCancelCall), [*operations delete*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectOperationDeleteCall), [*operations get*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectOperationListCall), [*reserve ids*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectReserveIdCall), [*rollback*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectRollbackCall), [*run aggregation 
query*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectRunAggregationQueryCall) and [*run query*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/api::ProjectRunQueryCall) + * [*allocate ids*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectAllocateIdCall), [*begin transaction*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectBeginTransactionCall), [*commit*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectCommitCall), [*export*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectExportCall), [*import*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectImportCall), [*indexes create*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectIndexCreateCall), [*indexes delete*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectIndexDeleteCall), [*indexes get*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectIndexGetCall), [*indexes list*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectIndexListCall), [*lookup*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectLookupCall), [*operations cancel*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectOperationCancelCall), [*operations delete*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectOperationDeleteCall), [*operations get*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectOperationListCall), [*reserve ids*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectReserveIdCall), 
[*rollback*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectRollbackCall), [*run aggregation query*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectRunAggregationQueryCall) and [*run query*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/api::ProjectRunQueryCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/Datastore)** +* **[Hub](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/Datastore)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::CallBuilder) -* **[Resources](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::Part)** + * **[Parts](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::RequestValue) and -[decodable](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::RequestValue) and +[decodable](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datastore1/5.0.2-beta-1+20230118/google_datastore1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datastore1/5.0.2+20230118/google_datastore1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/datastore1/src/api.rs b/gen/datastore1/src/api.rs index 38c2e2a9f2..c7efaa470d 100644 --- a/gen/datastore1/src/api.rs +++ b/gen/datastore1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Datastore { Datastore { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datastore.googleapis.com/".to_string(), _root_url: "https://datastore.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Datastore { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datastore1/src/client.rs b/gen/datastore1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datastore1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datastore1/src/lib.rs b/gen/datastore1/src/lib.rs index fadb66f21c..994f5416a5 100644 --- a/gen/datastore1/src/lib.rs +++ b/gen/datastore1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *datastore* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *datastore:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *datastore* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *datastore:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *datastore* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/datastore/). diff --git a/gen/datastore1_beta3-cli/Cargo.toml b/gen/datastore1_beta3-cli/Cargo.toml index a42155efb5..12cddb0c47 100644 --- a/gen/datastore1_beta3-cli/Cargo.toml +++ b/gen/datastore1_beta3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datastore1_beta3-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with datastore (protocol v1beta3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datastore1_beta3-cli" @@ -20,13 +20,13 @@ name = "datastore1-beta3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datastore1_beta3] path = "../datastore1_beta3" -version = "4.0.1+20220221" +version = "5.0.2+20230118" + diff --git a/gen/datastore1_beta3-cli/README.md b/gen/datastore1_beta3-cli/README.md index 8ea9c87351..0131b79cf9 100644 --- a/gen/datastore1_beta3-cli/README.md +++ b/gen/datastore1_beta3-cli/README.md @@ -25,7 +25,7 @@ Find the source code 
[on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *datastore* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *datastore* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash datastore1-beta3 [options] @@ -36,6 +36,7 @@ datastore1-beta3 [options] lookup (-r )... [-p ]... [-o ] reserve-ids (-r )... [-p ]... [-o ] rollback (-r )... [-p ]... [-o ] + run-aggregation-query (-r )... [-p ]... [-o ] run-query (-r )... [-p ]... [-o ] datastore1-beta3 --help diff --git a/gen/datastore1_beta3-cli/mkdocs.yml b/gen/datastore1_beta3-cli/mkdocs.yml index d72277d995..f77bc679d6 100644 --- a/gen/datastore1_beta3-cli/mkdocs.yml +++ b/gen/datastore1_beta3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: datastore v4.0.1+20220221 +site_name: datastore v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-datastore1_beta3-cli site_description: A complete library to interact with datastore (protocol v1beta3) @@ -7,15 +7,17 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datastore1_beta3 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_allocate-ids.md', 'Projects', 'Allocate Ids'] -- ['projects_begin-transaction.md', 'Projects', 'Begin Transaction'] -- ['projects_commit.md', 'Projects', 'Commit'] -- ['projects_lookup.md', 'Projects', 'Lookup'] -- ['projects_reserve-ids.md', 'Projects', 'Reserve Ids'] -- ['projects_rollback.md', 'Projects', 'Rollback'] -- ['projects_run-query.md', 'Projects', 'Run Query'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Allocate Ids': 'projects_allocate-ids.md' + - 'Begin Transaction': 'projects_begin-transaction.md' + - 'Commit': 'projects_commit.md' + - 'Lookup': 'projects_lookup.md' + - 'Reserve Ids': 'projects_reserve-ids.md' + - 'Rollback': 'projects_rollback.md' + - 'Run Aggregation Query': 'projects_run-aggregation-query.md' + - 'Run Query': 
'projects_run-query.md' theme: readthedocs diff --git a/gen/datastore1_beta3-cli/src/client.rs b/gen/datastore1_beta3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datastore1_beta3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datastore1_beta3-cli/src/main.rs b/gen/datastore1_beta3-cli/src/main.rs index 050461639c..9970d2d62a 100644 --- a/gen/datastore1_beta3-cli/src/main.rs +++ b/gen/datastore1_beta3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datastore1_beta3::{api, Error, oauth2}; +use google_datastore1_beta3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -158,9 +157,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "transaction-options.read-only.read-time" => Some(("transactionOptions.readOnly.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transaction-options.read-write.previous-transaction" => Some(("transactionOptions.readWrite.previousTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["previous-transaction", "read-write", "transaction-options"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["previous-transaction", "read-only", "read-time", "read-write", "transaction-options"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -330,9 +330,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "read-options.read-consistency" => Some(("readOptions.readConsistency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.read-time" => Some(("readOptions.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "read-options.transaction" => Some(("readOptions.transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["read-consistency", "read-options", "transaction"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["read-consistency", "read-options", "read-time", "transaction"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -562,6 +563,119 @@ where } } + async fn _projects_run_aggregation_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "aggregation-query.nested-query.end-cursor" => Some(("aggregationQuery.nestedQuery.endCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.composite-filter.op" => Some(("aggregationQuery.nestedQuery.filter.compositeFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.op" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.property.name" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.property.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.blob-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.blobValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.boolean-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.booleanValue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.double-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.doubleValue", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.entity-value.key.partition-id.namespace-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.entityValue.key.partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.entity-value.key.partition-id.project-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.entityValue.key.partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.exclude-from-indexes" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.excludeFromIndexes", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.geo-point-value.latitude" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.geoPointValue.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.geo-point-value.longitude" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.geoPointValue.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.integer-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.integerValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.key-value.partition-id.namespace-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.keyValue.partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.key-value.partition-id.project-id" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.keyValue.partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.meaning" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.meaning", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.null-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.nullValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"aggregation-query.nested-query.filter.property-filter.value.string-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.stringValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.filter.property-filter.value.timestamp-value" => Some(("aggregationQuery.nestedQuery.filter.propertyFilter.value.timestampValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.limit" => Some(("aggregationQuery.nestedQuery.limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.offset" => Some(("aggregationQuery.nestedQuery.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "aggregation-query.nested-query.start-cursor" => Some(("aggregationQuery.nestedQuery.startCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gql-query.allow-literals" => Some(("gqlQuery.allowLiterals", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "gql-query.query-string" => Some(("gqlQuery.queryString", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "partition-id.namespace-id" => Some(("partitionId.namespaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "partition-id.project-id" => Some(("partitionId.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.read-consistency" => Some(("readOptions.readConsistency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.read-time" => Some(("readOptions.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.transaction" => Some(("readOptions.transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["aggregation-query", "allow-literals", "blob-value", 
"boolean-value", "composite-filter", "double-value", "end-cursor", "entity-value", "exclude-from-indexes", "filter", "geo-point-value", "gql-query", "integer-value", "key", "key-value", "latitude", "limit", "longitude", "meaning", "name", "namespace-id", "nested-query", "null-value", "offset", "op", "partition-id", "project-id", "property", "property-filter", "query-string", "read-consistency", "read-options", "read-time", "start-cursor", "string-value", "timestamp-value", "transaction", "value"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RunAggregationQueryRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().run_aggregation_query(request, opt.value_of("project-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + 
}; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_run_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -612,9 +726,10 @@ where "query.offset" => Some(("query.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "query.start-cursor" => Some(("query.startCursor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "read-options.read-consistency" => Some(("readOptions.readConsistency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-options.read-time" => Some(("readOptions.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "read-options.transaction" => Some(("readOptions.transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-literals", "blob-value", "boolean-value", "composite-filter", "double-value", "end-cursor", "entity-value", "exclude-from-indexes", "filter", "geo-point-value", "gql-query", "integer-value", "key", "key-value", "latitude", "limit", "longitude", "meaning", "name", "namespace-id", "null-value", "offset", "op", "partition-id", "project-id", "property", "property-filter", "query", "query-string", "read-consistency", "read-options", "start-cursor", "string-value", "timestamp-value", "transaction", "value"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-literals", "blob-value", "boolean-value", "composite-filter", "double-value", "end-cursor", "entity-value", "exclude-from-indexes", "filter", 
"geo-point-value", "gql-query", "integer-value", "key", "key-value", "latitude", "limit", "longitude", "meaning", "name", "namespace-id", "null-value", "offset", "op", "partition-id", "project-id", "property", "property-filter", "query", "query-string", "read-consistency", "read-options", "read-time", "start-cursor", "string-value", "timestamp-value", "transaction", "value"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -699,6 +814,9 @@ where ("rollback", Some(opt)) => { call_result = self._projects_rollback(opt, dry_run, &mut err).await; }, + ("run-aggregation-query", Some(opt)) => { + call_result = self._projects_run_aggregation_query(opt, dry_run, &mut err).await; + }, ("run-query", Some(opt)) => { call_result = self._projects_run_query(opt, dry_run, &mut err).await; }, @@ -781,7 +899,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'allocate-ids', 'begin-transaction', 'commit', 'lookup', 'reserve-ids', 'rollback' and 'run-query'", vec![ + ("projects", "methods: 'allocate-ids', 'begin-transaction', 'commit', 'lookup', 'reserve-ids', 'rollback', 'run-aggregation-query' and 'run-query'", vec![ ("allocate-ids", Some(r##"Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted."##), "Details at http://byron.github.io/google-apis-rs/google_datastore1_beta3_cli/projects_allocate-ids", @@ -944,6 +1062,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("run-aggregation-query", + Some(r##"Runs an aggregation query."##), + "Details at http://byron.github.io/google-apis-rs/google_datastore1_beta3_cli/projects_run-aggregation-query", + vec![ + (Some(r##"project-id"##), + None, + Some(r##"Required. 
The ID of the project against which to make the request."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -984,7 +1130,7 @@ async fn main() { let mut app = App::new("datastore1-beta3") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20230118") .about("Accesses the schemaless NoSQL database to provide fully managed, robust, scalable storage for your application. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datastore1_beta3_cli") .arg(Arg::with_name("url") diff --git a/gen/datastore1_beta3/Cargo.toml b/gen/datastore1_beta3/Cargo.toml index 1d3c18996d..797b1c413b 100644 --- a/gen/datastore1_beta3/Cargo.toml +++ b/gen/datastore1_beta3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datastore1_beta3" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with datastore (protocol v1beta3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datastore1_beta3" homepage = "https://cloud.google.com/datastore/" -documentation = "https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-datastore1_beta3/5.0.2+20230118" license = "MIT" keywords = ["datastore", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datastore1_beta3/README.md b/gen/datastore1_beta3/README.md index 4dcb216d2b..17176e1278 100644 --- a/gen/datastore1_beta3/README.md +++ b/gen/datastore1_beta3/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-datastore1_beta3` library allows access to all features of the *Google datastore* service. -This documentation was generated from *datastore* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *datastore:v1beta3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *datastore* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *datastore:v1beta3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *datastore* *v1_beta3* API can be found at the [official documentation site](https://cloud.google.com/datastore/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/Datastore) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/Datastore) ... 
* projects - * [*allocate ids*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/api::ProjectAllocateIdCall), [*begin transaction*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/api::ProjectBeginTransactionCall), [*commit*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/api::ProjectCommitCall), [*lookup*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/api::ProjectLookupCall), [*reserve ids*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/api::ProjectReserveIdCall), [*rollback*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/api::ProjectRollbackCall), [*run aggregation query*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/api::ProjectRunAggregationQueryCall) and [*run query*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/api::ProjectRunQueryCall) + * [*allocate ids*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/api::ProjectAllocateIdCall), [*begin transaction*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/api::ProjectBeginTransactionCall), [*commit*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/api::ProjectCommitCall), [*lookup*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/api::ProjectLookupCall), [*reserve ids*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/api::ProjectReserveIdCall), [*rollback*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/api::ProjectRollbackCall), [*run aggregation query*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/api::ProjectRunAggregationQueryCall) and [*run 
query*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/api::ProjectRunQueryCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/Datastore)** +* **[Hub](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/Datastore)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::CallBuilder) -* **[Resources](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::CallBuilder) +* **[Resources](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::Part)** + * **[Parts](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::Delegate) to the -[Method Builder](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::Delegate) to the +[Method Builder](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::RequestValue) and -[decodable](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::RequestValue) and +[decodable](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datastore1_beta3/5.0.2-beta-1+20230118/google_datastore1_beta3/client::RequestValue) are moved +* [request values](https://docs.rs/google-datastore1_beta3/5.0.2+20230118/google_datastore1_beta3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/datastore1_beta3/src/api.rs b/gen/datastore1_beta3/src/api.rs index 595c9c78dc..37f9a614fe 100644 --- a/gen/datastore1_beta3/src/api.rs +++ b/gen/datastore1_beta3/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Datastore { Datastore { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datastore.googleapis.com/".to_string(), _root_url: "https://datastore.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Datastore { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datastore1_beta3/src/client.rs b/gen/datastore1_beta3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datastore1_beta3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datastore1_beta3/src/lib.rs b/gen/datastore1_beta3/src/lib.rs index b03911d15e..599c4e0188 100644 --- a/gen/datastore1_beta3/src/lib.rs +++ b/gen/datastore1_beta3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *datastore* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *datastore:v1beta3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *datastore* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *datastore:v1beta3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *datastore* *v1_beta3* API can be found at the //! [official documentation site](https://cloud.google.com/datastore/). diff --git a/gen/datastream1-cli/Cargo.toml b/gen/datastream1-cli/Cargo.toml index 89bc05caa5..912c6d06ee 100644 --- a/gen/datastream1-cli/Cargo.toml +++ b/gen/datastream1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-datastream1-cli" -version = "4.0.1+20220207" +version = "5.0.2+20230111" authors = ["Sebastian Thiel "] description = "A complete library to interact with Datastream (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datastream1-cli" @@ -20,13 +20,13 @@ name = "datastream1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-datastream1] path = "../datastream1" -version = "4.0.1+20220207" +version = "5.0.2+20230111" + diff --git a/gen/datastream1-cli/README.md b/gen/datastream1-cli/README.md index f354e9679a..7d8fe03b75 100644 --- a/gen/datastream1-cli/README.md +++ b/gen/datastream1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Datastream* API at revision *20220207*. The CLI is at version *4.0.1*. +This documentation was generated from the *Datastream* API at revision *20230111*. The CLI is at version *5.0.2*. ```bash datastream1 [options] diff --git a/gen/datastream1-cli/mkdocs.yml b/gen/datastream1-cli/mkdocs.yml index b12d959a7d..4921a6ce47 100644 --- a/gen/datastream1-cli/mkdocs.yml +++ b/gen/datastream1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Datastream v4.0.1+20220207 +site_name: Datastream v5.0.2+20230111 site_url: http://byron.github.io/google-apis-rs/google-datastream1-cli site_description: A complete library to interact with Datastream (protocol v1) @@ -7,39 +7,40 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/datastream1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-connection-profiles-create.md', 'Projects', 'Locations Connection Profiles Create'] -- ['projects_locations-connection-profiles-delete.md', 'Projects', 'Locations Connection Profiles Delete'] -- ['projects_locations-connection-profiles-discover.md', 'Projects', 'Locations Connection Profiles Discover'] -- ['projects_locations-connection-profiles-get.md', 'Projects', 'Locations Connection Profiles Get'] -- ['projects_locations-connection-profiles-list.md', 'Projects', 'Locations Connection Profiles List'] -- ['projects_locations-connection-profiles-patch.md', 'Projects', 'Locations Connection Profiles Patch'] -- ['projects_locations-fetch-static-ips.md', 'Projects', 'Locations Fetch Static Ips'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 
'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-private-connections-create.md', 'Projects', 'Locations Private Connections Create'] -- ['projects_locations-private-connections-delete.md', 'Projects', 'Locations Private Connections Delete'] -- ['projects_locations-private-connections-get.md', 'Projects', 'Locations Private Connections Get'] -- ['projects_locations-private-connections-list.md', 'Projects', 'Locations Private Connections List'] -- ['projects_locations-private-connections-routes-create.md', 'Projects', 'Locations Private Connections Routes Create'] -- ['projects_locations-private-connections-routes-delete.md', 'Projects', 'Locations Private Connections Routes Delete'] -- ['projects_locations-private-connections-routes-get.md', 'Projects', 'Locations Private Connections Routes Get'] -- ['projects_locations-private-connections-routes-list.md', 'Projects', 'Locations Private Connections Routes List'] -- ['projects_locations-streams-create.md', 'Projects', 'Locations Streams Create'] -- ['projects_locations-streams-delete.md', 'Projects', 'Locations Streams Delete'] -- ['projects_locations-streams-get.md', 'Projects', 'Locations Streams Get'] -- ['projects_locations-streams-list.md', 'Projects', 'Locations Streams List'] -- ['projects_locations-streams-objects-get.md', 'Projects', 'Locations Streams Objects Get'] -- ['projects_locations-streams-objects-list.md', 'Projects', 'Locations Streams Objects List'] -- ['projects_locations-streams-objects-lookup.md', 'Projects', 'Locations Streams Objects Lookup'] -- ['projects_locations-streams-objects-start-backfill-job.md', 'Projects', 'Locations Streams Objects Start Backfill Job'] -- ['projects_locations-streams-objects-stop-backfill-job.md', 'Projects', 'Locations Streams Objects Stop Backfill Job'] -- ['projects_locations-streams-patch.md', 'Projects', 'Locations Streams Patch'] +nav: +- Home: 'index.md' +- 
'Projects': + - 'Locations Connection Profiles Create': 'projects_locations-connection-profiles-create.md' + - 'Locations Connection Profiles Delete': 'projects_locations-connection-profiles-delete.md' + - 'Locations Connection Profiles Discover': 'projects_locations-connection-profiles-discover.md' + - 'Locations Connection Profiles Get': 'projects_locations-connection-profiles-get.md' + - 'Locations Connection Profiles List': 'projects_locations-connection-profiles-list.md' + - 'Locations Connection Profiles Patch': 'projects_locations-connection-profiles-patch.md' + - 'Locations Fetch Static Ips': 'projects_locations-fetch-static-ips.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Private Connections Create': 'projects_locations-private-connections-create.md' + - 'Locations Private Connections Delete': 'projects_locations-private-connections-delete.md' + - 'Locations Private Connections Get': 'projects_locations-private-connections-get.md' + - 'Locations Private Connections List': 'projects_locations-private-connections-list.md' + - 'Locations Private Connections Routes Create': 'projects_locations-private-connections-routes-create.md' + - 'Locations Private Connections Routes Delete': 'projects_locations-private-connections-routes-delete.md' + - 'Locations Private Connections Routes Get': 'projects_locations-private-connections-routes-get.md' + - 'Locations Private Connections Routes List': 'projects_locations-private-connections-routes-list.md' + - 'Locations Streams Create': 'projects_locations-streams-create.md' + - 'Locations Streams Delete': 'projects_locations-streams-delete.md' + - 'Locations 
Streams Get': 'projects_locations-streams-get.md' + - 'Locations Streams List': 'projects_locations-streams-list.md' + - 'Locations Streams Objects Get': 'projects_locations-streams-objects-get.md' + - 'Locations Streams Objects List': 'projects_locations-streams-objects-list.md' + - 'Locations Streams Objects Lookup': 'projects_locations-streams-objects-lookup.md' + - 'Locations Streams Objects Start Backfill Job': 'projects_locations-streams-objects-start-backfill-job.md' + - 'Locations Streams Objects Stop Backfill Job': 'projects_locations-streams-objects-stop-backfill-job.md' + - 'Locations Streams Patch': 'projects_locations-streams-patch.md' theme: readthedocs diff --git a/gen/datastream1-cli/src/client.rs b/gen/datastream1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/datastream1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/datastream1-cli/src/main.rs b/gen/datastream1-cli/src/main.rs index bc511b0489..1b38e8c8e5 100644 --- a/gen/datastream1-cli/src/main.rs +++ b/gen/datastream1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_datastream1::{api, Error, oauth2}; +use google_datastream1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -101,10 +100,15 @@ where "oracle-profile.password" => Some(("oracleProfile.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "oracle-profile.port" => Some(("oracleProfile.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "oracle-profile.username" => Some(("oracleProfile.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql-profile.database" => Some(("postgresqlProfile.database", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql-profile.hostname" => Some(("postgresqlProfile.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql-profile.password" => Some(("postgresqlProfile.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql-profile.port" => Some(("postgresqlProfile.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "postgresql-profile.username" => Some(("postgresqlProfile.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"private-connectivity.private-connection" => Some(("privateConnectivity.privateConnection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket", "ca-certificate", "ca-certificate-set", "client-certificate", "client-certificate-set", "client-key", "client-key-set", "connection-attributes", "create-time", "database-service", "display-name", "forward-ssh-connectivity", "gcs-profile", "hostname", "labels", "mysql-profile", "name", "oracle-profile", "password", "port", "private-connection", "private-connectivity", "private-key", "root-path", "ssl-config", "update-time", "username"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket", "ca-certificate", "ca-certificate-set", "client-certificate", "client-certificate-set", "client-key", "client-key-set", "connection-attributes", "create-time", "database", "database-service", "display-name", "forward-ssh-connectivity", "gcs-profile", "hostname", "labels", "mysql-profile", "name", "oracle-profile", "password", "port", "postgresql-profile", "private-connection", "private-connectivity", "private-key", "root-path", "ssl-config", "update-time", "username"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -119,13 +123,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, 
err, "force", "boolean")).unwrap_or(false)); }, "connection-profile-id" => { call = call.connection_profile_id(value.unwrap_or("")); @@ -283,13 +287,18 @@ where "connection-profile.oracle-profile.password" => Some(("connectionProfile.oracleProfile.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-profile.oracle-profile.port" => Some(("connectionProfile.oracleProfile.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "connection-profile.oracle-profile.username" => Some(("connectionProfile.oracleProfile.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "connection-profile.postgresql-profile.database" => Some(("connectionProfile.postgresqlProfile.database", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "connection-profile.postgresql-profile.hostname" => Some(("connectionProfile.postgresqlProfile.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "connection-profile.postgresql-profile.password" => Some(("connectionProfile.postgresqlProfile.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "connection-profile.postgresql-profile.port" => Some(("connectionProfile.postgresqlProfile.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "connection-profile.postgresql-profile.username" => Some(("connectionProfile.postgresqlProfile.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-profile.private-connectivity.private-connection" => Some(("connectionProfile.privateConnectivity.privateConnection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-profile.update-time" => Some(("connectionProfile.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "connection-profile-name" => Some(("connectionProfileName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"full-hierarchy" => Some(("fullHierarchy", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "hierarchy-depth" => Some(("hierarchyDepth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket", "ca-certificate", "ca-certificate-set", "client-certificate", "client-certificate-set", "client-key", "client-key-set", "connection-attributes", "connection-profile", "connection-profile-name", "create-time", "database-service", "display-name", "forward-ssh-connectivity", "full-hierarchy", "gcs-profile", "hierarchy-depth", "hostname", "labels", "mysql-profile", "name", "oracle-profile", "password", "port", "private-connection", "private-connectivity", "private-key", "root-path", "ssl-config", "update-time", "username"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket", "ca-certificate", "ca-certificate-set", "client-certificate", "client-certificate-set", "client-key", "client-key-set", "connection-attributes", "connection-profile", "connection-profile-name", "create-time", "database", "database-service", "display-name", "forward-ssh-connectivity", "full-hierarchy", "gcs-profile", "hierarchy-depth", "hostname", "labels", "mysql-profile", "name", "oracle-profile", "password", "port", "postgresql-profile", "private-connection", "private-connectivity", "private-key", "root-path", "ssl-config", "update-time", "username"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -411,7 +420,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -516,10 +525,15 @@ where "oracle-profile.password" => 
Some(("oracleProfile.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "oracle-profile.port" => Some(("oracleProfile.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "oracle-profile.username" => Some(("oracleProfile.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql-profile.database" => Some(("postgresqlProfile.database", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql-profile.hostname" => Some(("postgresqlProfile.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql-profile.password" => Some(("postgresqlProfile.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "postgresql-profile.port" => Some(("postgresqlProfile.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "postgresql-profile.username" => Some(("postgresqlProfile.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-connectivity.private-connection" => Some(("privateConnectivity.privateConnection", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket", "ca-certificate", "ca-certificate-set", "client-certificate", "client-certificate-set", "client-key", "client-key-set", "connection-attributes", "create-time", "database-service", "display-name", "forward-ssh-connectivity", "gcs-profile", "hostname", "labels", "mysql-profile", "name", "oracle-profile", "password", "port", "private-connection", "private-connectivity", "private-key", "root-path", "ssl-config", "update-time", "username"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bucket", "ca-certificate", "ca-certificate-set", "client-certificate", "client-certificate-set", "client-key", 
"client-key-set", "connection-attributes", "create-time", "database", "database-service", "display-name", "forward-ssh-connectivity", "gcs-profile", "hostname", "labels", "mysql-profile", "name", "oracle-profile", "password", "port", "postgresql-profile", "private-connection", "private-connectivity", "private-key", "root-path", "ssl-config", "update-time", "username"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -534,16 +548,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -602,7 +616,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -713,7 +727,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -963,7 +977,7 
@@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1072,6 +1086,9 @@ where "private-connection-id" => { call = call.private_connection_id(value.unwrap_or("")); }, + "force" => { + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -1085,7 +1102,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["private-connection-id", "request-id"].iter().map(|v|*v)); + v.extend(["force", "private-connection-id", "request-id"].iter().map(|v|*v)); v } )); } } @@ -1129,7 +1146,7 @@ where call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1240,7 +1257,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1511,7 +1528,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1591,6 +1608,11 @@ where match &temp_cursor.to_string()[..] 
{ "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "customer-managed-encryption-key" => Some(("customerManagedEncryptionKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.data-freshness" => Some(("destinationConfig.bigqueryDestinationConfig.dataFreshness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.single-target-dataset.dataset-id" => Some(("destinationConfig.bigqueryDestinationConfig.singleTargetDataset.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.source-hierarchy-datasets.dataset-template.dataset-id-prefix" => Some(("destinationConfig.bigqueryDestinationConfig.sourceHierarchyDatasets.datasetTemplate.datasetIdPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.source-hierarchy-datasets.dataset-template.kms-key-name" => Some(("destinationConfig.bigqueryDestinationConfig.sourceHierarchyDatasets.datasetTemplate.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.source-hierarchy-datasets.dataset-template.location" => Some(("destinationConfig.bigqueryDestinationConfig.sourceHierarchyDatasets.datasetTemplate.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-config.destination-connection-profile" => Some(("destinationConfig.destinationConnectionProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-config.gcs-destination-config.file-rotation-interval" => Some(("destinationConfig.gcsDestinationConfig.fileRotationInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-config.gcs-destination-config.file-rotation-mb" 
=> Some(("destinationConfig.gcsDestinationConfig.fileRotationMb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -1600,11 +1622,15 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-config.mysql-source-config.max-concurrent-cdc-tasks" => Some(("sourceConfig.mysqlSourceConfig.maxConcurrentCdcTasks", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "source-config.oracle-source-config.max-concurrent-cdc-tasks" => Some(("sourceConfig.oracleSourceConfig.maxConcurrentCdcTasks", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "source-config.postgresql-source-config.publication" => Some(("sourceConfig.postgresqlSourceConfig.publication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-config.postgresql-source-config.replication-slot" => Some(("sourceConfig.postgresqlSourceConfig.replicationSlot", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-config.source-connection-profile" => Some(("sourceConfig.sourceConnectionProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["compression", "create-time", "customer-managed-encryption-key", "destination-config", "destination-connection-profile", "display-name", "file-rotation-interval", "file-rotation-mb", "gcs-destination-config", "json-file-format", "labels", "name", "path", "schema-file-format", "source-config", "source-connection-profile", "state", "update-time"]); 
+ let suggestion = FieldCursor::did_you_mean(key, &vec!["bigquery-destination-config", "compression", "create-time", "customer-managed-encryption-key", "data-freshness", "dataset-id", "dataset-id-prefix", "dataset-template", "destination-config", "destination-connection-profile", "display-name", "file-rotation-interval", "file-rotation-mb", "gcs-destination-config", "json-file-format", "kms-key-name", "labels", "location", "max-concurrent-cdc-tasks", "mysql-source-config", "name", "oracle-source-config", "path", "postgresql-source-config", "publication", "replication-slot", "schema-file-format", "single-target-dataset", "source-config", "source-connection-profile", "source-hierarchy-datasets", "state", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1619,7 +1645,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "stream-id" => { call = call.stream_id(value.unwrap_or("")); @@ -1628,7 +1654,7 @@ where call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1795,7 +1821,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1912,7 +1938,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call 
= call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1988,8 +2014,10 @@ where "source-object-identifier.mysql-identifier.table" => Some(("sourceObjectIdentifier.mysqlIdentifier.table", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-object-identifier.oracle-identifier.schema" => Some(("sourceObjectIdentifier.oracleIdentifier.schema", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-object-identifier.oracle-identifier.table" => Some(("sourceObjectIdentifier.oracleIdentifier.table", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-object-identifier.postgresql-identifier.schema" => Some(("sourceObjectIdentifier.postgresqlIdentifier.schema", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-object-identifier.postgresql-identifier.table" => Some(("sourceObjectIdentifier.postgresqlIdentifier.table", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["database", "mysql-identifier", "oracle-identifier", "schema", "source-object-identifier", "table"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["database", "mysql-identifier", "oracle-identifier", "postgresql-identifier", "schema", "source-object-identifier", "table"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2242,6 +2270,11 @@ where match &temp_cursor.to_string()[..] 
{ "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "customer-managed-encryption-key" => Some(("customerManagedEncryptionKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.data-freshness" => Some(("destinationConfig.bigqueryDestinationConfig.dataFreshness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.single-target-dataset.dataset-id" => Some(("destinationConfig.bigqueryDestinationConfig.singleTargetDataset.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.source-hierarchy-datasets.dataset-template.dataset-id-prefix" => Some(("destinationConfig.bigqueryDestinationConfig.sourceHierarchyDatasets.datasetTemplate.datasetIdPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.source-hierarchy-datasets.dataset-template.kms-key-name" => Some(("destinationConfig.bigqueryDestinationConfig.sourceHierarchyDatasets.datasetTemplate.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-config.bigquery-destination-config.source-hierarchy-datasets.dataset-template.location" => Some(("destinationConfig.bigqueryDestinationConfig.sourceHierarchyDatasets.datasetTemplate.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-config.destination-connection-profile" => Some(("destinationConfig.destinationConnectionProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-config.gcs-destination-config.file-rotation-interval" => Some(("destinationConfig.gcsDestinationConfig.fileRotationInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-config.gcs-destination-config.file-rotation-mb" 
=> Some(("destinationConfig.gcsDestinationConfig.fileRotationMb", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -2251,11 +2284,15 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-config.mysql-source-config.max-concurrent-cdc-tasks" => Some(("sourceConfig.mysqlSourceConfig.maxConcurrentCdcTasks", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "source-config.oracle-source-config.max-concurrent-cdc-tasks" => Some(("sourceConfig.oracleSourceConfig.maxConcurrentCdcTasks", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "source-config.postgresql-source-config.publication" => Some(("sourceConfig.postgresqlSourceConfig.publication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-config.postgresql-source-config.replication-slot" => Some(("sourceConfig.postgresqlSourceConfig.replicationSlot", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-config.source-connection-profile" => Some(("sourceConfig.sourceConnectionProfile", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["compression", "create-time", "customer-managed-encryption-key", "destination-config", "destination-connection-profile", "display-name", "file-rotation-interval", "file-rotation-mb", "gcs-destination-config", "json-file-format", "labels", "name", "path", "schema-file-format", "source-config", "source-connection-profile", "state", "update-time"]); 
+ let suggestion = FieldCursor::did_you_mean(key, &vec!["bigquery-destination-config", "compression", "create-time", "customer-managed-encryption-key", "data-freshness", "dataset-id", "dataset-id-prefix", "dataset-template", "destination-config", "destination-connection-profile", "display-name", "file-rotation-interval", "file-rotation-mb", "gcs-destination-config", "json-file-format", "kms-key-name", "labels", "location", "max-concurrent-cdc-tasks", "mysql-source-config", "name", "oracle-source-config", "path", "postgresql-source-config", "publication", "replication-slot", "schema-file-format", "single-target-dataset", "source-config", "source-connection-profile", "source-hierarchy-datasets", "state", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2270,16 +2307,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3262,7 +3299,7 @@ async fn main() { let mut app = App::new("datastream1") .author("Sebastian Thiel ") - .version("4.0.1+20220207") + .version("5.0.2+20230111") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_datastream1_cli") 
.arg(Arg::with_name("url") diff --git a/gen/datastream1/Cargo.toml b/gen/datastream1/Cargo.toml index 37bf526457..f1822fe78b 100644 --- a/gen/datastream1/Cargo.toml +++ b/gen/datastream1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-datastream1" -version = "5.0.2-beta-1+20230111" +version = "5.0.2+20230111" authors = ["Sebastian Thiel "] description = "A complete library to interact with Datastream (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/datastream1" homepage = "https://cloud.google.com/datastream/" -documentation = "https://docs.rs/google-datastream1/5.0.2-beta-1+20230111" +documentation = "https://docs.rs/google-datastream1/5.0.2+20230111" license = "MIT" keywords = ["datastream", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/datastream1/README.md b/gen/datastream1/README.md index 56b05c747d..56842bf2fb 100644 --- a/gen/datastream1/README.md +++ b/gen/datastream1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-datastream1` library allows access to all features of the *Google Datastream* service. -This documentation was generated from *Datastream* crate version *5.0.2-beta-1+20230111*, where *20230111* is the exact revision of the *datastream:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Datastream* crate version *5.0.2+20230111*, where *20230111* is the exact revision of the *datastream:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Datastream* *v1* API can be found at the [official documentation site](https://cloud.google.com/datastream/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/Datastream) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/Datastream) ... * projects - * [*locations connection profiles create*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationConnectionProfileCreateCall), [*locations connection profiles delete*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationConnectionProfileDeleteCall), [*locations connection profiles discover*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationConnectionProfileDiscoverCall), [*locations connection profiles get*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationConnectionProfileGetCall), [*locations connection profiles list*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationConnectionProfileListCall), [*locations connection profiles patch*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationConnectionProfilePatchCall), [*locations fetch static ips*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationFetchStaticIpCall), [*locations get*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationOperationDeleteCall), [*locations operations 
get*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationOperationListCall), [*locations private connections create*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationPrivateConnectionCreateCall), [*locations private connections delete*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationPrivateConnectionDeleteCall), [*locations private connections get*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationPrivateConnectionGetCall), [*locations private connections list*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationPrivateConnectionListCall), [*locations private connections routes create*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationPrivateConnectionRouteCreateCall), [*locations private connections routes delete*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationPrivateConnectionRouteDeleteCall), [*locations private connections routes get*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationPrivateConnectionRouteGetCall), [*locations private connections routes list*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationPrivateConnectionRouteListCall), [*locations streams create*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamCreateCall), [*locations streams delete*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamDeleteCall), [*locations streams 
get*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamGetCall), [*locations streams list*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamListCall), [*locations streams objects get*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamObjectGetCall), [*locations streams objects list*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamObjectListCall), [*locations streams objects lookup*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamObjectLookupCall), [*locations streams objects start backfill job*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamObjectStartBackfillJobCall), [*locations streams objects stop backfill job*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamObjectStopBackfillJobCall) and [*locations streams patch*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/api::ProjectLocationStreamPatchCall) + * [*locations connection profiles create*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationConnectionProfileCreateCall), [*locations connection profiles delete*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationConnectionProfileDeleteCall), [*locations connection profiles discover*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationConnectionProfileDiscoverCall), [*locations connection profiles get*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationConnectionProfileGetCall), [*locations connection profiles 
list*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationConnectionProfileListCall), [*locations connection profiles patch*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationConnectionProfilePatchCall), [*locations fetch static ips*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationFetchStaticIpCall), [*locations get*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationOperationListCall), [*locations private connections create*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationPrivateConnectionCreateCall), [*locations private connections delete*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationPrivateConnectionDeleteCall), [*locations private connections get*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationPrivateConnectionGetCall), [*locations private connections list*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationPrivateConnectionListCall), [*locations private connections routes 
create*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationPrivateConnectionRouteCreateCall), [*locations private connections routes delete*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationPrivateConnectionRouteDeleteCall), [*locations private connections routes get*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationPrivateConnectionRouteGetCall), [*locations private connections routes list*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationPrivateConnectionRouteListCall), [*locations streams create*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamCreateCall), [*locations streams delete*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamDeleteCall), [*locations streams get*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamGetCall), [*locations streams list*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamListCall), [*locations streams objects get*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamObjectGetCall), [*locations streams objects list*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamObjectListCall), [*locations streams objects lookup*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamObjectLookupCall), [*locations streams objects start backfill job*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamObjectStartBackfillJobCall), [*locations streams objects stop backfill job*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamObjectStopBackfillJobCall) and [*locations streams 
patch*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/api::ProjectLocationStreamPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/Datastream)** +* **[Hub](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/Datastream)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::CallBuilder) -* **[Resources](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::CallBuilder) +* **[Resources](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::Part)** + * **[Parts](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable 
traits to further categorize them and ease browsing. @@ -138,17 +138,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -158,29 +158,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::Delegate) to the -[Method Builder](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::Delegate) to the +[Method Builder](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::RequestValue) and -[decodable](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::RequestValue) and +[decodable](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-datastream1/5.0.2-beta-1+20230111/google_datastream1/client::RequestValue) are moved +* [request values](https://docs.rs/google-datastream1/5.0.2+20230111/google_datastream1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/datastream1/src/api.rs b/gen/datastream1/src/api.rs index a038d64a00..a82b83e75d 100644 --- a/gen/datastream1/src/api.rs +++ b/gen/datastream1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Datastream { Datastream { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://datastream.googleapis.com/".to_string(), _root_url: "https://datastream.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Datastream { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/datastream1/src/client.rs b/gen/datastream1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/datastream1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/datastream1/src/lib.rs b/gen/datastream1/src/lib.rs index 779022f664..b4d232dc7a 100644 --- a/gen/datastream1/src/lib.rs +++ b/gen/datastream1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Datastream* crate version *5.0.2-beta-1+20230111*, where *20230111* is the exact revision of the *datastream:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Datastream* crate version *5.0.2+20230111*, where *20230111* is the exact revision of the *datastream:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Datastream* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/datastream/). diff --git a/gen/deploymentmanager2-cli/Cargo.toml b/gen/deploymentmanager2-cli/Cargo.toml index 088339306a..f6e30ce797 100644 --- a/gen/deploymentmanager2-cli/Cargo.toml +++ b/gen/deploymentmanager2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-deploymentmanager2-cli" -version = "4.0.1+20220225" +version = "5.0.2+20221208" authors = ["Sebastian Thiel "] description = "A complete library to interact with Deployment Manager (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/deploymentmanager2-cli" @@ -20,13 +20,13 @@ name = "deploymentmanager2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-deploymentmanager2] path = "../deploymentmanager2" -version = "4.0.1+20220225" +version = "5.0.2+20221208" + diff --git a/gen/deploymentmanager2-cli/README.md b/gen/deploymentmanager2-cli/README.md index 12f3cd9c59..e783f95ce8 100644 --- a/gen/deploymentmanager2-cli/README.md +++ b/gen/deploymentmanager2-cli/README.md @@ 
-25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Deployment Manager* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Deployment Manager* API at revision *20221208*. The CLI is at version *5.0.2*. ```bash deploymentmanager2 [options] diff --git a/gen/deploymentmanager2-cli/mkdocs.yml b/gen/deploymentmanager2-cli/mkdocs.yml index 0a56709cbf..cfa68e3734 100644 --- a/gen/deploymentmanager2-cli/mkdocs.yml +++ b/gen/deploymentmanager2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Deployment Manager v4.0.1+20220225 +site_name: Deployment Manager v5.0.2+20221208 site_url: http://byron.github.io/google-apis-rs/google-deploymentmanager2-cli site_description: A complete library to interact with Deployment Manager (protocol v2) @@ -7,26 +7,31 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/deploymentmanage docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['deployments_cancel-preview.md', 'Deployments', 'Cancel Preview'] -- ['deployments_delete.md', 'Deployments', 'Delete'] -- ['deployments_get.md', 'Deployments', 'Get'] -- ['deployments_get-iam-policy.md', 'Deployments', 'Get Iam Policy'] -- ['deployments_insert.md', 'Deployments', 'Insert'] -- ['deployments_list.md', 'Deployments', 'List'] -- ['deployments_patch.md', 'Deployments', 'Patch'] -- ['deployments_set-iam-policy.md', 'Deployments', 'Set Iam Policy'] -- ['deployments_stop.md', 'Deployments', 'Stop'] -- ['deployments_test-iam-permissions.md', 'Deployments', 'Test Iam Permissions'] -- ['deployments_update.md', 'Deployments', 'Update'] -- ['manifests_get.md', 'Manifests', 'Get'] -- ['manifests_list.md', 'Manifests', 'List'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['resources_get.md', 'Resources', 'Get'] -- ['resources_list.md', 'Resources', 'List'] -- ['types_list.md', 
'Types', 'List'] +nav: +- Home: 'index.md' +- 'Deployments': + - 'Cancel Preview': 'deployments_cancel-preview.md' + - 'Delete': 'deployments_delete.md' + - 'Get': 'deployments_get.md' + - 'Get Iam Policy': 'deployments_get-iam-policy.md' + - 'Insert': 'deployments_insert.md' + - 'List': 'deployments_list.md' + - 'Patch': 'deployments_patch.md' + - 'Set Iam Policy': 'deployments_set-iam-policy.md' + - 'Stop': 'deployments_stop.md' + - 'Test Iam Permissions': 'deployments_test-iam-permissions.md' + - 'Update': 'deployments_update.md' +- 'Manifests': + - 'Get': 'manifests_get.md' + - 'List': 'manifests_list.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Resources': + - 'Get': 'resources_get.md' + - 'List': 'resources_list.md' +- 'Types': + - 'List': 'types_list.md' theme: readthedocs diff --git a/gen/deploymentmanager2-cli/src/client.rs b/gen/deploymentmanager2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/deploymentmanager2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/deploymentmanager2-cli/src/main.rs b/gen/deploymentmanager2-cli/src/main.rs index db99c5dc69..8996b27356 100644 --- a/gen/deploymentmanager2-cli/src/main.rs +++ b/gen/deploymentmanager2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_deploymentmanager2::{api, Error, oauth2}; +use google_deploymentmanager2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -251,7 +250,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -372,7 +371,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "preview" => { - call = call.preview(arg_from_str(value.unwrap_or("false"), err, "preview", "boolean")); + call = call.preview( value.map(|v| arg_from_str(v, err, "preview", "boolean")).unwrap_or(false)); }, "create-policy" => { call = call.create_policy(value.unwrap_or("")); @@ -437,7 +436,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, 
err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -561,7 +560,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "preview" => { - call = call.preview(arg_from_str(value.unwrap_or("false"), err, "preview", "boolean")); + call = call.preview( value.map(|v| arg_from_str(v, err, "preview", "boolean")).unwrap_or(false)); }, "delete-policy" => { call = call.delete_policy(value.unwrap_or("")); @@ -945,7 +944,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "preview" => { - call = call.preview(arg_from_str(value.unwrap_or("false"), err, "preview", "boolean")); + call = call.preview( value.map(|v| arg_from_str(v, err, "preview", "boolean")).unwrap_or(false)); }, "delete-policy" => { call = call.delete_policy(value.unwrap_or("")); @@ -1065,7 +1064,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1182,7 +1181,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1299,7 +1298,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1364,7 +1363,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2141,7 +2140,7 @@ async fn main() { let mut app = App::new("deploymentmanager2") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20221208") .about("The Google Cloud Deployment Manager v2 API provides services for configuring, deploying, and viewing Google Cloud services and APIs via templates which specify deployments of Cloud resources.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_deploymentmanager2_cli") .arg(Arg::with_name("url") diff --git a/gen/deploymentmanager2/Cargo.toml b/gen/deploymentmanager2/Cargo.toml index df030c6f47..3c5fd013f0 100644 --- a/gen/deploymentmanager2/Cargo.toml +++ b/gen/deploymentmanager2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-deploymentmanager2" -version = "5.0.2-beta-1+20221208" +version = "5.0.2+20221208" authors = ["Sebastian Thiel "] description = "A complete library to interact with Deployment Manager (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/deploymentmanager2" homepage = "https://cloud.google.com/deployment-manager" -documentation = "https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208" +documentation = "https://docs.rs/google-deploymentmanager2/5.0.2+20221208" license = "MIT" keywords = ["deploymentmanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/deploymentmanager2/README.md b/gen/deploymentmanager2/README.md index 3aaf17f19e..0fd240a2c8 100644 --- a/gen/deploymentmanager2/README.md +++ b/gen/deploymentmanager2/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-deploymentmanager2` library allows access to all features of the *Google Deployment Manager* service. 
-This documentation was generated from *Deployment Manager* crate version *5.0.2-beta-1+20221208*, where *20221208* is the exact revision of the *deploymentmanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Deployment Manager* crate version *5.0.2+20221208*, where *20221208* is the exact revision of the *deploymentmanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Deployment Manager* *v2* API can be found at the [official documentation site](https://cloud.google.com/deployment-manager). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/DeploymentManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/DeploymentManager) ... 
-* [deployments](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::Deployment) - * [*cancel preview*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentCancelPreviewCall), [*delete*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentDeleteCall), [*get*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentGetCall), [*get iam policy*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentGetIamPolicyCall), [*insert*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentInsertCall), [*list*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentListCall), [*patch*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentPatchCall), [*set iam policy*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentSetIamPolicyCall), [*stop*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentStopCall), [*test iam permissions*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentTestIamPermissionCall) and [*update*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::DeploymentUpdateCall) -* [manifests](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::Manifest) - * [*get*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::ManifestGetCall) and [*list*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::ManifestListCall) -* 
[operations](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::Operation) - * [*get*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::OperationGetCall) and [*list*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::OperationListCall) -* [resources](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::Resource) - * [*get*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::ResourceGetCall) and [*list*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::ResourceListCall) -* [types](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::Type) - * [*list*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/api::TypeListCall) +* [deployments](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::Deployment) + * [*cancel preview*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentCancelPreviewCall), [*delete*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentDeleteCall), [*get*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentGetCall), [*get iam policy*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentGetIamPolicyCall), [*insert*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentInsertCall), [*list*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentListCall), [*patch*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentPatchCall), [*set iam 
policy*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentSetIamPolicyCall), [*stop*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentStopCall), [*test iam permissions*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentTestIamPermissionCall) and [*update*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::DeploymentUpdateCall) +* [manifests](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::Manifest) + * [*get*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::ManifestGetCall) and [*list*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::ManifestListCall) +* [operations](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::Operation) + * [*get*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::OperationGetCall) and [*list*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::OperationListCall) +* [resources](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::Resource) + * [*get*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::ResourceGetCall) and [*list*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::ResourceListCall) +* [types](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::Type) + * [*list*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/api::TypeListCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/DeploymentManager)** +* **[Hub](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/DeploymentManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::CallBuilder) -* **[Resources](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::CallBuilder) +* **[Resources](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::Part)** + * **[Parts](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -145,17 +145,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -165,29 +165,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::Delegate) to the -[Method Builder](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::Delegate) to the +[Method Builder](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::RequestValue) and -[decodable](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::RequestValue) and +[decodable](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-deploymentmanager2/5.0.2-beta-1+20221208/google_deploymentmanager2/client::RequestValue) are moved +* [request values](https://docs.rs/google-deploymentmanager2/5.0.2+20221208/google_deploymentmanager2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/deploymentmanager2/src/api.rs b/gen/deploymentmanager2/src/api.rs index a6a8e51576..44345b5cff 100644 --- a/gen/deploymentmanager2/src/api.rs +++ b/gen/deploymentmanager2/src/api.rs @@ -140,7 +140,7 @@ impl<'a, S> DeploymentManager { DeploymentManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://deploymentmanager.googleapis.com/".to_string(), _root_url: "https://deploymentmanager.googleapis.com/".to_string(), } @@ -163,7 +163,7 @@ impl<'a, S> DeploymentManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/deploymentmanager2/src/client.rs b/gen/deploymentmanager2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/deploymentmanager2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/deploymentmanager2/src/lib.rs b/gen/deploymentmanager2/src/lib.rs index 16321d2a6a..482f7591fd 100644 --- a/gen/deploymentmanager2/src/lib.rs +++ b/gen/deploymentmanager2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Deployment Manager* crate version *5.0.2-beta-1+20221208*, where *20221208* is the exact revision of the *deploymentmanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Deployment Manager* crate version *5.0.2+20221208*, where *20221208* is the exact revision of the *deploymentmanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Deployment Manager* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/deployment-manager). diff --git a/gen/deploymentmanager2_beta2-cli/Cargo.toml b/gen/deploymentmanager2_beta2-cli/Cargo.toml index a73c99bbfe..02a9b5f38c 100644 --- a/gen/deploymentmanager2_beta2-cli/Cargo.toml +++ b/gen/deploymentmanager2_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-deploymentmanager2_beta2-cli" -version = "4.0.1+20160201" +version = "5.0.2+20160201" authors = ["Sebastian Thiel "] description = "A complete library to interact with Deployment Manager (protocol v2beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/deploymentmanager2_beta2-cli" @@ -20,13 +20,13 @@ name = "deploymentmanager2-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-deploymentmanager2_beta2] path = "../deploymentmanager2_beta2" -version = "4.0.1+20160201" +version = "5.0.2+20160201" + diff --git a/gen/deploymentmanager2_beta2-cli/README.md b/gen/deploymentmanager2_beta2-cli/README.md index 
8d783f6184..5ea79a5b62 100644 --- a/gen/deploymentmanager2_beta2-cli/README.md +++ b/gen/deploymentmanager2_beta2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Deployment Manager* API at revision *20160201*. The CLI is at version *4.0.1*. +This documentation was generated from the *Deployment Manager* API at revision *20160201*. The CLI is at version *5.0.2*. ```bash deploymentmanager2-beta2 [options] diff --git a/gen/deploymentmanager2_beta2-cli/mkdocs.yml b/gen/deploymentmanager2_beta2-cli/mkdocs.yml index 5119648ac6..c7b8d8c79f 100644 --- a/gen/deploymentmanager2_beta2-cli/mkdocs.yml +++ b/gen/deploymentmanager2_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Deployment Manager v4.0.1+20160201 +site_name: Deployment Manager v5.0.2+20160201 site_url: http://byron.github.io/google-apis-rs/google-deploymentmanager2_beta2-cli site_description: A complete library to interact with Deployment Manager (protocol v2beta2) @@ -7,21 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/deploymentmanage docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['deployments_delete.md', 'Deployments', 'Delete'] -- ['deployments_get.md', 'Deployments', 'Get'] -- ['deployments_insert.md', 'Deployments', 'Insert'] -- ['deployments_list.md', 'Deployments', 'List'] -- ['deployments_patch.md', 'Deployments', 'Patch'] -- ['deployments_update.md', 'Deployments', 'Update'] -- ['manifests_get.md', 'Manifests', 'Get'] -- ['manifests_list.md', 'Manifests', 'List'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['resources_get.md', 'Resources', 'Get'] -- ['resources_list.md', 'Resources', 'List'] -- ['types_list.md', 'Types', 'List'] +nav: +- Home: 'index.md' +- 'Deployments': + - 'Delete': 'deployments_delete.md' + - 'Get': 'deployments_get.md' + - 'Insert': 'deployments_insert.md' + - 
'List': 'deployments_list.md' + - 'Patch': 'deployments_patch.md' + - 'Update': 'deployments_update.md' +- 'Manifests': + - 'Get': 'manifests_get.md' + - 'List': 'manifests_list.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Resources': + - 'Get': 'resources_get.md' + - 'List': 'resources_list.md' +- 'Types': + - 'List': 'types_list.md' theme: readthedocs diff --git a/gen/deploymentmanager2_beta2-cli/src/client.rs b/gen/deploymentmanager2_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/deploymentmanager2_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/deploymentmanager2_beta2-cli/src/main.rs b/gen/deploymentmanager2_beta2-cli/src/main.rs index 3ce926e440..b2f726d279 100644 --- a/gen/deploymentmanager2_beta2-cli/src/main.rs +++ b/gen/deploymentmanager2_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_deploymentmanager2_beta2::{api, Error, oauth2}; +use google_deploymentmanager2_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -261,7 +260,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -587,7 +586,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -701,7 +700,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -815,7 +814,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -877,7 +876,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1472,7 +1471,7 @@ async fn main() { let mut app = App::new("deploymentmanager2-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20160201") + .version("5.0.2+20160201") .about("The Deployment Manager API allows users to declaratively configure, deploy and run complex solutions on the Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_deploymentmanager2_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/deploymentmanager2_beta2/Cargo.toml b/gen/deploymentmanager2_beta2/Cargo.toml index 9da8c787d6..a9584b69fe 100644 --- a/gen/deploymentmanager2_beta2/Cargo.toml +++ b/gen/deploymentmanager2_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-deploymentmanager2_beta2" -version = "5.0.2-beta-1+20160201" +version = "5.0.2+20160201" authors = ["Sebastian Thiel "] description = "A complete library to interact with Deployment Manager (protocol v2beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/deploymentmanager2_beta2" homepage = "https://developers.google.com/deployment-manager/" -documentation = "https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201" +documentation = 
"https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201" license = "MIT" keywords = ["deploymentmanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/deploymentmanager2_beta2/README.md b/gen/deploymentmanager2_beta2/README.md index b9d716aee1..b9b9db7950 100644 --- a/gen/deploymentmanager2_beta2/README.md +++ b/gen/deploymentmanager2_beta2/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-deploymentmanager2_beta2` library allows access to all features of the *Google Deployment Manager* service. -This documentation was generated from *Deployment Manager* crate version *5.0.2-beta-1+20160201*, where *20160201* is the exact revision of the *deploymentmanager:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Deployment Manager* crate version *5.0.2+20160201*, where *20160201* is the exact revision of the *deploymentmanager:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Deployment Manager* *v2_beta2* API can be found at the [official documentation site](https://developers.google.com/deployment-manager/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/DeploymentManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/DeploymentManager) ... 
-* [deployments](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::Deployment) - * [*delete*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::DeploymentDeleteCall), [*get*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::DeploymentGetCall), [*insert*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::DeploymentInsertCall), [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::DeploymentListCall), [*patch*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::DeploymentPatchCall) and [*update*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::DeploymentUpdateCall) -* [manifests](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::Manifest) - * [*get*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::ManifestGetCall) and [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::ManifestListCall) -* [operations](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::Operation) - * [*get*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::OperationGetCall) and [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::OperationListCall) -* [resources](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::Resource) - * 
[*get*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::ResourceGetCall) and [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::ResourceListCall) -* [types](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::Type) - * [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/api::TypeListCall) +* [deployments](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::Deployment) + * [*delete*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::DeploymentDeleteCall), [*get*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::DeploymentGetCall), [*insert*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::DeploymentInsertCall), [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::DeploymentListCall), [*patch*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::DeploymentPatchCall) and [*update*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::DeploymentUpdateCall) +* [manifests](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::Manifest) + * [*get*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::ManifestGetCall) and [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::ManifestListCall) +* [operations](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::Operation) + * 
[*get*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::OperationGetCall) and [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::OperationListCall) +* [resources](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::Resource) + * [*get*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::ResourceGetCall) and [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::ResourceListCall) +* [types](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::Type) + * [*list*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/api::TypeListCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/DeploymentManager)** +* **[Hub](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/DeploymentManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::Part)** + * **[Parts](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -140,17 +140,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -160,29 +160,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-deploymentmanager2_beta2/5.0.2-beta-1+20160201/google_deploymentmanager2_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-deploymentmanager2_beta2/5.0.2+20160201/google_deploymentmanager2_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/deploymentmanager2_beta2/src/api.rs b/gen/deploymentmanager2_beta2/src/api.rs index 080e9f1773..ae4e18158f 100644 --- a/gen/deploymentmanager2_beta2/src/api.rs +++ b/gen/deploymentmanager2_beta2/src/api.rs @@ -140,7 +140,7 @@ impl<'a, S> DeploymentManager { DeploymentManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/deploymentmanager/v2beta2/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -163,7 +163,7 @@ impl<'a, S> DeploymentManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/deploymentmanager2_beta2/src/client.rs b/gen/deploymentmanager2_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/deploymentmanager2_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/deploymentmanager2_beta2/src/lib.rs b/gen/deploymentmanager2_beta2/src/lib.rs index 8f23ab8bb9..30ca0092dc 100644 --- a/gen/deploymentmanager2_beta2/src/lib.rs +++ b/gen/deploymentmanager2_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Deployment Manager* crate version *5.0.2-beta-1+20160201*, where *20160201* is the exact revision of the *deploymentmanager:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Deployment Manager* crate version *5.0.2+20160201*, where *20160201* is the exact revision of the *deploymentmanager:v2beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Deployment Manager* *v2_beta2* API can be found at the //! [official documentation site](https://developers.google.com/deployment-manager/). diff --git a/gen/dfareporting2d8-cli/Cargo.toml b/gen/dfareporting2d8-cli/Cargo.toml index 9db2dae39e..5676fa4a07 100644 --- a/gen/dfareporting2d8-cli/Cargo.toml +++ b/gen/dfareporting2d8-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dfareporting2d8-cli" -version = "4.0.1+20180830" +version = "5.0.2+20180830" authors = ["Sebastian Thiel "] description = "A complete library to interact with dfareporting (protocol v2.8)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting2d8-cli" @@ -20,13 +20,13 @@ name = "dfareporting2d8" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dfareporting2d8] path = "../dfareporting2d8" -version = "4.0.1+20180830" +version = "5.0.2+20180830" + diff --git a/gen/dfareporting2d8-cli/README.md b/gen/dfareporting2d8-cli/README.md index 776804c993..874bbf5762 100644 --- a/gen/dfareporting2d8-cli/README.md +++ 
b/gen/dfareporting2d8-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *dfareporting* API at revision *20180830*. The CLI is at version *4.0.1*. +This documentation was generated from the *dfareporting* API at revision *20180830*. The CLI is at version *5.0.2*. ```bash dfareporting2d8 [options] diff --git a/gen/dfareporting2d8-cli/mkdocs.yml b/gen/dfareporting2d8-cli/mkdocs.yml index 6bb3a9af3b..8a3054a4a7 100644 --- a/gen/dfareporting2d8-cli/mkdocs.yml +++ b/gen/dfareporting2d8-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: dfareporting v4.0.1+20180830 +site_name: dfareporting v5.0.2+20180830 site_url: http://byron.github.io/google-apis-rs/google-dfareporting2d8-cli site_description: A complete library to interact with dfareporting (protocol v2.8) @@ -7,213 +7,273 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting2d8- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['account-active-ad-summaries_get.md', 'Account Active Ad Summaries', 'Get'] -- ['account-permission-groups_get.md', 'Account Permission Groups', 'Get'] -- ['account-permission-groups_list.md', 'Account Permission Groups', 'List'] -- ['account-permissions_get.md', 'Account Permissions', 'Get'] -- ['account-permissions_list.md', 'Account Permissions', 'List'] -- ['account-user-profiles_get.md', 'Account User Profiles', 'Get'] -- ['account-user-profiles_insert.md', 'Account User Profiles', 'Insert'] -- ['account-user-profiles_list.md', 'Account User Profiles', 'List'] -- ['account-user-profiles_patch.md', 'Account User Profiles', 'Patch'] -- ['account-user-profiles_update.md', 'Account User Profiles', 'Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['ads_get.md', 'Ads', 'Get'] -- ['ads_insert.md', 
'Ads', 'Insert'] -- ['ads_list.md', 'Ads', 'List'] -- ['ads_patch.md', 'Ads', 'Patch'] -- ['ads_update.md', 'Ads', 'Update'] -- ['advertiser-groups_delete.md', 'Advertiser Groups', 'Delete'] -- ['advertiser-groups_get.md', 'Advertiser Groups', 'Get'] -- ['advertiser-groups_insert.md', 'Advertiser Groups', 'Insert'] -- ['advertiser-groups_list.md', 'Advertiser Groups', 'List'] -- ['advertiser-groups_patch.md', 'Advertiser Groups', 'Patch'] -- ['advertiser-groups_update.md', 'Advertiser Groups', 'Update'] -- ['advertisers_get.md', 'Advertisers', 'Get'] -- ['advertisers_insert.md', 'Advertisers', 'Insert'] -- ['advertisers_list.md', 'Advertisers', 'List'] -- ['advertisers_patch.md', 'Advertisers', 'Patch'] -- ['advertisers_update.md', 'Advertisers', 'Update'] -- ['browsers_list.md', 'Browsers', 'List'] -- ['campaign-creative-associations_insert.md', 'Campaign Creative Associations', 'Insert'] -- ['campaign-creative-associations_list.md', 'Campaign Creative Associations', 'List'] -- ['campaigns_get.md', 'Campaigns', 'Get'] -- ['campaigns_insert.md', 'Campaigns', 'Insert'] -- ['campaigns_list.md', 'Campaigns', 'List'] -- ['campaigns_patch.md', 'Campaigns', 'Patch'] -- ['campaigns_update.md', 'Campaigns', 'Update'] -- ['change-logs_get.md', 'Change Logs', 'Get'] -- ['change-logs_list.md', 'Change Logs', 'List'] -- ['cities_list.md', 'Cities', 'List'] -- ['connection-types_get.md', 'Connection Types', 'Get'] -- ['connection-types_list.md', 'Connection Types', 'List'] -- ['content-categories_delete.md', 'Content Categories', 'Delete'] -- ['content-categories_get.md', 'Content Categories', 'Get'] -- ['content-categories_insert.md', 'Content Categories', 'Insert'] -- ['content-categories_list.md', 'Content Categories', 'List'] -- ['content-categories_patch.md', 'Content Categories', 'Patch'] -- ['content-categories_update.md', 'Content Categories', 'Update'] -- ['conversions_batchinsert.md', 'Conversions', 'Batchinsert'] -- ['conversions_batchupdate.md', 'Conversions', 
'Batchupdate'] -- ['countries_get.md', 'Countries', 'Get'] -- ['countries_list.md', 'Countries', 'List'] -- ['creative-assets_insert.md', 'Creative Assets', 'Insert'] -- ['creative-field-values_delete.md', 'Creative Field Values', 'Delete'] -- ['creative-field-values_get.md', 'Creative Field Values', 'Get'] -- ['creative-field-values_insert.md', 'Creative Field Values', 'Insert'] -- ['creative-field-values_list.md', 'Creative Field Values', 'List'] -- ['creative-field-values_patch.md', 'Creative Field Values', 'Patch'] -- ['creative-field-values_update.md', 'Creative Field Values', 'Update'] -- ['creative-fields_delete.md', 'Creative Fields', 'Delete'] -- ['creative-fields_get.md', 'Creative Fields', 'Get'] -- ['creative-fields_insert.md', 'Creative Fields', 'Insert'] -- ['creative-fields_list.md', 'Creative Fields', 'List'] -- ['creative-fields_patch.md', 'Creative Fields', 'Patch'] -- ['creative-fields_update.md', 'Creative Fields', 'Update'] -- ['creative-groups_get.md', 'Creative Groups', 'Get'] -- ['creative-groups_insert.md', 'Creative Groups', 'Insert'] -- ['creative-groups_list.md', 'Creative Groups', 'List'] -- ['creative-groups_patch.md', 'Creative Groups', 'Patch'] -- ['creative-groups_update.md', 'Creative Groups', 'Update'] -- ['creatives_get.md', 'Creatives', 'Get'] -- ['creatives_insert.md', 'Creatives', 'Insert'] -- ['creatives_list.md', 'Creatives', 'List'] -- ['creatives_patch.md', 'Creatives', 'Patch'] -- ['creatives_update.md', 'Creatives', 'Update'] -- ['dimension-values_query.md', 'Dimension Values', 'Query'] -- ['directory-site-contacts_get.md', 'Directory Site Contacts', 'Get'] -- ['directory-site-contacts_list.md', 'Directory Site Contacts', 'List'] -- ['directory-sites_get.md', 'Directory Sites', 'Get'] -- ['directory-sites_insert.md', 'Directory Sites', 'Insert'] -- ['directory-sites_list.md', 'Directory Sites', 'List'] -- ['dynamic-targeting-keys_delete.md', 'Dynamic Targeting Keys', 'Delete'] -- ['dynamic-targeting-keys_insert.md', 
'Dynamic Targeting Keys', 'Insert'] -- ['dynamic-targeting-keys_list.md', 'Dynamic Targeting Keys', 'List'] -- ['event-tags_delete.md', 'Event Tags', 'Delete'] -- ['event-tags_get.md', 'Event Tags', 'Get'] -- ['event-tags_insert.md', 'Event Tags', 'Insert'] -- ['event-tags_list.md', 'Event Tags', 'List'] -- ['event-tags_patch.md', 'Event Tags', 'Patch'] -- ['event-tags_update.md', 'Event Tags', 'Update'] -- ['files_get.md', 'Files', 'Get'] -- ['files_list.md', 'Files', 'List'] -- ['floodlight-activities_delete.md', 'Floodlight Activities', 'Delete'] -- ['floodlight-activities_generatetag.md', 'Floodlight Activities', 'Generatetag'] -- ['floodlight-activities_get.md', 'Floodlight Activities', 'Get'] -- ['floodlight-activities_insert.md', 'Floodlight Activities', 'Insert'] -- ['floodlight-activities_list.md', 'Floodlight Activities', 'List'] -- ['floodlight-activities_patch.md', 'Floodlight Activities', 'Patch'] -- ['floodlight-activities_update.md', 'Floodlight Activities', 'Update'] -- ['floodlight-activity-groups_get.md', 'Floodlight Activity Groups', 'Get'] -- ['floodlight-activity-groups_insert.md', 'Floodlight Activity Groups', 'Insert'] -- ['floodlight-activity-groups_list.md', 'Floodlight Activity Groups', 'List'] -- ['floodlight-activity-groups_patch.md', 'Floodlight Activity Groups', 'Patch'] -- ['floodlight-activity-groups_update.md', 'Floodlight Activity Groups', 'Update'] -- ['floodlight-configurations_get.md', 'Floodlight Configurations', 'Get'] -- ['floodlight-configurations_list.md', 'Floodlight Configurations', 'List'] -- ['floodlight-configurations_patch.md', 'Floodlight Configurations', 'Patch'] -- ['floodlight-configurations_update.md', 'Floodlight Configurations', 'Update'] -- ['inventory-items_get.md', 'Inventory Items', 'Get'] -- ['inventory-items_list.md', 'Inventory Items', 'List'] -- ['landing-pages_delete.md', 'Landing Pages', 'Delete'] -- ['landing-pages_get.md', 'Landing Pages', 'Get'] -- ['landing-pages_insert.md', 'Landing Pages', 
'Insert'] -- ['landing-pages_list.md', 'Landing Pages', 'List'] -- ['landing-pages_patch.md', 'Landing Pages', 'Patch'] -- ['landing-pages_update.md', 'Landing Pages', 'Update'] -- ['languages_list.md', 'Languages', 'List'] -- ['metros_list.md', 'Metros', 'List'] -- ['mobile-carriers_get.md', 'Mobile Carriers', 'Get'] -- ['mobile-carriers_list.md', 'Mobile Carriers', 'List'] -- ['operating-system-versions_get.md', 'Operating System Versions', 'Get'] -- ['operating-system-versions_list.md', 'Operating System Versions', 'List'] -- ['operating-systems_get.md', 'Operating Systems', 'Get'] -- ['operating-systems_list.md', 'Operating Systems', 'List'] -- ['order-documents_get.md', 'Order Documents', 'Get'] -- ['order-documents_list.md', 'Order Documents', 'List'] -- ['orders_get.md', 'Orders', 'Get'] -- ['orders_list.md', 'Orders', 'List'] -- ['placement-groups_get.md', 'Placement Groups', 'Get'] -- ['placement-groups_insert.md', 'Placement Groups', 'Insert'] -- ['placement-groups_list.md', 'Placement Groups', 'List'] -- ['placement-groups_patch.md', 'Placement Groups', 'Patch'] -- ['placement-groups_update.md', 'Placement Groups', 'Update'] -- ['placement-strategies_delete.md', 'Placement Strategies', 'Delete'] -- ['placement-strategies_get.md', 'Placement Strategies', 'Get'] -- ['placement-strategies_insert.md', 'Placement Strategies', 'Insert'] -- ['placement-strategies_list.md', 'Placement Strategies', 'List'] -- ['placement-strategies_patch.md', 'Placement Strategies', 'Patch'] -- ['placement-strategies_update.md', 'Placement Strategies', 'Update'] -- ['placements_generatetags.md', 'Placements', 'Generatetags'] -- ['placements_get.md', 'Placements', 'Get'] -- ['placements_insert.md', 'Placements', 'Insert'] -- ['placements_list.md', 'Placements', 'List'] -- ['placements_patch.md', 'Placements', 'Patch'] -- ['placements_update.md', 'Placements', 'Update'] -- ['platform-types_get.md', 'Platform Types', 'Get'] -- ['platform-types_list.md', 'Platform Types', 'List'] -- 
['postal-codes_get.md', 'Postal Codes', 'Get'] -- ['postal-codes_list.md', 'Postal Codes', 'List'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_list.md', 'Projects', 'List'] -- ['regions_list.md', 'Regions', 'List'] -- ['remarketing-list-shares_get.md', 'Remarketing List Shares', 'Get'] -- ['remarketing-list-shares_patch.md', 'Remarketing List Shares', 'Patch'] -- ['remarketing-list-shares_update.md', 'Remarketing List Shares', 'Update'] -- ['remarketing-lists_get.md', 'Remarketing Lists', 'Get'] -- ['remarketing-lists_insert.md', 'Remarketing Lists', 'Insert'] -- ['remarketing-lists_list.md', 'Remarketing Lists', 'List'] -- ['remarketing-lists_patch.md', 'Remarketing Lists', 'Patch'] -- ['remarketing-lists_update.md', 'Remarketing Lists', 'Update'] -- ['reports_compatible-fields-query.md', 'Reports', 'Compatible Fields Query'] -- ['reports_delete.md', 'Reports', 'Delete'] -- ['reports_files-get.md', 'Reports', 'Files Get'] -- ['reports_files-list.md', 'Reports', 'Files List'] -- ['reports_get.md', 'Reports', 'Get'] -- ['reports_insert.md', 'Reports', 'Insert'] -- ['reports_list.md', 'Reports', 'List'] -- ['reports_patch.md', 'Reports', 'Patch'] -- ['reports_run.md', 'Reports', 'Run'] -- ['reports_update.md', 'Reports', 'Update'] -- ['sites_get.md', 'Sites', 'Get'] -- ['sites_insert.md', 'Sites', 'Insert'] -- ['sites_list.md', 'Sites', 'List'] -- ['sites_patch.md', 'Sites', 'Patch'] -- ['sites_update.md', 'Sites', 'Update'] -- ['sizes_get.md', 'Sizes', 'Get'] -- ['sizes_insert.md', 'Sizes', 'Insert'] -- ['sizes_list.md', 'Sizes', 'List'] -- ['subaccounts_get.md', 'Subaccounts', 'Get'] -- ['subaccounts_insert.md', 'Subaccounts', 'Insert'] -- ['subaccounts_list.md', 'Subaccounts', 'List'] -- ['subaccounts_patch.md', 'Subaccounts', 'Patch'] -- ['subaccounts_update.md', 'Subaccounts', 'Update'] -- ['targetable-remarketing-lists_get.md', 'Targetable Remarketing Lists', 'Get'] -- ['targetable-remarketing-lists_list.md', 'Targetable Remarketing Lists', 'List'] 
-- ['targeting-templates_get.md', 'Targeting Templates', 'Get'] -- ['targeting-templates_insert.md', 'Targeting Templates', 'Insert'] -- ['targeting-templates_list.md', 'Targeting Templates', 'List'] -- ['targeting-templates_patch.md', 'Targeting Templates', 'Patch'] -- ['targeting-templates_update.md', 'Targeting Templates', 'Update'] -- ['user-profiles_get.md', 'User Profiles', 'Get'] -- ['user-profiles_list.md', 'User Profiles', 'List'] -- ['user-role-permission-groups_get.md', 'User Role Permission Groups', 'Get'] -- ['user-role-permission-groups_list.md', 'User Role Permission Groups', 'List'] -- ['user-role-permissions_get.md', 'User Role Permissions', 'Get'] -- ['user-role-permissions_list.md', 'User Role Permissions', 'List'] -- ['user-roles_delete.md', 'User Roles', 'Delete'] -- ['user-roles_get.md', 'User Roles', 'Get'] -- ['user-roles_insert.md', 'User Roles', 'Insert'] -- ['user-roles_list.md', 'User Roles', 'List'] -- ['user-roles_patch.md', 'User Roles', 'Patch'] -- ['user-roles_update.md', 'User Roles', 'Update'] -- ['video-formats_get.md', 'Video Formats', 'Get'] -- ['video-formats_list.md', 'Video Formats', 'List'] +nav: +- Home: 'index.md' +- 'Account Active Ad Summaries': + - 'Get': 'account-active-ad-summaries_get.md' +- 'Account Permission Groups': + - 'Get': 'account-permission-groups_get.md' + - 'List': 'account-permission-groups_list.md' +- 'Account Permissions': + - 'Get': 'account-permissions_get.md' + - 'List': 'account-permissions_list.md' +- 'Account User Profiles': + - 'Get': 'account-user-profiles_get.md' + - 'Insert': 'account-user-profiles_insert.md' + - 'List': 'account-user-profiles_list.md' + - 'Patch': 'account-user-profiles_patch.md' + - 'Update': 'account-user-profiles_update.md' +- 'Accounts': + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' + - 'Update': 'accounts_update.md' +- 'Ads': + - 'Get': 'ads_get.md' + - 'Insert': 'ads_insert.md' + - 'List': 'ads_list.md' + - 'Patch': 
'ads_patch.md' + - 'Update': 'ads_update.md' +- 'Advertiser Groups': + - 'Delete': 'advertiser-groups_delete.md' + - 'Get': 'advertiser-groups_get.md' + - 'Insert': 'advertiser-groups_insert.md' + - 'List': 'advertiser-groups_list.md' + - 'Patch': 'advertiser-groups_patch.md' + - 'Update': 'advertiser-groups_update.md' +- 'Advertisers': + - 'Get': 'advertisers_get.md' + - 'Insert': 'advertisers_insert.md' + - 'List': 'advertisers_list.md' + - 'Patch': 'advertisers_patch.md' + - 'Update': 'advertisers_update.md' +- 'Browsers': + - 'List': 'browsers_list.md' +- 'Campaign Creative Associations': + - 'Insert': 'campaign-creative-associations_insert.md' + - 'List': 'campaign-creative-associations_list.md' +- 'Campaigns': + - 'Get': 'campaigns_get.md' + - 'Insert': 'campaigns_insert.md' + - 'List': 'campaigns_list.md' + - 'Patch': 'campaigns_patch.md' + - 'Update': 'campaigns_update.md' +- 'Change Logs': + - 'Get': 'change-logs_get.md' + - 'List': 'change-logs_list.md' +- 'Cities': + - 'List': 'cities_list.md' +- 'Connection Types': + - 'Get': 'connection-types_get.md' + - 'List': 'connection-types_list.md' +- 'Content Categories': + - 'Delete': 'content-categories_delete.md' + - 'Get': 'content-categories_get.md' + - 'Insert': 'content-categories_insert.md' + - 'List': 'content-categories_list.md' + - 'Patch': 'content-categories_patch.md' + - 'Update': 'content-categories_update.md' +- 'Conversions': + - 'Batchinsert': 'conversions_batchinsert.md' + - 'Batchupdate': 'conversions_batchupdate.md' +- 'Countries': + - 'Get': 'countries_get.md' + - 'List': 'countries_list.md' +- 'Creative Assets': + - 'Insert': 'creative-assets_insert.md' +- 'Creative Field Values': + - 'Delete': 'creative-field-values_delete.md' + - 'Get': 'creative-field-values_get.md' + - 'Insert': 'creative-field-values_insert.md' + - 'List': 'creative-field-values_list.md' + - 'Patch': 'creative-field-values_patch.md' + - 'Update': 'creative-field-values_update.md' +- 'Creative Fields': + - 'Delete': 
'creative-fields_delete.md' + - 'Get': 'creative-fields_get.md' + - 'Insert': 'creative-fields_insert.md' + - 'List': 'creative-fields_list.md' + - 'Patch': 'creative-fields_patch.md' + - 'Update': 'creative-fields_update.md' +- 'Creative Groups': + - 'Get': 'creative-groups_get.md' + - 'Insert': 'creative-groups_insert.md' + - 'List': 'creative-groups_list.md' + - 'Patch': 'creative-groups_patch.md' + - 'Update': 'creative-groups_update.md' +- 'Creatives': + - 'Get': 'creatives_get.md' + - 'Insert': 'creatives_insert.md' + - 'List': 'creatives_list.md' + - 'Patch': 'creatives_patch.md' + - 'Update': 'creatives_update.md' +- 'Dimension Values': + - 'Query': 'dimension-values_query.md' +- 'Directory Site Contacts': + - 'Get': 'directory-site-contacts_get.md' + - 'List': 'directory-site-contacts_list.md' +- 'Directory Sites': + - 'Get': 'directory-sites_get.md' + - 'Insert': 'directory-sites_insert.md' + - 'List': 'directory-sites_list.md' +- 'Dynamic Targeting Keys': + - 'Delete': 'dynamic-targeting-keys_delete.md' + - 'Insert': 'dynamic-targeting-keys_insert.md' + - 'List': 'dynamic-targeting-keys_list.md' +- 'Event Tags': + - 'Delete': 'event-tags_delete.md' + - 'Get': 'event-tags_get.md' + - 'Insert': 'event-tags_insert.md' + - 'List': 'event-tags_list.md' + - 'Patch': 'event-tags_patch.md' + - 'Update': 'event-tags_update.md' +- 'Files': + - 'Get': 'files_get.md' + - 'List': 'files_list.md' +- 'Floodlight Activities': + - 'Delete': 'floodlight-activities_delete.md' + - 'Generatetag': 'floodlight-activities_generatetag.md' + - 'Get': 'floodlight-activities_get.md' + - 'Insert': 'floodlight-activities_insert.md' + - 'List': 'floodlight-activities_list.md' + - 'Patch': 'floodlight-activities_patch.md' + - 'Update': 'floodlight-activities_update.md' +- 'Floodlight Activity Groups': + - 'Get': 'floodlight-activity-groups_get.md' + - 'Insert': 'floodlight-activity-groups_insert.md' + - 'List': 'floodlight-activity-groups_list.md' + - 'Patch': 
'floodlight-activity-groups_patch.md' + - 'Update': 'floodlight-activity-groups_update.md' +- 'Floodlight Configurations': + - 'Get': 'floodlight-configurations_get.md' + - 'List': 'floodlight-configurations_list.md' + - 'Patch': 'floodlight-configurations_patch.md' + - 'Update': 'floodlight-configurations_update.md' +- 'Inventory Items': + - 'Get': 'inventory-items_get.md' + - 'List': 'inventory-items_list.md' +- 'Landing Pages': + - 'Delete': 'landing-pages_delete.md' + - 'Get': 'landing-pages_get.md' + - 'Insert': 'landing-pages_insert.md' + - 'List': 'landing-pages_list.md' + - 'Patch': 'landing-pages_patch.md' + - 'Update': 'landing-pages_update.md' +- 'Languages': + - 'List': 'languages_list.md' +- 'Metros': + - 'List': 'metros_list.md' +- 'Mobile Carriers': + - 'Get': 'mobile-carriers_get.md' + - 'List': 'mobile-carriers_list.md' +- 'Operating System Versions': + - 'Get': 'operating-system-versions_get.md' + - 'List': 'operating-system-versions_list.md' +- 'Operating Systems': + - 'Get': 'operating-systems_get.md' + - 'List': 'operating-systems_list.md' +- 'Order Documents': + - 'Get': 'order-documents_get.md' + - 'List': 'order-documents_list.md' +- 'Orders': + - 'Get': 'orders_get.md' + - 'List': 'orders_list.md' +- 'Placement Groups': + - 'Get': 'placement-groups_get.md' + - 'Insert': 'placement-groups_insert.md' + - 'List': 'placement-groups_list.md' + - 'Patch': 'placement-groups_patch.md' + - 'Update': 'placement-groups_update.md' +- 'Placement Strategies': + - 'Delete': 'placement-strategies_delete.md' + - 'Get': 'placement-strategies_get.md' + - 'Insert': 'placement-strategies_insert.md' + - 'List': 'placement-strategies_list.md' + - 'Patch': 'placement-strategies_patch.md' + - 'Update': 'placement-strategies_update.md' +- 'Placements': + - 'Generatetags': 'placements_generatetags.md' + - 'Get': 'placements_get.md' + - 'Insert': 'placements_insert.md' + - 'List': 'placements_list.md' + - 'Patch': 'placements_patch.md' + - 'Update': 
'placements_update.md' +- 'Platform Types': + - 'Get': 'platform-types_get.md' + - 'List': 'platform-types_list.md' +- 'Postal Codes': + - 'Get': 'postal-codes_get.md' + - 'List': 'postal-codes_list.md' +- 'Projects': + - 'Get': 'projects_get.md' + - 'List': 'projects_list.md' +- 'Regions': + - 'List': 'regions_list.md' +- 'Remarketing List Shares': + - 'Get': 'remarketing-list-shares_get.md' + - 'Patch': 'remarketing-list-shares_patch.md' + - 'Update': 'remarketing-list-shares_update.md' +- 'Remarketing Lists': + - 'Get': 'remarketing-lists_get.md' + - 'Insert': 'remarketing-lists_insert.md' + - 'List': 'remarketing-lists_list.md' + - 'Patch': 'remarketing-lists_patch.md' + - 'Update': 'remarketing-lists_update.md' +- 'Reports': + - 'Compatible Fields Query': 'reports_compatible-fields-query.md' + - 'Delete': 'reports_delete.md' + - 'Files Get': 'reports_files-get.md' + - 'Files List': 'reports_files-list.md' + - 'Get': 'reports_get.md' + - 'Insert': 'reports_insert.md' + - 'List': 'reports_list.md' + - 'Patch': 'reports_patch.md' + - 'Run': 'reports_run.md' + - 'Update': 'reports_update.md' +- 'Sites': + - 'Get': 'sites_get.md' + - 'Insert': 'sites_insert.md' + - 'List': 'sites_list.md' + - 'Patch': 'sites_patch.md' + - 'Update': 'sites_update.md' +- 'Sizes': + - 'Get': 'sizes_get.md' + - 'Insert': 'sizes_insert.md' + - 'List': 'sizes_list.md' +- 'Subaccounts': + - 'Get': 'subaccounts_get.md' + - 'Insert': 'subaccounts_insert.md' + - 'List': 'subaccounts_list.md' + - 'Patch': 'subaccounts_patch.md' + - 'Update': 'subaccounts_update.md' +- 'Targetable Remarketing Lists': + - 'Get': 'targetable-remarketing-lists_get.md' + - 'List': 'targetable-remarketing-lists_list.md' +- 'Targeting Templates': + - 'Get': 'targeting-templates_get.md' + - 'Insert': 'targeting-templates_insert.md' + - 'List': 'targeting-templates_list.md' + - 'Patch': 'targeting-templates_patch.md' + - 'Update': 'targeting-templates_update.md' +- 'User Profiles': + - 'Get': 'user-profiles_get.md' + 
- 'List': 'user-profiles_list.md' +- 'User Role Permission Groups': + - 'Get': 'user-role-permission-groups_get.md' + - 'List': 'user-role-permission-groups_list.md' +- 'User Role Permissions': + - 'Get': 'user-role-permissions_get.md' + - 'List': 'user-role-permissions_list.md' +- 'User Roles': + - 'Delete': 'user-roles_delete.md' + - 'Get': 'user-roles_get.md' + - 'Insert': 'user-roles_insert.md' + - 'List': 'user-roles_list.md' + - 'Patch': 'user-roles_patch.md' + - 'Update': 'user-roles_update.md' +- 'Video Formats': + - 'Get': 'video-formats_get.md' + - 'List': 'video-formats_list.md' theme: readthedocs diff --git a/gen/dfareporting2d8-cli/src/client.rs b/gen/dfareporting2d8-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dfareporting2d8-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dfareporting2d8-cli/src/main.rs b/gen/dfareporting2d8-cli/src/main.rs index a5419eb699..7367c913ab 100644 --- a/gen/dfareporting2d8-cli/src/main.rs +++ b/gen/dfareporting2d8-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dfareporting2d8::{api, Error, oauth2}; +use google_dfareporting2d8::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -478,10 +477,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-role-id" => { - call = call.user_role_id(value.unwrap_or("")); + call = call.user_role_id( value.map(|v| arg_from_str(v, err, "user-role-id", "int64")).unwrap_or(-0)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -496,13 +495,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = 
call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -838,13 +837,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1317,10 +1316,10 @@ where call = call.add_type(value.unwrap_or("")); }, "ssl-required" => { - call = call.ssl_required(arg_from_str(value.unwrap_or("false"), err, "ssl-required", "boolean")); + call = call.ssl_required( value.map(|v| arg_from_str(v, err, "ssl-required", "boolean")).unwrap_or(false)); }, "ssl-compliant" => { - call = call.ssl_compliant(arg_from_str(value.unwrap_or("false"), err, "ssl-compliant", "boolean")); + call = call.ssl_compliant( value.map(|v| arg_from_str(v, err, "ssl-compliant", "boolean")).unwrap_or(false)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -1329,58 +1328,58 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "remarketing-list-ids" => { - call = call.add_remarketing_list_ids(value.unwrap_or("")); + call = call.add_remarketing_list_ids( value.map(|v| arg_from_str(v, err, "remarketing-list-ids", "int64")).unwrap_or(-0)); }, "placement-ids" => { - call = 
call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "landing-page-ids" => { - call = call.add_landing_page_ids(value.unwrap_or("")); + call = call.add_landing_page_ids( value.map(|v| arg_from_str(v, err, "landing-page-ids", "int64")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dynamic-click-tracker" => { - call = call.dynamic_click_tracker(arg_from_str(value.unwrap_or("false"), err, "dynamic-click-tracker", "boolean")); + call = call.dynamic_click_tracker( value.map(|v| arg_from_str(v, err, "dynamic-click-tracker", "boolean")).unwrap_or(false)); }, "creative-optimization-configuration-ids" => { - call = call.add_creative_optimization_configuration_ids(value.unwrap_or("")); + call = call.add_creative_optimization_configuration_ids( value.map(|v| arg_from_str(v, err, "creative-optimization-configuration-ids", "int64")).unwrap_or(-0)); }, "creative-ids" => { - call = call.add_creative_ids(value.unwrap_or("")); + call = call.add_creative_ids( value.map(|v| arg_from_str(v, err, "creative-ids", "int64")).unwrap_or(-0)); }, "compatibility" => { call = call.compatibility(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", 
"int64")).unwrap_or(-0)); }, "audience-segment-ids" => { - call = call.add_audience_segment_ids(value.unwrap_or("")); + call = call.add_audience_segment_ids( value.map(|v| arg_from_str(v, err, "audience-segment-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1932,10 +1931,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2328,7 +2327,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "status" => { call = call.status(value.unwrap_or("")); @@ -2346,22 +2345,22 @@ where call = call.page_token(value.unwrap_or("")); }, "only-parent" => { - call = call.only_parent(arg_from_str(value.unwrap_or("false"), err, "only-parent", "boolean")); + call = call.only_parent( value.map(|v| arg_from_str(v, err, "only-parent", "boolean")).unwrap_or(false)); }, "max-results" 
=> { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-advertisers-without-groups-only" => { - call = call.include_advertisers_without_groups_only(arg_from_str(value.unwrap_or("false"), err, "include-advertisers-without-groups-only", "boolean")); + call = call.include_advertisers_without_groups_only( value.map(|v| arg_from_str(v, err, "include-advertisers-without-groups-only", "boolean")).unwrap_or(false)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-ids" => { - call = call.add_floodlight_configuration_ids(value.unwrap_or("")); + call = call.add_floodlight_configuration_ids( value.map(|v| arg_from_str(v, err, "floodlight-configuration-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2779,7 +2778,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3014,7 +3013,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -3029,28 +3028,28 @@ where call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = 
call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "excluded-ids" => { - call = call.add_excluded_ids(value.unwrap_or("")); + call = call.add_excluded_ids( value.map(|v| arg_from_str(v, err, "excluded-ids", "int64")).unwrap_or(-0)); }, "at-least-one-optimization-activity" => { - call = call.at_least_one_optimization_activity(arg_from_str(value.unwrap_or("false"), err, "at-least-one-optimization-activity", "boolean")); + call = call.at_least_one_optimization_activity( value.map(|v| arg_from_str(v, err, "at-least-one-optimization-activity", "boolean")).unwrap_or(false)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3412,7 +3411,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-profile-ids" => { - call = call.add_user_profile_ids(value.unwrap_or("")); + call = call.add_user_profile_ids( value.map(|v| arg_from_str(v, err, "user-profile-ids", 
"int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -3424,19 +3423,19 @@ where call = call.object_type(value.unwrap_or("")); }, "object-ids" => { - call = call.add_object_ids(value.unwrap_or("")); + call = call.add_object_ids( value.map(|v| arg_from_str(v, err, "object-ids", "int64")).unwrap_or(-0)); }, "min-change-time" => { call = call.min_change_time(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-change-time" => { call = call.max_change_time(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "action" => { call = call.action(value.unwrap_or("")); @@ -3495,16 +3494,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "region-dart-ids" => { - call = call.add_region_dart_ids(value.unwrap_or("")); + call = call.add_region_dart_ids( value.map(|v| arg_from_str(v, err, "region-dart-ids", "int64")).unwrap_or(-0)); }, "name-prefix" => { call = call.name_prefix(value.unwrap_or("")); }, "dart-ids" => { - call = call.add_dart_ids(value.unwrap_or("")); + call = call.add_dart_ids( value.map(|v| arg_from_str(v, err, "dart-ids", "int64")).unwrap_or(-0)); }, "country-dart-ids" => { - call = call.add_country_dart_ids(value.unwrap_or("")); + call = call.add_country_dart_ids( value.map(|v| arg_from_str(v, err, "country-dart-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3860,10 +3859,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { 
- call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4671,10 +4670,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5108,13 +5107,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5523,16 +5522,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-number" => { - call = call.group_number(arg_from_str(value.unwrap_or("-0"), err, "group-number", "integer")); + call = call.group_number( value.map(|v| arg_from_str(v, err, "group-number", "int32")).unwrap_or(-0)); }, 
"advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5999,7 +5998,7 @@ where call = call.add_types(value.unwrap_or("")); }, "studio-creative-id" => { - call = call.studio_creative_id(value.unwrap_or("")); + call = call.studio_creative_id( value.map(|v| arg_from_str(v, err, "studio-creative-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -6008,40 +6007,40 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "rendering-ids" => { - call = call.add_rendering_ids(value.unwrap_or("")); + call = call.add_rendering_ids( value.map(|v| arg_from_str(v, err, "rendering-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "creative-field-ids" => { - call = call.add_creative_field_ids(value.unwrap_or("")); + call = call.add_creative_field_ids( value.map(|v| arg_from_str(v, err, "creative-field-ids", "int64")).unwrap_or(-0)); }, "companion-creative-ids" => { - call = call.add_companion_creative_ids(value.unwrap_or("")); + call = call.add_companion_creative_ids( value.map(|v| arg_from_str(v, err, "companion-creative-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = 
call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6460,7 +6459,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6580,13 +6579,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6815,34 +6814,34 @@ where call = call.search_string(value.unwrap_or("")); }, "parent-id" => { - call = call.parent_id(value.unwrap_or("")); + call = call.parent_id( value.map(|v| arg_from_str(v, err, 
"parent-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dfp-network-code" => { call = call.dfp_network_code(value.unwrap_or("")); }, "country-id" => { - call = call.country_id(value.unwrap_or("")); + call = call.country_id( value.map(|v| arg_from_str(v, err, "country-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7033,13 +7032,13 @@ where 
call = call.object_type(value.unwrap_or("")); }, "object-id" => { - call = call.object_id(value.unwrap_or("")); + call = call.object_id( value.map(|v| arg_from_str(v, err, "object-id", "int64")).unwrap_or(-0)); }, "names" => { call = call.add_names(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7312,25 +7311,25 @@ where call = call.search_string(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "event-tag-types" => { call = call.add_event_tag_types(value.unwrap_or("")); }, "enabled" => { - call = call.enabled(arg_from_str(value.unwrap_or("false"), err, "enabled", "boolean")); + call = call.enabled( value.map(|v| arg_from_str(v, err, "enabled", "boolean")).unwrap_or(false)); }, "definitions-only" => { - call = call.definitions_only(arg_from_str(value.unwrap_or("false"), err, "definitions-only", "boolean")); + call = call.definitions_only( value.map(|v| arg_from_str(v, err, "definitions-only", "boolean")).unwrap_or(false)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "ad-id" => { - call = call.ad_id(value.unwrap_or("")); + call = call.ad_id( value.map(|v| arg_from_str(v, err, "ad-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7684,7 +7683,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = 
call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7784,7 +7783,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8032,13 +8031,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "floodlight-activity-group-type" => { call = call.floodlight_activity_group_type(value.unwrap_or("")); @@ -8050,10 +8049,10 @@ where call = call.floodlight_activity_group_name(value.unwrap_or("")); }, "floodlight-activity-group-ids" => { - call = call.add_floodlight_activity_group_ids(value.unwrap_or("")); + call = call.add_floodlight_activity_group_ids( value.map(|v| arg_from_str(v, err, "floodlight-activity-group-ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8537,16 +8536,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", 
"integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8876,7 +8875,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9217,22 +9216,22 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "in-plan" => { - call = call.in_plan(arg_from_str(value.unwrap_or("false"), err, "in-plan", "boolean")); + call = call.in_plan( value.map(|v| arg_from_str(v, err, "in-plan", "boolean")).unwrap_or(false)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| 
arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10177,7 +10176,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10186,16 +10185,16 @@ where call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -10309,7 +10308,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10318,10 +10317,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", 
"int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10582,7 +10581,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10591,7 +10590,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "placement-group-type" => { call = call.placement_group_type(value.unwrap_or("")); @@ -10609,28 +10608,28 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", 
"boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11176,10 +11175,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11414,10 +11413,10 @@ where call = call.add_tag_formats(value.unwrap_or("")); }, "placement-ids" => { - call = call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11711,10 +11710,10 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -11723,7 +11722,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = 
call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "payment-source" => { call = call.payment_source(value.unwrap_or("")); @@ -11741,34 +11740,34 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-ids" => { - call = call.add_group_ids(value.unwrap_or("")); + call = call.add_group_ids( value.map(|v| arg_from_str(v, err, "group-ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "compatibilities" => { call = call.add_compatibilities(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let 
mut found = false; @@ -12456,13 +12455,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12965,13 +12964,13 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -13513,7 +13512,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13798,7 +13797,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| 
arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14019,7 +14018,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "synchronous" => { - call = call.synchronous(arg_from_str(value.unwrap_or("false"), err, "synchronous", "boolean")); + call = call.synchronous( value.map(|v| arg_from_str(v, err, "synchronous", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14409,10 +14408,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unmapped-site" => { - call = call.unmapped_site(arg_from_str(value.unwrap_or("false"), err, "unmapped-site", "boolean")); + call = call.unmapped_site( value.map(|v| arg_from_str(v, err, "unmapped-site", "boolean")).unwrap_or(false)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -14427,31 +14426,31 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = 
call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, "ad-words-site" => { - call = call.ad_words_site(arg_from_str(value.unwrap_or("false"), err, "ad-words-site", "boolean")); + call = call.ad_words_site( value.map(|v| arg_from_str(v, err, "ad-words-site", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14882,16 +14881,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "width" => { - call = call.width(arg_from_str(value.unwrap_or("-0"), err, "width", "integer")); + call = call.width( value.map(|v| arg_from_str(v, err, "width", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "iab-standard" => { - call = call.iab_standard(arg_from_str(value.unwrap_or("false"), err, "iab-standard", "boolean")); + call = call.iab_standard( value.map(|v| arg_from_str(v, err, 
"iab-standard", "boolean")).unwrap_or(false)); }, "height" => { - call = call.height(arg_from_str(value.unwrap_or("-0"), err, "height", "integer")); + call = call.height( value.map(|v| arg_from_str(v, err, "height", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15100,10 +15099,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15401,10 +15400,10 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15626,13 +15625,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16152,7 +16151,7 @@ where let (key, value) = 
parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16395,7 +16394,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -16410,13 +16409,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "account-user-role-only" => { - call = call.account_user_role_only(arg_from_str(value.unwrap_or("false"), err, "account-user-role-only", "boolean")); + call = call.account_user_role_only( value.map(|v| arg_from_str(v, err, "account-user-role-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -23763,7 +23762,7 @@ async fn main() { let mut app = App::new("dfareporting2d8") .author("Sebastian Thiel ") - .version("4.0.1+20180830") + .version("5.0.2+20180830") .about("Manages your DoubleClick Campaign Manager ad campaigns and reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dfareporting2d8_cli") .arg(Arg::with_name("url") diff --git a/gen/dfareporting2d8/Cargo.toml b/gen/dfareporting2d8/Cargo.toml index dbe500dd9a..e8f1d73db2 100644 --- a/gen/dfareporting2d8/Cargo.toml +++ b/gen/dfareporting2d8/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dfareporting2d8" -version = 
"5.0.2-beta-1+20180830" +version = "5.0.2+20180830" authors = ["Sebastian Thiel "] description = "A complete library to interact with dfareporting (protocol v2.8)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting2d8" homepage = "https://developers.google.com/doubleclick-advertisers/" -documentation = "https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830" +documentation = "https://docs.rs/google-dfareporting2d8/5.0.2+20180830" license = "MIT" keywords = ["dfareporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dfareporting2d8/README.md b/gen/dfareporting2d8/README.md index 01e50682b8..6ac80fc93f 100644 --- a/gen/dfareporting2d8/README.md +++ b/gen/dfareporting2d8/README.md @@ -5,144 +5,144 @@ DO NOT EDIT ! --> The `google-dfareporting2d8` library allows access to all features of the *Google dfareporting* service. -This documentation was generated from *dfareporting* crate version *5.0.2-beta-1+20180830*, where *20180830* is the exact revision of the *dfareporting:v2.8* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *dfareporting* crate version *5.0.2+20180830*, where *20180830* is the exact revision of the *dfareporting:v2.8* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *dfareporting* *v2d8* API can be found at the [official documentation site](https://developers.google.com/doubleclick-advertisers/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/Dfareporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/Dfareporting) ... 
-* [account active ad summaries](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountActiveAdSummary) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountActiveAdSummaryGetCall) -* [account permission groups](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountPermissionGroup) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountPermissionGroupListCall) -* [account permissions](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountPermission) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountPermissionListCall) -* [account user profiles](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountUserProfile) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountUserProfileUpdateCall) -* [accounts](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Account) - * 
[*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AccountUpdateCall) -* [ads](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Ad) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdUpdateCall) -* [advertiser groups](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserGroup) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserGroupPatchCall) and 
[*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserGroupUpdateCall) -* [advertisers](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Advertiser) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::AdvertiserUpdateCall) -* [browsers](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Browser) - * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::BrowserListCall) -* [campaign creative associations](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CampaignCreativeAssociation) - * [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CampaignCreativeAssociationListCall) -* [campaigns](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Campaign) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CampaignListCall), 
[*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CampaignUpdateCall) -* [change logs](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ChangeLog) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ChangeLogListCall) -* [cities](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::City) - * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CityListCall) -* [connection types](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ConnectionType) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ConnectionTypeListCall) -* [content categories](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ContentCategory) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ContentCategoryPatchCall) and 
[*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ContentCategoryUpdateCall) -* [conversions](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Conversion) - * [*batchinsert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ConversionBatchupdateCall) -* [countries](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Country) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CountryListCall) -* [creative assets](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeAsset) - * [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeAssetInsertCall) -* [creative field values](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldValue) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldValuePatchCall) and 
[*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldValueUpdateCall) -* [creative fields](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeField) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeFieldUpdateCall) -* [creative groups](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeGroup) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeGroupUpdateCall) -* [creatives](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Creative) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeGetCall), 
[*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeUpdateCall) -* [dimension values](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DimensionValue) - * [*query*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DimensionValueQueryCall) -* [directory site contacts](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DirectorySiteContact) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DirectorySiteContactGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DirectorySiteContactListCall) -* [directory sites](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DirectorySite) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DirectorySiteListCall) -* [dynamic targeting keys](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DynamicTargetingKey) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DynamicTargetingKeyDeleteCall), 
[*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::DynamicTargetingKeyListCall) -* [event tags](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::EventTag) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::EventTagUpdateCall) -* [files](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::File) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FileListCall) -* [floodlight activities](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivity) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityGetCall), 
[*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityUpdateCall) -* [floodlight activity groups](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityGroup) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightActivityGroupUpdateCall) -* [floodlight configurations](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightConfiguration) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightConfigurationPatchCall) and 
[*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FloodlightConfigurationUpdateCall) -* [inventory items](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::InventoryItem) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::InventoryItemListCall) -* [landing pages](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::LandingPage) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::LandingPageDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::LandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::LandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::LandingPageListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::LandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::LandingPageUpdateCall) -* [languages](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Language) - * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::LanguageListCall) -* [metros](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Metro) - * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::MetroListCall) -* [mobile carriers](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::MobileCarrier) - * 
[*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::MobileCarrierListCall) -* [operating system versions](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OperatingSystemVersion) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OperatingSystemVersionListCall) -* [operating systems](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OperatingSystem) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OperatingSystemListCall) -* [order documents](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OrderDocument) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OrderDocumentListCall) -* [orders](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Order) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::OrderListCall) -* [placement groups](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementGroup) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementGroupGetCall), 
[*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementGroupUpdateCall) -* [placement strategies](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementStrategy) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementStrategyUpdateCall) -* [placements](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Placement) - * [*generatetags*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementInsertCall), 
[*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlacementUpdateCall) -* [platform types](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlatformType) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PlatformTypeListCall) -* [postal codes](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PostalCode) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::PostalCodeListCall) -* [projects](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Project) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ProjectListCall) -* [regions](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Region) - * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RegionListCall) -* [remarketing list shares](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListShare) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListShareGetCall), 
[*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListShareUpdateCall) -* [remarketing lists](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingList) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::RemarketingListUpdateCall) -* [reports](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Report) - * [*compatible fields query*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportInsertCall), 
[*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportUpdateCall) -* [sites](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Site) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SiteUpdateCall) -* [sizes](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Size) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SizeListCall) -* [subaccounts](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::Subaccount) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SubaccountInsertCall), 
[*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::SubaccountUpdateCall) -* [targetable remarketing lists](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetableRemarketingList) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetableRemarketingListListCall) -* [targeting templates](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetingTemplate) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::TargetingTemplateUpdateCall) -* [user profiles](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserProfile) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserProfileListCall) -* [user role permission 
groups](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRolePermissionGroup) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRolePermissionGroupListCall) -* [user role permissions](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRolePermission) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRolePermissionListCall) -* [user roles](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRole) - * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::UserRoleUpdateCall) -* [video formats](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::VideoFormat) - * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::VideoFormatListCall) +* [account 
active ad summaries](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountActiveAdSummary) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountActiveAdSummaryGetCall) +* [account permission groups](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountPermissionGroup) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountPermissionGroupListCall) +* [account permissions](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountPermission) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountPermissionListCall) +* [account user profiles](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountUserProfile) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountUserProfileUpdateCall) +* [accounts](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Account) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountGetCall), 
[*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AccountUpdateCall) +* [ads](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Ad) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdUpdateCall) +* [advertiser groups](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserGroup) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserGroupUpdateCall) +* [advertisers](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Advertiser) + * 
[*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::AdvertiserUpdateCall) +* [browsers](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Browser) + * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::BrowserListCall) +* [campaign creative associations](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CampaignCreativeAssociation) + * [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CampaignCreativeAssociationListCall) +* [campaigns](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Campaign) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CampaignUpdateCall) +* [change logs](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ChangeLog) + * 
[*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ChangeLogListCall) +* [cities](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::City) + * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CityListCall) +* [connection types](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ConnectionType) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ConnectionTypeListCall) +* [content categories](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ContentCategory) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ContentCategoryUpdateCall) +* [conversions](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Conversion) + * [*batchinsert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ConversionBatchupdateCall) 
+* [countries](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Country) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CountryListCall) +* [creative assets](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeAsset) + * [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeAssetInsertCall) +* [creative field values](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldValue) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldValueUpdateCall) +* [creative fields](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeField) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldInsertCall), 
[*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeFieldUpdateCall) +* [creative groups](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeGroup) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeGroupUpdateCall) +* [creatives](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Creative) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeUpdateCall) +* [dimension values](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DimensionValue) + * [*query*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DimensionValueQueryCall) +* [directory site 
contacts](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DirectorySiteContact) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DirectorySiteContactGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DirectorySiteContactListCall) +* [directory sites](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DirectorySite) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DirectorySiteListCall) +* [dynamic targeting keys](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DynamicTargetingKey) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DynamicTargetingKeyDeleteCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::DynamicTargetingKeyListCall) +* [event tags](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::EventTag) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::EventTagListCall), 
[*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::EventTagUpdateCall) +* [files](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::File) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FileListCall) +* [floodlight activities](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivity) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityUpdateCall) +* [floodlight activity groups](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityGroup) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityGroupInsertCall), 
[*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightActivityGroupUpdateCall) +* [floodlight configurations](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightConfiguration) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FloodlightConfigurationUpdateCall) +* [inventory items](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::InventoryItem) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::InventoryItemListCall) +* [landing pages](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::LandingPage) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::LandingPageDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::LandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::LandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::LandingPageListCall), 
[*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::LandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::LandingPageUpdateCall) +* [languages](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Language) + * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::LanguageListCall) +* [metros](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Metro) + * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::MetroListCall) +* [mobile carriers](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::MobileCarrier) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::MobileCarrierListCall) +* [operating system versions](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OperatingSystemVersion) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OperatingSystemVersionListCall) +* [operating systems](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OperatingSystem) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OperatingSystemListCall) +* [order documents](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OrderDocument) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OrderDocumentGetCall) 
and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OrderDocumentListCall) +* [orders](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Order) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::OrderListCall) +* [placement groups](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementGroup) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementGroupUpdateCall) +* [placement strategies](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementStrategy) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementStrategyPatchCall) and 
[*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementStrategyUpdateCall) +* [placements](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Placement) + * [*generatetags*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlacementUpdateCall) +* [platform types](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlatformType) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PlatformTypeListCall) +* [postal codes](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PostalCode) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::PostalCodeListCall) +* [projects](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Project) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ProjectListCall) +* 
[regions](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Region) + * [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RegionListCall) +* [remarketing list shares](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListShare) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListShareUpdateCall) +* [remarketing lists](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingList) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::RemarketingListUpdateCall) +* [reports](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Report) + * [*compatible fields query*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportFileGetCall), [*files 
list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportUpdateCall) +* [sites](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Site) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SiteUpdateCall) +* [sizes](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Size) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SizeListCall) +* [subaccounts](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::Subaccount) + * 
[*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::SubaccountUpdateCall) +* [targetable remarketing lists](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetableRemarketingList) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetableRemarketingListListCall) +* [targeting templates](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetingTemplate) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::TargetingTemplateUpdateCall) +* [user profiles](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserProfile) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserProfileGetCall) and 
[*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserProfileListCall) +* [user role permission groups](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRolePermissionGroup) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRolePermissionGroupListCall) +* [user role permissions](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRolePermission) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRolePermissionListCall) +* [user roles](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRole) + * [*delete*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::UserRoleUpdateCall) +* [video formats](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::VideoFormat) + * [*get*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::VideoFormatGetCall) and 
[*list*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::VideoFormatListCall) Upload supported by ... -* [*insert creative assets*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::CreativeAssetInsertCall) +* [*insert creative assets*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::CreativeAssetInsertCall) Download supported by ... -* [*get files*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::FileGetCall) -* [*files get reports*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/api::ReportFileGetCall) +* [*get files*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::FileGetCall) +* [*files get reports*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/api::ReportFileGetCall) @@ -150,17 +150,17 @@ Download supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/Dfareporting)** +* **[Hub](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/Dfareporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::CallBuilder) -* **[Resources](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::CallBuilder) +* **[Resources](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::Part)** + * **[Parts](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -258,17 +258,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -278,29 +278,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::Delegate) to the -[Method Builder](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::Delegate) to the +[Method Builder](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::RequestValue) and -[decodable](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::RequestValue) and +[decodable](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dfareporting2d8/5.0.2-beta-1+20180830/google_dfareporting2d8/client::RequestValue) are moved +* [request values](https://docs.rs/google-dfareporting2d8/5.0.2+20180830/google_dfareporting2d8/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/dfareporting2d8/src/api.rs b/gen/dfareporting2d8/src/api.rs index 449d35660c..53471451b1 100644 --- a/gen/dfareporting2d8/src/api.rs +++ b/gen/dfareporting2d8/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> Dfareporting { Dfareporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/dfareporting/v2.8/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -319,7 +319,7 @@ impl<'a, S> Dfareporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dfareporting2d8/src/client.rs b/gen/dfareporting2d8/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dfareporting2d8/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dfareporting2d8/src/lib.rs b/gen/dfareporting2d8/src/lib.rs index 09c493684c..d6e156d6e0 100644 --- a/gen/dfareporting2d8/src/lib.rs +++ b/gen/dfareporting2d8/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *dfareporting* crate version *5.0.2-beta-1+20180830*, where *20180830* is the exact revision of the *dfareporting:v2.8* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *dfareporting* crate version *5.0.2+20180830*, where *20180830* is the exact revision of the *dfareporting:v2.8* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *dfareporting* *v2d8* API can be found at the //! [official documentation site](https://developers.google.com/doubleclick-advertisers/). diff --git a/gen/dfareporting3-cli/Cargo.toml b/gen/dfareporting3-cli/Cargo.toml index 7e9c9a5f60..1a6d0fd322 100644 --- a/gen/dfareporting3-cli/Cargo.toml +++ b/gen/dfareporting3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dfareporting3-cli" -version = "4.0.1+20180830" +version = "5.0.2+20180830" authors = ["Sebastian Thiel "] description = "A complete library to interact with dfareporting (protocol v3.0)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3-cli" @@ -20,13 +20,13 @@ name = "dfareporting3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dfareporting3] path = "../dfareporting3" -version = "4.0.1+20180830" +version = "5.0.2+20180830" + diff --git a/gen/dfareporting3-cli/README.md b/gen/dfareporting3-cli/README.md index 3150ae842a..837dbc7eb0 100644 --- a/gen/dfareporting3-cli/README.md +++ b/gen/dfareporting3-cli/README.md @@ -25,7 +25,7 @@ Find the source code 
[on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *dfareporting* API at revision *20180830*. The CLI is at version *4.0.1*. +This documentation was generated from the *dfareporting* API at revision *20180830*. The CLI is at version *5.0.2*. ```bash dfareporting3 [options] diff --git a/gen/dfareporting3-cli/mkdocs.yml b/gen/dfareporting3-cli/mkdocs.yml index 0196e89ebb..4e625a19b2 100644 --- a/gen/dfareporting3-cli/mkdocs.yml +++ b/gen/dfareporting3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: dfareporting v4.0.1+20180830 +site_name: dfareporting v5.0.2+20180830 site_url: http://byron.github.io/google-apis-rs/google-dfareporting3-cli site_description: A complete library to interact with dfareporting (protocol v3.0) @@ -7,212 +7,272 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['account-active-ad-summaries_get.md', 'Account Active Ad Summaries', 'Get'] -- ['account-permission-groups_get.md', 'Account Permission Groups', 'Get'] -- ['account-permission-groups_list.md', 'Account Permission Groups', 'List'] -- ['account-permissions_get.md', 'Account Permissions', 'Get'] -- ['account-permissions_list.md', 'Account Permissions', 'List'] -- ['account-user-profiles_get.md', 'Account User Profiles', 'Get'] -- ['account-user-profiles_insert.md', 'Account User Profiles', 'Insert'] -- ['account-user-profiles_list.md', 'Account User Profiles', 'List'] -- ['account-user-profiles_patch.md', 'Account User Profiles', 'Patch'] -- ['account-user-profiles_update.md', 'Account User Profiles', 'Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['ads_get.md', 'Ads', 'Get'] -- ['ads_insert.md', 'Ads', 'Insert'] -- ['ads_list.md', 'Ads', 'List'] -- ['ads_patch.md', 'Ads', 'Patch'] -- 
['ads_update.md', 'Ads', 'Update'] -- ['advertiser-groups_delete.md', 'Advertiser Groups', 'Delete'] -- ['advertiser-groups_get.md', 'Advertiser Groups', 'Get'] -- ['advertiser-groups_insert.md', 'Advertiser Groups', 'Insert'] -- ['advertiser-groups_list.md', 'Advertiser Groups', 'List'] -- ['advertiser-groups_patch.md', 'Advertiser Groups', 'Patch'] -- ['advertiser-groups_update.md', 'Advertiser Groups', 'Update'] -- ['advertiser-landing-pages_get.md', 'Advertiser Landing Pages', 'Get'] -- ['advertiser-landing-pages_insert.md', 'Advertiser Landing Pages', 'Insert'] -- ['advertiser-landing-pages_list.md', 'Advertiser Landing Pages', 'List'] -- ['advertiser-landing-pages_patch.md', 'Advertiser Landing Pages', 'Patch'] -- ['advertiser-landing-pages_update.md', 'Advertiser Landing Pages', 'Update'] -- ['advertisers_get.md', 'Advertisers', 'Get'] -- ['advertisers_insert.md', 'Advertisers', 'Insert'] -- ['advertisers_list.md', 'Advertisers', 'List'] -- ['advertisers_patch.md', 'Advertisers', 'Patch'] -- ['advertisers_update.md', 'Advertisers', 'Update'] -- ['browsers_list.md', 'Browsers', 'List'] -- ['campaign-creative-associations_insert.md', 'Campaign Creative Associations', 'Insert'] -- ['campaign-creative-associations_list.md', 'Campaign Creative Associations', 'List'] -- ['campaigns_get.md', 'Campaigns', 'Get'] -- ['campaigns_insert.md', 'Campaigns', 'Insert'] -- ['campaigns_list.md', 'Campaigns', 'List'] -- ['campaigns_patch.md', 'Campaigns', 'Patch'] -- ['campaigns_update.md', 'Campaigns', 'Update'] -- ['change-logs_get.md', 'Change Logs', 'Get'] -- ['change-logs_list.md', 'Change Logs', 'List'] -- ['cities_list.md', 'Cities', 'List'] -- ['connection-types_get.md', 'Connection Types', 'Get'] -- ['connection-types_list.md', 'Connection Types', 'List'] -- ['content-categories_delete.md', 'Content Categories', 'Delete'] -- ['content-categories_get.md', 'Content Categories', 'Get'] -- ['content-categories_insert.md', 'Content Categories', 'Insert'] -- 
['content-categories_list.md', 'Content Categories', 'List'] -- ['content-categories_patch.md', 'Content Categories', 'Patch'] -- ['content-categories_update.md', 'Content Categories', 'Update'] -- ['conversions_batchinsert.md', 'Conversions', 'Batchinsert'] -- ['conversions_batchupdate.md', 'Conversions', 'Batchupdate'] -- ['countries_get.md', 'Countries', 'Get'] -- ['countries_list.md', 'Countries', 'List'] -- ['creative-assets_insert.md', 'Creative Assets', 'Insert'] -- ['creative-field-values_delete.md', 'Creative Field Values', 'Delete'] -- ['creative-field-values_get.md', 'Creative Field Values', 'Get'] -- ['creative-field-values_insert.md', 'Creative Field Values', 'Insert'] -- ['creative-field-values_list.md', 'Creative Field Values', 'List'] -- ['creative-field-values_patch.md', 'Creative Field Values', 'Patch'] -- ['creative-field-values_update.md', 'Creative Field Values', 'Update'] -- ['creative-fields_delete.md', 'Creative Fields', 'Delete'] -- ['creative-fields_get.md', 'Creative Fields', 'Get'] -- ['creative-fields_insert.md', 'Creative Fields', 'Insert'] -- ['creative-fields_list.md', 'Creative Fields', 'List'] -- ['creative-fields_patch.md', 'Creative Fields', 'Patch'] -- ['creative-fields_update.md', 'Creative Fields', 'Update'] -- ['creative-groups_get.md', 'Creative Groups', 'Get'] -- ['creative-groups_insert.md', 'Creative Groups', 'Insert'] -- ['creative-groups_list.md', 'Creative Groups', 'List'] -- ['creative-groups_patch.md', 'Creative Groups', 'Patch'] -- ['creative-groups_update.md', 'Creative Groups', 'Update'] -- ['creatives_get.md', 'Creatives', 'Get'] -- ['creatives_insert.md', 'Creatives', 'Insert'] -- ['creatives_list.md', 'Creatives', 'List'] -- ['creatives_patch.md', 'Creatives', 'Patch'] -- ['creatives_update.md', 'Creatives', 'Update'] -- ['dimension-values_query.md', 'Dimension Values', 'Query'] -- ['directory-site-contacts_get.md', 'Directory Site Contacts', 'Get'] -- ['directory-site-contacts_list.md', 'Directory Site 
Contacts', 'List'] -- ['directory-sites_get.md', 'Directory Sites', 'Get'] -- ['directory-sites_insert.md', 'Directory Sites', 'Insert'] -- ['directory-sites_list.md', 'Directory Sites', 'List'] -- ['dynamic-targeting-keys_delete.md', 'Dynamic Targeting Keys', 'Delete'] -- ['dynamic-targeting-keys_insert.md', 'Dynamic Targeting Keys', 'Insert'] -- ['dynamic-targeting-keys_list.md', 'Dynamic Targeting Keys', 'List'] -- ['event-tags_delete.md', 'Event Tags', 'Delete'] -- ['event-tags_get.md', 'Event Tags', 'Get'] -- ['event-tags_insert.md', 'Event Tags', 'Insert'] -- ['event-tags_list.md', 'Event Tags', 'List'] -- ['event-tags_patch.md', 'Event Tags', 'Patch'] -- ['event-tags_update.md', 'Event Tags', 'Update'] -- ['files_get.md', 'Files', 'Get'] -- ['files_list.md', 'Files', 'List'] -- ['floodlight-activities_delete.md', 'Floodlight Activities', 'Delete'] -- ['floodlight-activities_generatetag.md', 'Floodlight Activities', 'Generatetag'] -- ['floodlight-activities_get.md', 'Floodlight Activities', 'Get'] -- ['floodlight-activities_insert.md', 'Floodlight Activities', 'Insert'] -- ['floodlight-activities_list.md', 'Floodlight Activities', 'List'] -- ['floodlight-activities_patch.md', 'Floodlight Activities', 'Patch'] -- ['floodlight-activities_update.md', 'Floodlight Activities', 'Update'] -- ['floodlight-activity-groups_get.md', 'Floodlight Activity Groups', 'Get'] -- ['floodlight-activity-groups_insert.md', 'Floodlight Activity Groups', 'Insert'] -- ['floodlight-activity-groups_list.md', 'Floodlight Activity Groups', 'List'] -- ['floodlight-activity-groups_patch.md', 'Floodlight Activity Groups', 'Patch'] -- ['floodlight-activity-groups_update.md', 'Floodlight Activity Groups', 'Update'] -- ['floodlight-configurations_get.md', 'Floodlight Configurations', 'Get'] -- ['floodlight-configurations_list.md', 'Floodlight Configurations', 'List'] -- ['floodlight-configurations_patch.md', 'Floodlight Configurations', 'Patch'] -- ['floodlight-configurations_update.md', 
'Floodlight Configurations', 'Update'] -- ['inventory-items_get.md', 'Inventory Items', 'Get'] -- ['inventory-items_list.md', 'Inventory Items', 'List'] -- ['languages_list.md', 'Languages', 'List'] -- ['metros_list.md', 'Metros', 'List'] -- ['mobile-carriers_get.md', 'Mobile Carriers', 'Get'] -- ['mobile-carriers_list.md', 'Mobile Carriers', 'List'] -- ['operating-system-versions_get.md', 'Operating System Versions', 'Get'] -- ['operating-system-versions_list.md', 'Operating System Versions', 'List'] -- ['operating-systems_get.md', 'Operating Systems', 'Get'] -- ['operating-systems_list.md', 'Operating Systems', 'List'] -- ['order-documents_get.md', 'Order Documents', 'Get'] -- ['order-documents_list.md', 'Order Documents', 'List'] -- ['orders_get.md', 'Orders', 'Get'] -- ['orders_list.md', 'Orders', 'List'] -- ['placement-groups_get.md', 'Placement Groups', 'Get'] -- ['placement-groups_insert.md', 'Placement Groups', 'Insert'] -- ['placement-groups_list.md', 'Placement Groups', 'List'] -- ['placement-groups_patch.md', 'Placement Groups', 'Patch'] -- ['placement-groups_update.md', 'Placement Groups', 'Update'] -- ['placement-strategies_delete.md', 'Placement Strategies', 'Delete'] -- ['placement-strategies_get.md', 'Placement Strategies', 'Get'] -- ['placement-strategies_insert.md', 'Placement Strategies', 'Insert'] -- ['placement-strategies_list.md', 'Placement Strategies', 'List'] -- ['placement-strategies_patch.md', 'Placement Strategies', 'Patch'] -- ['placement-strategies_update.md', 'Placement Strategies', 'Update'] -- ['placements_generatetags.md', 'Placements', 'Generatetags'] -- ['placements_get.md', 'Placements', 'Get'] -- ['placements_insert.md', 'Placements', 'Insert'] -- ['placements_list.md', 'Placements', 'List'] -- ['placements_patch.md', 'Placements', 'Patch'] -- ['placements_update.md', 'Placements', 'Update'] -- ['platform-types_get.md', 'Platform Types', 'Get'] -- ['platform-types_list.md', 'Platform Types', 'List'] -- ['postal-codes_get.md', 
'Postal Codes', 'Get'] -- ['postal-codes_list.md', 'Postal Codes', 'List'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_list.md', 'Projects', 'List'] -- ['regions_list.md', 'Regions', 'List'] -- ['remarketing-list-shares_get.md', 'Remarketing List Shares', 'Get'] -- ['remarketing-list-shares_patch.md', 'Remarketing List Shares', 'Patch'] -- ['remarketing-list-shares_update.md', 'Remarketing List Shares', 'Update'] -- ['remarketing-lists_get.md', 'Remarketing Lists', 'Get'] -- ['remarketing-lists_insert.md', 'Remarketing Lists', 'Insert'] -- ['remarketing-lists_list.md', 'Remarketing Lists', 'List'] -- ['remarketing-lists_patch.md', 'Remarketing Lists', 'Patch'] -- ['remarketing-lists_update.md', 'Remarketing Lists', 'Update'] -- ['reports_compatible-fields-query.md', 'Reports', 'Compatible Fields Query'] -- ['reports_delete.md', 'Reports', 'Delete'] -- ['reports_files-get.md', 'Reports', 'Files Get'] -- ['reports_files-list.md', 'Reports', 'Files List'] -- ['reports_get.md', 'Reports', 'Get'] -- ['reports_insert.md', 'Reports', 'Insert'] -- ['reports_list.md', 'Reports', 'List'] -- ['reports_patch.md', 'Reports', 'Patch'] -- ['reports_run.md', 'Reports', 'Run'] -- ['reports_update.md', 'Reports', 'Update'] -- ['sites_get.md', 'Sites', 'Get'] -- ['sites_insert.md', 'Sites', 'Insert'] -- ['sites_list.md', 'Sites', 'List'] -- ['sites_patch.md', 'Sites', 'Patch'] -- ['sites_update.md', 'Sites', 'Update'] -- ['sizes_get.md', 'Sizes', 'Get'] -- ['sizes_insert.md', 'Sizes', 'Insert'] -- ['sizes_list.md', 'Sizes', 'List'] -- ['subaccounts_get.md', 'Subaccounts', 'Get'] -- ['subaccounts_insert.md', 'Subaccounts', 'Insert'] -- ['subaccounts_list.md', 'Subaccounts', 'List'] -- ['subaccounts_patch.md', 'Subaccounts', 'Patch'] -- ['subaccounts_update.md', 'Subaccounts', 'Update'] -- ['targetable-remarketing-lists_get.md', 'Targetable Remarketing Lists', 'Get'] -- ['targetable-remarketing-lists_list.md', 'Targetable Remarketing Lists', 'List'] -- 
['targeting-templates_get.md', 'Targeting Templates', 'Get'] -- ['targeting-templates_insert.md', 'Targeting Templates', 'Insert'] -- ['targeting-templates_list.md', 'Targeting Templates', 'List'] -- ['targeting-templates_patch.md', 'Targeting Templates', 'Patch'] -- ['targeting-templates_update.md', 'Targeting Templates', 'Update'] -- ['user-profiles_get.md', 'User Profiles', 'Get'] -- ['user-profiles_list.md', 'User Profiles', 'List'] -- ['user-role-permission-groups_get.md', 'User Role Permission Groups', 'Get'] -- ['user-role-permission-groups_list.md', 'User Role Permission Groups', 'List'] -- ['user-role-permissions_get.md', 'User Role Permissions', 'Get'] -- ['user-role-permissions_list.md', 'User Role Permissions', 'List'] -- ['user-roles_delete.md', 'User Roles', 'Delete'] -- ['user-roles_get.md', 'User Roles', 'Get'] -- ['user-roles_insert.md', 'User Roles', 'Insert'] -- ['user-roles_list.md', 'User Roles', 'List'] -- ['user-roles_patch.md', 'User Roles', 'Patch'] -- ['user-roles_update.md', 'User Roles', 'Update'] -- ['video-formats_get.md', 'Video Formats', 'Get'] -- ['video-formats_list.md', 'Video Formats', 'List'] +nav: +- Home: 'index.md' +- 'Account Active Ad Summaries': + - 'Get': 'account-active-ad-summaries_get.md' +- 'Account Permission Groups': + - 'Get': 'account-permission-groups_get.md' + - 'List': 'account-permission-groups_list.md' +- 'Account Permissions': + - 'Get': 'account-permissions_get.md' + - 'List': 'account-permissions_list.md' +- 'Account User Profiles': + - 'Get': 'account-user-profiles_get.md' + - 'Insert': 'account-user-profiles_insert.md' + - 'List': 'account-user-profiles_list.md' + - 'Patch': 'account-user-profiles_patch.md' + - 'Update': 'account-user-profiles_update.md' +- 'Accounts': + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' + - 'Update': 'accounts_update.md' +- 'Ads': + - 'Get': 'ads_get.md' + - 'Insert': 'ads_insert.md' + - 'List': 'ads_list.md' + - 'Patch': 
'ads_patch.md' + - 'Update': 'ads_update.md' +- 'Advertiser Groups': + - 'Delete': 'advertiser-groups_delete.md' + - 'Get': 'advertiser-groups_get.md' + - 'Insert': 'advertiser-groups_insert.md' + - 'List': 'advertiser-groups_list.md' + - 'Patch': 'advertiser-groups_patch.md' + - 'Update': 'advertiser-groups_update.md' +- 'Advertiser Landing Pages': + - 'Get': 'advertiser-landing-pages_get.md' + - 'Insert': 'advertiser-landing-pages_insert.md' + - 'List': 'advertiser-landing-pages_list.md' + - 'Patch': 'advertiser-landing-pages_patch.md' + - 'Update': 'advertiser-landing-pages_update.md' +- 'Advertisers': + - 'Get': 'advertisers_get.md' + - 'Insert': 'advertisers_insert.md' + - 'List': 'advertisers_list.md' + - 'Patch': 'advertisers_patch.md' + - 'Update': 'advertisers_update.md' +- 'Browsers': + - 'List': 'browsers_list.md' +- 'Campaign Creative Associations': + - 'Insert': 'campaign-creative-associations_insert.md' + - 'List': 'campaign-creative-associations_list.md' +- 'Campaigns': + - 'Get': 'campaigns_get.md' + - 'Insert': 'campaigns_insert.md' + - 'List': 'campaigns_list.md' + - 'Patch': 'campaigns_patch.md' + - 'Update': 'campaigns_update.md' +- 'Change Logs': + - 'Get': 'change-logs_get.md' + - 'List': 'change-logs_list.md' +- 'Cities': + - 'List': 'cities_list.md' +- 'Connection Types': + - 'Get': 'connection-types_get.md' + - 'List': 'connection-types_list.md' +- 'Content Categories': + - 'Delete': 'content-categories_delete.md' + - 'Get': 'content-categories_get.md' + - 'Insert': 'content-categories_insert.md' + - 'List': 'content-categories_list.md' + - 'Patch': 'content-categories_patch.md' + - 'Update': 'content-categories_update.md' +- 'Conversions': + - 'Batchinsert': 'conversions_batchinsert.md' + - 'Batchupdate': 'conversions_batchupdate.md' +- 'Countries': + - 'Get': 'countries_get.md' + - 'List': 'countries_list.md' +- 'Creative Assets': + - 'Insert': 'creative-assets_insert.md' +- 'Creative Field Values': + - 'Delete': 
'creative-field-values_delete.md' + - 'Get': 'creative-field-values_get.md' + - 'Insert': 'creative-field-values_insert.md' + - 'List': 'creative-field-values_list.md' + - 'Patch': 'creative-field-values_patch.md' + - 'Update': 'creative-field-values_update.md' +- 'Creative Fields': + - 'Delete': 'creative-fields_delete.md' + - 'Get': 'creative-fields_get.md' + - 'Insert': 'creative-fields_insert.md' + - 'List': 'creative-fields_list.md' + - 'Patch': 'creative-fields_patch.md' + - 'Update': 'creative-fields_update.md' +- 'Creative Groups': + - 'Get': 'creative-groups_get.md' + - 'Insert': 'creative-groups_insert.md' + - 'List': 'creative-groups_list.md' + - 'Patch': 'creative-groups_patch.md' + - 'Update': 'creative-groups_update.md' +- 'Creatives': + - 'Get': 'creatives_get.md' + - 'Insert': 'creatives_insert.md' + - 'List': 'creatives_list.md' + - 'Patch': 'creatives_patch.md' + - 'Update': 'creatives_update.md' +- 'Dimension Values': + - 'Query': 'dimension-values_query.md' +- 'Directory Site Contacts': + - 'Get': 'directory-site-contacts_get.md' + - 'List': 'directory-site-contacts_list.md' +- 'Directory Sites': + - 'Get': 'directory-sites_get.md' + - 'Insert': 'directory-sites_insert.md' + - 'List': 'directory-sites_list.md' +- 'Dynamic Targeting Keys': + - 'Delete': 'dynamic-targeting-keys_delete.md' + - 'Insert': 'dynamic-targeting-keys_insert.md' + - 'List': 'dynamic-targeting-keys_list.md' +- 'Event Tags': + - 'Delete': 'event-tags_delete.md' + - 'Get': 'event-tags_get.md' + - 'Insert': 'event-tags_insert.md' + - 'List': 'event-tags_list.md' + - 'Patch': 'event-tags_patch.md' + - 'Update': 'event-tags_update.md' +- 'Files': + - 'Get': 'files_get.md' + - 'List': 'files_list.md' +- 'Floodlight Activities': + - 'Delete': 'floodlight-activities_delete.md' + - 'Generatetag': 'floodlight-activities_generatetag.md' + - 'Get': 'floodlight-activities_get.md' + - 'Insert': 'floodlight-activities_insert.md' + - 'List': 'floodlight-activities_list.md' + - 'Patch': 
'floodlight-activities_patch.md' + - 'Update': 'floodlight-activities_update.md' +- 'Floodlight Activity Groups': + - 'Get': 'floodlight-activity-groups_get.md' + - 'Insert': 'floodlight-activity-groups_insert.md' + - 'List': 'floodlight-activity-groups_list.md' + - 'Patch': 'floodlight-activity-groups_patch.md' + - 'Update': 'floodlight-activity-groups_update.md' +- 'Floodlight Configurations': + - 'Get': 'floodlight-configurations_get.md' + - 'List': 'floodlight-configurations_list.md' + - 'Patch': 'floodlight-configurations_patch.md' + - 'Update': 'floodlight-configurations_update.md' +- 'Inventory Items': + - 'Get': 'inventory-items_get.md' + - 'List': 'inventory-items_list.md' +- 'Languages': + - 'List': 'languages_list.md' +- 'Metros': + - 'List': 'metros_list.md' +- 'Mobile Carriers': + - 'Get': 'mobile-carriers_get.md' + - 'List': 'mobile-carriers_list.md' +- 'Operating System Versions': + - 'Get': 'operating-system-versions_get.md' + - 'List': 'operating-system-versions_list.md' +- 'Operating Systems': + - 'Get': 'operating-systems_get.md' + - 'List': 'operating-systems_list.md' +- 'Order Documents': + - 'Get': 'order-documents_get.md' + - 'List': 'order-documents_list.md' +- 'Orders': + - 'Get': 'orders_get.md' + - 'List': 'orders_list.md' +- 'Placement Groups': + - 'Get': 'placement-groups_get.md' + - 'Insert': 'placement-groups_insert.md' + - 'List': 'placement-groups_list.md' + - 'Patch': 'placement-groups_patch.md' + - 'Update': 'placement-groups_update.md' +- 'Placement Strategies': + - 'Delete': 'placement-strategies_delete.md' + - 'Get': 'placement-strategies_get.md' + - 'Insert': 'placement-strategies_insert.md' + - 'List': 'placement-strategies_list.md' + - 'Patch': 'placement-strategies_patch.md' + - 'Update': 'placement-strategies_update.md' +- 'Placements': + - 'Generatetags': 'placements_generatetags.md' + - 'Get': 'placements_get.md' + - 'Insert': 'placements_insert.md' + - 'List': 'placements_list.md' + - 'Patch': 'placements_patch.md' + - 
'Update': 'placements_update.md' +- 'Platform Types': + - 'Get': 'platform-types_get.md' + - 'List': 'platform-types_list.md' +- 'Postal Codes': + - 'Get': 'postal-codes_get.md' + - 'List': 'postal-codes_list.md' +- 'Projects': + - 'Get': 'projects_get.md' + - 'List': 'projects_list.md' +- 'Regions': + - 'List': 'regions_list.md' +- 'Remarketing List Shares': + - 'Get': 'remarketing-list-shares_get.md' + - 'Patch': 'remarketing-list-shares_patch.md' + - 'Update': 'remarketing-list-shares_update.md' +- 'Remarketing Lists': + - 'Get': 'remarketing-lists_get.md' + - 'Insert': 'remarketing-lists_insert.md' + - 'List': 'remarketing-lists_list.md' + - 'Patch': 'remarketing-lists_patch.md' + - 'Update': 'remarketing-lists_update.md' +- 'Reports': + - 'Compatible Fields Query': 'reports_compatible-fields-query.md' + - 'Delete': 'reports_delete.md' + - 'Files Get': 'reports_files-get.md' + - 'Files List': 'reports_files-list.md' + - 'Get': 'reports_get.md' + - 'Insert': 'reports_insert.md' + - 'List': 'reports_list.md' + - 'Patch': 'reports_patch.md' + - 'Run': 'reports_run.md' + - 'Update': 'reports_update.md' +- 'Sites': + - 'Get': 'sites_get.md' + - 'Insert': 'sites_insert.md' + - 'List': 'sites_list.md' + - 'Patch': 'sites_patch.md' + - 'Update': 'sites_update.md' +- 'Sizes': + - 'Get': 'sizes_get.md' + - 'Insert': 'sizes_insert.md' + - 'List': 'sizes_list.md' +- 'Subaccounts': + - 'Get': 'subaccounts_get.md' + - 'Insert': 'subaccounts_insert.md' + - 'List': 'subaccounts_list.md' + - 'Patch': 'subaccounts_patch.md' + - 'Update': 'subaccounts_update.md' +- 'Targetable Remarketing Lists': + - 'Get': 'targetable-remarketing-lists_get.md' + - 'List': 'targetable-remarketing-lists_list.md' +- 'Targeting Templates': + - 'Get': 'targeting-templates_get.md' + - 'Insert': 'targeting-templates_insert.md' + - 'List': 'targeting-templates_list.md' + - 'Patch': 'targeting-templates_patch.md' + - 'Update': 'targeting-templates_update.md' +- 'User Profiles': + - 'Get': 
'user-profiles_get.md' + - 'List': 'user-profiles_list.md' +- 'User Role Permission Groups': + - 'Get': 'user-role-permission-groups_get.md' + - 'List': 'user-role-permission-groups_list.md' +- 'User Role Permissions': + - 'Get': 'user-role-permissions_get.md' + - 'List': 'user-role-permissions_list.md' +- 'User Roles': + - 'Delete': 'user-roles_delete.md' + - 'Get': 'user-roles_get.md' + - 'Insert': 'user-roles_insert.md' + - 'List': 'user-roles_list.md' + - 'Patch': 'user-roles_patch.md' + - 'Update': 'user-roles_update.md' +- 'Video Formats': + - 'Get': 'video-formats_get.md' + - 'List': 'video-formats_list.md' theme: readthedocs diff --git a/gen/dfareporting3-cli/src/client.rs b/gen/dfareporting3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dfareporting3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dfareporting3-cli/src/main.rs b/gen/dfareporting3-cli/src/main.rs index 465ba4871e..904a657d48 100644 --- a/gen/dfareporting3-cli/src/main.rs +++ b/gen/dfareporting3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dfareporting3::{api, Error, oauth2}; +use google_dfareporting3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -478,10 +477,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-role-id" => { - call = call.user_role_id(value.unwrap_or("")); + call = call.user_role_id( value.map(|v| arg_from_str(v, err, "user-role-id", "int64")).unwrap_or(-0)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -496,13 +495,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( 
value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -838,13 +837,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1317,10 +1316,10 @@ where call = call.add_type(value.unwrap_or("")); }, "ssl-required" => { - call = call.ssl_required(arg_from_str(value.unwrap_or("false"), err, "ssl-required", "boolean")); + call = call.ssl_required( value.map(|v| arg_from_str(v, err, "ssl-required", "boolean")).unwrap_or(false)); }, "ssl-compliant" => { - call = call.ssl_compliant(arg_from_str(value.unwrap_or("false"), err, "ssl-compliant", "boolean")); + call = call.ssl_compliant( value.map(|v| arg_from_str(v, err, "ssl-compliant", "boolean")).unwrap_or(false)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -1329,58 +1328,58 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "remarketing-list-ids" => { - call = call.add_remarketing_list_ids(value.unwrap_or("")); + call = call.add_remarketing_list_ids( value.map(|v| arg_from_str(v, err, "remarketing-list-ids", "int64")).unwrap_or(-0)); }, "placement-ids" => { - call = 
call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "landing-page-ids" => { - call = call.add_landing_page_ids(value.unwrap_or("")); + call = call.add_landing_page_ids( value.map(|v| arg_from_str(v, err, "landing-page-ids", "int64")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dynamic-click-tracker" => { - call = call.dynamic_click_tracker(arg_from_str(value.unwrap_or("false"), err, "dynamic-click-tracker", "boolean")); + call = call.dynamic_click_tracker( value.map(|v| arg_from_str(v, err, "dynamic-click-tracker", "boolean")).unwrap_or(false)); }, "creative-optimization-configuration-ids" => { - call = call.add_creative_optimization_configuration_ids(value.unwrap_or("")); + call = call.add_creative_optimization_configuration_ids( value.map(|v| arg_from_str(v, err, "creative-optimization-configuration-ids", "int64")).unwrap_or(-0)); }, "creative-ids" => { - call = call.add_creative_ids(value.unwrap_or("")); + call = call.add_creative_ids( value.map(|v| arg_from_str(v, err, "creative-ids", "int64")).unwrap_or(-0)); }, "compatibility" => { call = call.compatibility(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", 
"int64")).unwrap_or(-0)); }, "audience-segment-ids" => { - call = call.add_audience_segment_ids(value.unwrap_or("")); + call = call.add_audience_segment_ids( value.map(|v| arg_from_str(v, err, "audience-segment-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1932,10 +1931,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2309,7 +2308,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -2324,16 +2323,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - 
call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2730,7 +2729,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "status" => { call = call.status(value.unwrap_or("")); @@ -2748,22 +2747,22 @@ where call = call.page_token(value.unwrap_or("")); }, "only-parent" => { - call = call.only_parent(arg_from_str(value.unwrap_or("false"), err, "only-parent", "boolean")); + call = call.only_parent( value.map(|v| arg_from_str(v, err, "only-parent", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-advertisers-without-groups-only" => { - call = call.include_advertisers_without_groups_only(arg_from_str(value.unwrap_or("false"), err, "include-advertisers-without-groups-only", "boolean")); + call = call.include_advertisers_without_groups_only( value.map(|v| arg_from_str(v, err, "include-advertisers-without-groups-only", "boolean")).unwrap_or(false)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-ids" => { - call = 
call.add_floodlight_configuration_ids(value.unwrap_or("")); + call = call.add_floodlight_configuration_ids( value.map(|v| arg_from_str(v, err, "floodlight-configuration-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3181,7 +3180,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3417,7 +3416,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -3432,28 +3431,28 @@ where call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "excluded-ids" => { - call = call.add_excluded_ids(value.unwrap_or("")); + call = call.add_excluded_ids( value.map(|v| arg_from_str(v, err, "excluded-ids", "int64")).unwrap_or(-0)); }, 
"at-least-one-optimization-activity" => { - call = call.at_least_one_optimization_activity(arg_from_str(value.unwrap_or("false"), err, "at-least-one-optimization-activity", "boolean")); + call = call.at_least_one_optimization_activity( value.map(|v| arg_from_str(v, err, "at-least-one-optimization-activity", "boolean")).unwrap_or(false)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3817,7 +3816,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-profile-ids" => { - call = call.add_user_profile_ids(value.unwrap_or("")); + call = call.add_user_profile_ids( value.map(|v| arg_from_str(v, err, "user-profile-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -3829,19 +3828,19 @@ where call = call.object_type(value.unwrap_or("")); }, "object-ids" => { - call = call.add_object_ids(value.unwrap_or("")); + call = call.add_object_ids( value.map(|v| arg_from_str(v, err, "object-ids", "int64")).unwrap_or(-0)); }, "min-change-time" => { call = call.min_change_time(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-change-time" => { call = call.max_change_time(value.unwrap_or("")); }, "ids" => { - 
call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "action" => { call = call.action(value.unwrap_or("")); @@ -3900,16 +3899,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "region-dart-ids" => { - call = call.add_region_dart_ids(value.unwrap_or("")); + call = call.add_region_dart_ids( value.map(|v| arg_from_str(v, err, "region-dart-ids", "int64")).unwrap_or(-0)); }, "name-prefix" => { call = call.name_prefix(value.unwrap_or("")); }, "dart-ids" => { - call = call.add_dart_ids(value.unwrap_or("")); + call = call.add_dart_ids( value.map(|v| arg_from_str(v, err, "dart-ids", "int64")).unwrap_or(-0)); }, "country-dart-ids" => { - call = call.add_country_dart_ids(value.unwrap_or("")); + call = call.add_country_dart_ids( value.map(|v| arg_from_str(v, err, "country-dart-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4265,10 +4264,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5076,10 +5075,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5513,13 +5512,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => 
{ - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5928,16 +5927,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-number" => { - call = call.group_number(arg_from_str(value.unwrap_or("-0"), err, "group-number", "integer")); + call = call.group_number( value.map(|v| arg_from_str(v, err, "group-number", "int32")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6407,7 +6406,7 @@ where call = call.add_types(value.unwrap_or("")); }, "studio-creative-id" => { - call = call.studio_creative_id(value.unwrap_or("")); + call = call.studio_creative_id( value.map(|v| arg_from_str(v, err, "studio-creative-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -6416,40 +6415,40 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| 
arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "rendering-ids" => { - call = call.add_rendering_ids(value.unwrap_or("")); + call = call.add_rendering_ids( value.map(|v| arg_from_str(v, err, "rendering-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "creative-field-ids" => { - call = call.add_creative_field_ids(value.unwrap_or("")); + call = call.add_creative_field_ids( value.map(|v| arg_from_str(v, err, "creative-field-ids", "int64")).unwrap_or(-0)); }, "companion-creative-ids" => { - call = call.add_companion_creative_ids(value.unwrap_or("")); + call = call.add_companion_creative_ids( value.map(|v| arg_from_str(v, err, "companion-creative-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ 
-6874,7 +6873,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6994,13 +6993,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7229,34 +7228,34 @@ where call = call.search_string(value.unwrap_or("")); }, "parent-id" => { - call = call.parent_id(value.unwrap_or("")); + call = call.parent_id( value.map(|v| arg_from_str(v, err, "parent-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dfp-network-code" => { call = call.dfp_network_code(value.unwrap_or("")); }, "country-id" => { - call = call.country_id(value.unwrap_or("")); + call = call.country_id( value.map(|v| arg_from_str(v, err, "country-id", "int64")).unwrap_or(-0)); }, "active" => { - call = 
call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7447,13 +7446,13 @@ where call = call.object_type(value.unwrap_or("")); }, "object-id" => { - call = call.object_id(value.unwrap_or("")); + call = call.object_id( value.map(|v| arg_from_str(v, err, "object-id", "int64")).unwrap_or(-0)); }, "names" => { call = call.add_names(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7726,25 +7725,25 @@ where call = call.search_string(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "event-tag-types" => { call = 
call.add_event_tag_types(value.unwrap_or("")); }, "enabled" => { - call = call.enabled(arg_from_str(value.unwrap_or("false"), err, "enabled", "boolean")); + call = call.enabled( value.map(|v| arg_from_str(v, err, "enabled", "boolean")).unwrap_or(false)); }, "definitions-only" => { - call = call.definitions_only(arg_from_str(value.unwrap_or("false"), err, "definitions-only", "boolean")); + call = call.definitions_only( value.map(|v| arg_from_str(v, err, "definitions-only", "boolean")).unwrap_or(false)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "ad-id" => { - call = call.ad_id(value.unwrap_or("")); + call = call.ad_id( value.map(|v| arg_from_str(v, err, "ad-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8098,7 +8097,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8198,7 +8197,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8446,13 +8445,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", 
"int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "floodlight-activity-group-type" => { call = call.floodlight_activity_group_type(value.unwrap_or("")); @@ -8464,10 +8463,10 @@ where call = call.floodlight_activity_group_name(value.unwrap_or("")); }, "floodlight-activity-group-ids" => { - call = call.add_floodlight_activity_group_ids(value.unwrap_or("")); + call = call.add_floodlight_activity_group_ids( value.map(|v| arg_from_str(v, err, "floodlight-activity-group-ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8951,16 +8950,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9290,7 +9289,7 @@ 
where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9631,22 +9630,22 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "in-plan" => { - call = call.in_plan(arg_from_str(value.unwrap_or("false"), err, "in-plan", "boolean")); + call = call.in_plan( value.map(|v| arg_from_str(v, err, "in-plan", "boolean")).unwrap_or(false)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10176,7 +10175,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10185,16 +10184,16 @@ where call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", 
"integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -10308,7 +10307,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10317,10 +10316,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10581,7 +10580,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10590,7 +10589,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "placement-group-type" => { call = 
call.placement_group_type(value.unwrap_or("")); @@ -10608,28 +10607,28 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11175,10 +11174,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( 
value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11413,10 +11412,10 @@ where call = call.add_tag_formats(value.unwrap_or("")); }, "placement-ids" => { - call = call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11711,10 +11710,10 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -11723,7 +11722,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "payment-source" => { call = call.payment_source(value.unwrap_or("")); @@ -11741,34 +11740,34 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-ids" => { - call = 
call.add_group_ids(value.unwrap_or("")); + call = call.add_group_ids( value.map(|v| arg_from_str(v, err, "group-ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "compatibilities" => { call = call.add_compatibilities(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12458,13 +12457,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12967,13 +12966,13 @@ where call = 
call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -13515,7 +13514,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13800,7 +13799,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14021,7 +14020,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "synchronous" => { - call = call.synchronous(arg_from_str(value.unwrap_or("false"), err, "synchronous", "boolean")); + call = call.synchronous( value.map(|v| arg_from_str(v, err, "synchronous", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14411,10 +14410,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unmapped-site" => { - call = call.unmapped_site(arg_from_str(value.unwrap_or("false"), err, "unmapped-site", "boolean")); + call = call.unmapped_site( value.map(|v| arg_from_str(v, err, 
"unmapped-site", "boolean")).unwrap_or(false)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -14429,31 +14428,31 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, "ad-words-site" => { - call = call.ad_words_site(arg_from_str(value.unwrap_or("false"), err, "ad-words-site", "boolean")); + call = call.ad_words_site( value.map(|v| arg_from_str(v, err, "ad-words-site", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = 
call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14884,16 +14883,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "width" => { - call = call.width(arg_from_str(value.unwrap_or("-0"), err, "width", "integer")); + call = call.width( value.map(|v| arg_from_str(v, err, "width", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "iab-standard" => { - call = call.iab_standard(arg_from_str(value.unwrap_or("false"), err, "iab-standard", "boolean")); + call = call.iab_standard( value.map(|v| arg_from_str(v, err, "iab-standard", "boolean")).unwrap_or(false)); }, "height" => { - call = call.height(arg_from_str(value.unwrap_or("-0"), err, "height", "integer")); + call = call.height( value.map(|v| arg_from_str(v, err, "height", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15102,10 +15101,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ 
=> { let mut found = false; @@ -15403,10 +15402,10 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15628,13 +15627,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16154,7 +16153,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16397,7 +16396,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -16412,13 +16411,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, 
"max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "account-user-role-only" => { - call = call.account_user_role_only(arg_from_str(value.unwrap_or("false"), err, "account-user-role-only", "boolean")); + call = call.account_user_role_only( value.map(|v| arg_from_str(v, err, "account-user-role-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -23692,7 +23691,7 @@ async fn main() { let mut app = App::new("dfareporting3") .author("Sebastian Thiel ") - .version("4.0.1+20180830") + .version("5.0.2+20180830") .about("Manages your DoubleClick Campaign Manager ad campaigns and reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dfareporting3_cli") .arg(Arg::with_name("url") diff --git a/gen/dfareporting3/Cargo.toml b/gen/dfareporting3/Cargo.toml index 4a6970d4b3..5552af0529 100644 --- a/gen/dfareporting3/Cargo.toml +++ b/gen/dfareporting3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dfareporting3" -version = "5.0.2-beta-1+20180830" +version = "5.0.2+20180830" authors = ["Sebastian Thiel "] description = "A complete library to interact with dfareporting (protocol v3.0)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3" homepage = "https://developers.google.com/doubleclick-advertisers/" -documentation = "https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830" +documentation = "https://docs.rs/google-dfareporting3/5.0.2+20180830" license = "MIT" keywords = ["dfareporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dfareporting3/README.md b/gen/dfareporting3/README.md index 8aaa1b7bdd..752d7a0695 100644 --- a/gen/dfareporting3/README.md +++ b/gen/dfareporting3/README.md @@ -5,144 +5,144 @@ DO NOT 
EDIT ! --> The `google-dfareporting3` library allows access to all features of the *Google dfareporting* service. -This documentation was generated from *dfareporting* crate version *5.0.2-beta-1+20180830*, where *20180830* is the exact revision of the *dfareporting:v3.0* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *dfareporting* crate version *5.0.2+20180830*, where *20180830* is the exact revision of the *dfareporting:v3.0* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *dfareporting* *v3* API can be found at the [official documentation site](https://developers.google.com/doubleclick-advertisers/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/Dfareporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/Dfareporting) ... 
-* [account active ad summaries](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountActiveAdSummary) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountActiveAdSummaryGetCall) -* [account permission groups](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountPermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountPermissionGroupListCall) -* [account permissions](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountPermission) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountPermissionListCall) -* [account user profiles](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountUserProfile) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountUserProfileUpdateCall) -* [accounts](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Account) - * 
[*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AccountUpdateCall) -* [ads](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Ad) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdUpdateCall) -* [advertiser groups](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserGroup) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserGroupUpdateCall) +* [account active ad 
summaries](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountActiveAdSummary) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountActiveAdSummaryGetCall) +* [account permission groups](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountPermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountPermissionGroupListCall) +* [account permissions](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountPermission) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountPermissionListCall) +* [account user profiles](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountUserProfile) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountUserProfileUpdateCall) +* [accounts](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Account) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountListCall), 
[*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AccountUpdateCall) +* [ads](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Ad) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdUpdateCall) +* [advertiser groups](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserGroup) + * [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserGroupUpdateCall) * advertiser landing pages - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserLandingPageInsertCall), 
[*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserLandingPageUpdateCall) -* [advertisers](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Advertiser) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::AdvertiserUpdateCall) -* [browsers](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Browser) - * [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::BrowserListCall) -* [campaign creative associations](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CampaignCreativeAssociation) - * [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CampaignCreativeAssociationListCall) -* [campaigns](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Campaign) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CampaignGetCall), 
[*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CampaignUpdateCall) -* [change logs](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ChangeLog) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ChangeLogListCall) -* [cities](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::City) - * [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CityListCall) -* [connection types](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ConnectionType) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ConnectionTypeListCall) -* [content categories](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ContentCategory) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ContentCategoryInsertCall), 
[*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ContentCategoryUpdateCall) -* [conversions](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Conversion) - * [*batchinsert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ConversionBatchupdateCall) -* [countries](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Country) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CountryListCall) -* [creative assets](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeAsset) - * [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeAssetInsertCall) -* [creative field values](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldValue) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldValueListCall), 
[*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldValueUpdateCall) -* [creative fields](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeField) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeFieldUpdateCall) -* [creative groups](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeGroup) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeGroupUpdateCall) -* [creatives](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Creative) - * 
[*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeUpdateCall) -* [dimension values](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DimensionValue) - * [*query*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DimensionValueQueryCall) -* [directory site contacts](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DirectorySiteContact) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DirectorySiteContactGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DirectorySiteContactListCall) -* [directory sites](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DirectorySite) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DirectorySiteListCall) -* [dynamic targeting keys](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DynamicTargetingKey) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DynamicTargetingKeyDeleteCall), 
[*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::DynamicTargetingKeyListCall) -* [event tags](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::EventTag) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::EventTagUpdateCall) -* [files](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::File) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FileListCall) -* [floodlight activities](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivity) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityGetCall), 
[*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityUpdateCall) -* [floodlight activity groups](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityGroup) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightActivityGroupUpdateCall) -* [floodlight configurations](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightConfiguration) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FloodlightConfigurationUpdateCall) -* [inventory 
items](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::InventoryItem) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::InventoryItemListCall) -* [languages](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Language) - * [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::LanguageListCall) -* [metros](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Metro) - * [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::MetroListCall) -* [mobile carriers](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::MobileCarrier) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::MobileCarrierListCall) -* [operating system versions](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OperatingSystemVersion) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OperatingSystemVersionListCall) -* [operating systems](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OperatingSystem) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OperatingSystemListCall) -* [order 
documents](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OrderDocument) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OrderDocumentListCall) -* [orders](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Order) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::OrderListCall) -* [placement groups](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementGroup) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementGroupUpdateCall) -* [placement strategies](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementStrategy) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementStrategyInsertCall), 
[*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementStrategyUpdateCall) -* [placements](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Placement) - * [*generatetags*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlacementUpdateCall) -* [platform types](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlatformType) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PlatformTypeListCall) -* [postal codes](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PostalCode) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::PostalCodeListCall) -* 
[projects](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Project) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ProjectListCall) -* [regions](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Region) - * [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RegionListCall) -* [remarketing list shares](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListShare) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListShareUpdateCall) -* [remarketing lists](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingList) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::RemarketingListUpdateCall) -* [reports](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Report) - * [*compatible fields 
query*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportUpdateCall) -* [sites](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Site) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SiteUpdateCall) -* [sizes](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Size) - * 
[*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SizeListCall) -* [subaccounts](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::Subaccount) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::SubaccountUpdateCall) -* [targetable remarketing lists](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetableRemarketingList) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetableRemarketingListListCall) -* [targeting templates](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetingTemplate) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetingTemplateListCall), 
[*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::TargetingTemplateUpdateCall) -* [user profiles](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserProfile) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserProfileListCall) -* [user role permission groups](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRolePermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRolePermissionGroupListCall) -* [user role permissions](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRolePermission) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRolePermissionListCall) -* [user roles](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRole) - * [*delete*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRoleListCall), 
[*patch*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::UserRoleUpdateCall) -* [video formats](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::VideoFormat) - * [*get*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::VideoFormatListCall) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserLandingPageUpdateCall) +* [advertisers](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Advertiser) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::AdvertiserUpdateCall) +* [browsers](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Browser) + * 
[*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::BrowserListCall) +* [campaign creative associations](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CampaignCreativeAssociation) + * [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CampaignCreativeAssociationListCall) +* [campaigns](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Campaign) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CampaignUpdateCall) +* [change logs](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ChangeLog) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ChangeLogListCall) +* [cities](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::City) + * [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CityListCall) +* [connection types](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ConnectionType) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ConnectionTypeGetCall) and 
[*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ConnectionTypeListCall) +* [content categories](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ContentCategory) + * [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ContentCategoryUpdateCall) +* [conversions](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Conversion) + * [*batchinsert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ConversionBatchupdateCall) +* [countries](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Country) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CountryListCall) +* [creative assets](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeAsset) + * [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeAssetInsertCall) +* [creative field values](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldValue) + * 
[*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldValueUpdateCall) +* [creative fields](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeField) + * [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeFieldUpdateCall) +* [creative groups](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeGroup) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeGroupListCall), 
[*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeGroupUpdateCall) +* [creatives](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Creative) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeUpdateCall) +* [dimension values](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DimensionValue) + * [*query*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DimensionValueQueryCall) +* [directory site contacts](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DirectorySiteContact) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DirectorySiteContactGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DirectorySiteContactListCall) +* [directory sites](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DirectorySite) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DirectorySiteListCall) +* [dynamic targeting 
keys](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DynamicTargetingKey) + * [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DynamicTargetingKeyDeleteCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::DynamicTargetingKeyListCall) +* [event tags](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::EventTag) + * [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::EventTagUpdateCall) +* [files](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::File) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FileListCall) +* [floodlight activities](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivity) + * [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityGeneratetagCall), 
[*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityUpdateCall) +* [floodlight activity groups](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityGroup) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightActivityGroupUpdateCall) +* [floodlight configurations](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightConfiguration) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FloodlightConfigurationUpdateCall) +* [inventory 
items](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::InventoryItem) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::InventoryItemListCall) +* [languages](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Language) + * [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::LanguageListCall) +* [metros](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Metro) + * [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::MetroListCall) +* [mobile carriers](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::MobileCarrier) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::MobileCarrierListCall) +* [operating system versions](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OperatingSystemVersion) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OperatingSystemVersionListCall) +* [operating systems](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OperatingSystem) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OperatingSystemListCall) +* [order documents](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OrderDocument) + * 
[*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OrderDocumentListCall) +* [orders](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Order) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::OrderListCall) +* [placement groups](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementGroup) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementGroupUpdateCall) +* [placement strategies](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementStrategy) + * [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementStrategyPatchCall) and 
[*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementStrategyUpdateCall) +* [placements](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Placement) + * [*generatetags*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlacementUpdateCall) +* [platform types](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlatformType) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PlatformTypeListCall) +* [postal codes](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PostalCode) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::PostalCodeListCall) +* [projects](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Project) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ProjectListCall) +* [regions](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Region) + * 
[*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RegionListCall) +* [remarketing list shares](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListShare) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListShareUpdateCall) +* [remarketing lists](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingList) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::RemarketingListUpdateCall) +* [reports](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Report) + * [*compatible fields query*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportGetCall), 
[*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportUpdateCall) +* [sites](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Site) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SiteUpdateCall) +* [sizes](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Size) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SizeListCall) +* [subaccounts](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::Subaccount) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SubaccountListCall), 
[*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::SubaccountUpdateCall) +* [targetable remarketing lists](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetableRemarketingList) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetableRemarketingListListCall) +* [targeting templates](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetingTemplate) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::TargetingTemplateUpdateCall) +* [user profiles](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserProfile) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserProfileListCall) +* [user role permission groups](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRolePermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRolePermissionGroupGetCall) and 
[*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRolePermissionGroupListCall) +* [user role permissions](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRolePermission) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRolePermissionListCall) +* [user roles](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRole) + * [*delete*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::UserRoleUpdateCall) +* [video formats](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::VideoFormat) + * [*get*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::VideoFormatListCall) Upload supported by ... -* [*insert creative assets*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::CreativeAssetInsertCall) +* [*insert creative assets*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::CreativeAssetInsertCall) Download supported by ... 
-* [*get files*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::FileGetCall) -* [*files get reports*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/api::ReportFileGetCall) +* [*get files*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::FileGetCall) +* [*files get reports*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/api::ReportFileGetCall) @@ -150,17 +150,17 @@ Download supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/Dfareporting)** +* **[Hub](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/Dfareporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::CallBuilder) -* **[Resources](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::CallBuilder) +* **[Resources](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::Part)** + * **[Parts](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::Part)** * a collection of 
properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -258,17 +258,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -278,29 +278,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::Delegate) to the -[Method Builder](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::Delegate) to the +[Method Builder](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::RequestValue) and -[decodable](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::RequestValue) and +[decodable](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dfareporting3/5.0.2-beta-1+20180830/google_dfareporting3/client::RequestValue) are moved +* [request values](https://docs.rs/google-dfareporting3/5.0.2+20180830/google_dfareporting3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/dfareporting3/src/api.rs b/gen/dfareporting3/src/api.rs index 2f802c0dfe..47cca911c8 100644 --- a/gen/dfareporting3/src/api.rs +++ b/gen/dfareporting3/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> Dfareporting { Dfareporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/dfareporting/v3.0/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -319,7 +319,7 @@ impl<'a, S> Dfareporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dfareporting3/src/client.rs b/gen/dfareporting3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dfareporting3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dfareporting3/src/lib.rs b/gen/dfareporting3/src/lib.rs index b1c3dbac16..7bde7427e5 100644 --- a/gen/dfareporting3/src/lib.rs +++ b/gen/dfareporting3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *dfareporting* crate version *5.0.2-beta-1+20180830*, where *20180830* is the exact revision of the *dfareporting:v3.0* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *dfareporting* crate version *5.0.2+20180830*, where *20180830* is the exact revision of the *dfareporting:v3.0* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *dfareporting* *v3* API can be found at the //! [official documentation site](https://developers.google.com/doubleclick-advertisers/). diff --git a/gen/dfareporting3d2-cli/Cargo.toml b/gen/dfareporting3d2-cli/Cargo.toml index 65805147da..c0afaf580e 100644 --- a/gen/dfareporting3d2-cli/Cargo.toml +++ b/gen/dfareporting3d2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dfareporting3d2-cli" -version = "4.0.1+20190531" +version = "5.0.2+20190531" authors = ["Sebastian Thiel "] description = "A complete library to interact with dfareporting (protocol v3.2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d2-cli" @@ -20,13 +20,13 @@ name = "dfareporting3d2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dfareporting3d2] path = "../dfareporting3d2" -version = "4.0.1+20190531" +version = "5.0.2+20190531" + diff --git a/gen/dfareporting3d2-cli/README.md b/gen/dfareporting3d2-cli/README.md index d44de3c82c..570ccacfda 100644 --- a/gen/dfareporting3d2-cli/README.md +++ b/gen/dfareporting3d2-cli/README.md @@ -25,7 +25,7 
@@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *dfareporting* API at revision *20190531*. The CLI is at version *4.0.1*. +This documentation was generated from the *dfareporting* API at revision *20190531*. The CLI is at version *5.0.2*. ```bash dfareporting3d2 [options] diff --git a/gen/dfareporting3d2-cli/mkdocs.yml b/gen/dfareporting3d2-cli/mkdocs.yml index 8e3d3be4f2..365c4fd19d 100644 --- a/gen/dfareporting3d2-cli/mkdocs.yml +++ b/gen/dfareporting3d2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: dfareporting v4.0.1+20190531 +site_name: dfareporting v5.0.2+20190531 site_url: http://byron.github.io/google-apis-rs/google-dfareporting3d2-cli site_description: A complete library to interact with dfareporting (protocol v3.2) @@ -7,214 +7,275 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d2- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['account-active-ad-summaries_get.md', 'Account Active Ad Summaries', 'Get'] -- ['account-permission-groups_get.md', 'Account Permission Groups', 'Get'] -- ['account-permission-groups_list.md', 'Account Permission Groups', 'List'] -- ['account-permissions_get.md', 'Account Permissions', 'Get'] -- ['account-permissions_list.md', 'Account Permissions', 'List'] -- ['account-user-profiles_get.md', 'Account User Profiles', 'Get'] -- ['account-user-profiles_insert.md', 'Account User Profiles', 'Insert'] -- ['account-user-profiles_list.md', 'Account User Profiles', 'List'] -- ['account-user-profiles_patch.md', 'Account User Profiles', 'Patch'] -- ['account-user-profiles_update.md', 'Account User Profiles', 'Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['ads_get.md', 'Ads', 'Get'] -- ['ads_insert.md', 'Ads', 'Insert'] -- ['ads_list.md', 'Ads', 'List'] -- 
['ads_patch.md', 'Ads', 'Patch'] -- ['ads_update.md', 'Ads', 'Update'] -- ['advertiser-groups_delete.md', 'Advertiser Groups', 'Delete'] -- ['advertiser-groups_get.md', 'Advertiser Groups', 'Get'] -- ['advertiser-groups_insert.md', 'Advertiser Groups', 'Insert'] -- ['advertiser-groups_list.md', 'Advertiser Groups', 'List'] -- ['advertiser-groups_patch.md', 'Advertiser Groups', 'Patch'] -- ['advertiser-groups_update.md', 'Advertiser Groups', 'Update'] -- ['advertiser-landing-pages_get.md', 'Advertiser Landing Pages', 'Get'] -- ['advertiser-landing-pages_insert.md', 'Advertiser Landing Pages', 'Insert'] -- ['advertiser-landing-pages_list.md', 'Advertiser Landing Pages', 'List'] -- ['advertiser-landing-pages_patch.md', 'Advertiser Landing Pages', 'Patch'] -- ['advertiser-landing-pages_update.md', 'Advertiser Landing Pages', 'Update'] -- ['advertisers_get.md', 'Advertisers', 'Get'] -- ['advertisers_insert.md', 'Advertisers', 'Insert'] -- ['advertisers_list.md', 'Advertisers', 'List'] -- ['advertisers_patch.md', 'Advertisers', 'Patch'] -- ['advertisers_update.md', 'Advertisers', 'Update'] -- ['browsers_list.md', 'Browsers', 'List'] -- ['campaign-creative-associations_insert.md', 'Campaign Creative Associations', 'Insert'] -- ['campaign-creative-associations_list.md', 'Campaign Creative Associations', 'List'] -- ['campaigns_get.md', 'Campaigns', 'Get'] -- ['campaigns_insert.md', 'Campaigns', 'Insert'] -- ['campaigns_list.md', 'Campaigns', 'List'] -- ['campaigns_patch.md', 'Campaigns', 'Patch'] -- ['campaigns_update.md', 'Campaigns', 'Update'] -- ['change-logs_get.md', 'Change Logs', 'Get'] -- ['change-logs_list.md', 'Change Logs', 'List'] -- ['cities_list.md', 'Cities', 'List'] -- ['connection-types_get.md', 'Connection Types', 'Get'] -- ['connection-types_list.md', 'Connection Types', 'List'] -- ['content-categories_delete.md', 'Content Categories', 'Delete'] -- ['content-categories_get.md', 'Content Categories', 'Get'] -- ['content-categories_insert.md', 'Content 
Categories', 'Insert'] -- ['content-categories_list.md', 'Content Categories', 'List'] -- ['content-categories_patch.md', 'Content Categories', 'Patch'] -- ['content-categories_update.md', 'Content Categories', 'Update'] -- ['conversions_batchinsert.md', 'Conversions', 'Batchinsert'] -- ['conversions_batchupdate.md', 'Conversions', 'Batchupdate'] -- ['countries_get.md', 'Countries', 'Get'] -- ['countries_list.md', 'Countries', 'List'] -- ['creative-assets_insert.md', 'Creative Assets', 'Insert'] -- ['creative-field-values_delete.md', 'Creative Field Values', 'Delete'] -- ['creative-field-values_get.md', 'Creative Field Values', 'Get'] -- ['creative-field-values_insert.md', 'Creative Field Values', 'Insert'] -- ['creative-field-values_list.md', 'Creative Field Values', 'List'] -- ['creative-field-values_patch.md', 'Creative Field Values', 'Patch'] -- ['creative-field-values_update.md', 'Creative Field Values', 'Update'] -- ['creative-fields_delete.md', 'Creative Fields', 'Delete'] -- ['creative-fields_get.md', 'Creative Fields', 'Get'] -- ['creative-fields_insert.md', 'Creative Fields', 'Insert'] -- ['creative-fields_list.md', 'Creative Fields', 'List'] -- ['creative-fields_patch.md', 'Creative Fields', 'Patch'] -- ['creative-fields_update.md', 'Creative Fields', 'Update'] -- ['creative-groups_get.md', 'Creative Groups', 'Get'] -- ['creative-groups_insert.md', 'Creative Groups', 'Insert'] -- ['creative-groups_list.md', 'Creative Groups', 'List'] -- ['creative-groups_patch.md', 'Creative Groups', 'Patch'] -- ['creative-groups_update.md', 'Creative Groups', 'Update'] -- ['creatives_get.md', 'Creatives', 'Get'] -- ['creatives_insert.md', 'Creatives', 'Insert'] -- ['creatives_list.md', 'Creatives', 'List'] -- ['creatives_patch.md', 'Creatives', 'Patch'] -- ['creatives_update.md', 'Creatives', 'Update'] -- ['dimension-values_query.md', 'Dimension Values', 'Query'] -- ['directory-site-contacts_get.md', 'Directory Site Contacts', 'Get'] -- 
['directory-site-contacts_list.md', 'Directory Site Contacts', 'List'] -- ['directory-sites_get.md', 'Directory Sites', 'Get'] -- ['directory-sites_insert.md', 'Directory Sites', 'Insert'] -- ['directory-sites_list.md', 'Directory Sites', 'List'] -- ['dynamic-targeting-keys_delete.md', 'Dynamic Targeting Keys', 'Delete'] -- ['dynamic-targeting-keys_insert.md', 'Dynamic Targeting Keys', 'Insert'] -- ['dynamic-targeting-keys_list.md', 'Dynamic Targeting Keys', 'List'] -- ['event-tags_delete.md', 'Event Tags', 'Delete'] -- ['event-tags_get.md', 'Event Tags', 'Get'] -- ['event-tags_insert.md', 'Event Tags', 'Insert'] -- ['event-tags_list.md', 'Event Tags', 'List'] -- ['event-tags_patch.md', 'Event Tags', 'Patch'] -- ['event-tags_update.md', 'Event Tags', 'Update'] -- ['files_get.md', 'Files', 'Get'] -- ['files_list.md', 'Files', 'List'] -- ['floodlight-activities_delete.md', 'Floodlight Activities', 'Delete'] -- ['floodlight-activities_generatetag.md', 'Floodlight Activities', 'Generatetag'] -- ['floodlight-activities_get.md', 'Floodlight Activities', 'Get'] -- ['floodlight-activities_insert.md', 'Floodlight Activities', 'Insert'] -- ['floodlight-activities_list.md', 'Floodlight Activities', 'List'] -- ['floodlight-activities_patch.md', 'Floodlight Activities', 'Patch'] -- ['floodlight-activities_update.md', 'Floodlight Activities', 'Update'] -- ['floodlight-activity-groups_get.md', 'Floodlight Activity Groups', 'Get'] -- ['floodlight-activity-groups_insert.md', 'Floodlight Activity Groups', 'Insert'] -- ['floodlight-activity-groups_list.md', 'Floodlight Activity Groups', 'List'] -- ['floodlight-activity-groups_patch.md', 'Floodlight Activity Groups', 'Patch'] -- ['floodlight-activity-groups_update.md', 'Floodlight Activity Groups', 'Update'] -- ['floodlight-configurations_get.md', 'Floodlight Configurations', 'Get'] -- ['floodlight-configurations_list.md', 'Floodlight Configurations', 'List'] -- ['floodlight-configurations_patch.md', 'Floodlight Configurations', 
'Patch'] -- ['floodlight-configurations_update.md', 'Floodlight Configurations', 'Update'] -- ['inventory-items_get.md', 'Inventory Items', 'Get'] -- ['inventory-items_list.md', 'Inventory Items', 'List'] -- ['languages_list.md', 'Languages', 'List'] -- ['metros_list.md', 'Metros', 'List'] -- ['mobile-apps_get.md', 'Mobile Apps', 'Get'] -- ['mobile-apps_list.md', 'Mobile Apps', 'List'] -- ['mobile-carriers_get.md', 'Mobile Carriers', 'Get'] -- ['mobile-carriers_list.md', 'Mobile Carriers', 'List'] -- ['operating-system-versions_get.md', 'Operating System Versions', 'Get'] -- ['operating-system-versions_list.md', 'Operating System Versions', 'List'] -- ['operating-systems_get.md', 'Operating Systems', 'Get'] -- ['operating-systems_list.md', 'Operating Systems', 'List'] -- ['order-documents_get.md', 'Order Documents', 'Get'] -- ['order-documents_list.md', 'Order Documents', 'List'] -- ['orders_get.md', 'Orders', 'Get'] -- ['orders_list.md', 'Orders', 'List'] -- ['placement-groups_get.md', 'Placement Groups', 'Get'] -- ['placement-groups_insert.md', 'Placement Groups', 'Insert'] -- ['placement-groups_list.md', 'Placement Groups', 'List'] -- ['placement-groups_patch.md', 'Placement Groups', 'Patch'] -- ['placement-groups_update.md', 'Placement Groups', 'Update'] -- ['placement-strategies_delete.md', 'Placement Strategies', 'Delete'] -- ['placement-strategies_get.md', 'Placement Strategies', 'Get'] -- ['placement-strategies_insert.md', 'Placement Strategies', 'Insert'] -- ['placement-strategies_list.md', 'Placement Strategies', 'List'] -- ['placement-strategies_patch.md', 'Placement Strategies', 'Patch'] -- ['placement-strategies_update.md', 'Placement Strategies', 'Update'] -- ['placements_generatetags.md', 'Placements', 'Generatetags'] -- ['placements_get.md', 'Placements', 'Get'] -- ['placements_insert.md', 'Placements', 'Insert'] -- ['placements_list.md', 'Placements', 'List'] -- ['placements_patch.md', 'Placements', 'Patch'] -- ['placements_update.md', 
'Placements', 'Update'] -- ['platform-types_get.md', 'Platform Types', 'Get'] -- ['platform-types_list.md', 'Platform Types', 'List'] -- ['postal-codes_get.md', 'Postal Codes', 'Get'] -- ['postal-codes_list.md', 'Postal Codes', 'List'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_list.md', 'Projects', 'List'] -- ['regions_list.md', 'Regions', 'List'] -- ['remarketing-list-shares_get.md', 'Remarketing List Shares', 'Get'] -- ['remarketing-list-shares_patch.md', 'Remarketing List Shares', 'Patch'] -- ['remarketing-list-shares_update.md', 'Remarketing List Shares', 'Update'] -- ['remarketing-lists_get.md', 'Remarketing Lists', 'Get'] -- ['remarketing-lists_insert.md', 'Remarketing Lists', 'Insert'] -- ['remarketing-lists_list.md', 'Remarketing Lists', 'List'] -- ['remarketing-lists_patch.md', 'Remarketing Lists', 'Patch'] -- ['remarketing-lists_update.md', 'Remarketing Lists', 'Update'] -- ['reports_compatible-fields-query.md', 'Reports', 'Compatible Fields Query'] -- ['reports_delete.md', 'Reports', 'Delete'] -- ['reports_files-get.md', 'Reports', 'Files Get'] -- ['reports_files-list.md', 'Reports', 'Files List'] -- ['reports_get.md', 'Reports', 'Get'] -- ['reports_insert.md', 'Reports', 'Insert'] -- ['reports_list.md', 'Reports', 'List'] -- ['reports_patch.md', 'Reports', 'Patch'] -- ['reports_run.md', 'Reports', 'Run'] -- ['reports_update.md', 'Reports', 'Update'] -- ['sites_get.md', 'Sites', 'Get'] -- ['sites_insert.md', 'Sites', 'Insert'] -- ['sites_list.md', 'Sites', 'List'] -- ['sites_patch.md', 'Sites', 'Patch'] -- ['sites_update.md', 'Sites', 'Update'] -- ['sizes_get.md', 'Sizes', 'Get'] -- ['sizes_insert.md', 'Sizes', 'Insert'] -- ['sizes_list.md', 'Sizes', 'List'] -- ['subaccounts_get.md', 'Subaccounts', 'Get'] -- ['subaccounts_insert.md', 'Subaccounts', 'Insert'] -- ['subaccounts_list.md', 'Subaccounts', 'List'] -- ['subaccounts_patch.md', 'Subaccounts', 'Patch'] -- ['subaccounts_update.md', 'Subaccounts', 'Update'] -- 
['targetable-remarketing-lists_get.md', 'Targetable Remarketing Lists', 'Get'] -- ['targetable-remarketing-lists_list.md', 'Targetable Remarketing Lists', 'List'] -- ['targeting-templates_get.md', 'Targeting Templates', 'Get'] -- ['targeting-templates_insert.md', 'Targeting Templates', 'Insert'] -- ['targeting-templates_list.md', 'Targeting Templates', 'List'] -- ['targeting-templates_patch.md', 'Targeting Templates', 'Patch'] -- ['targeting-templates_update.md', 'Targeting Templates', 'Update'] -- ['user-profiles_get.md', 'User Profiles', 'Get'] -- ['user-profiles_list.md', 'User Profiles', 'List'] -- ['user-role-permission-groups_get.md', 'User Role Permission Groups', 'Get'] -- ['user-role-permission-groups_list.md', 'User Role Permission Groups', 'List'] -- ['user-role-permissions_get.md', 'User Role Permissions', 'Get'] -- ['user-role-permissions_list.md', 'User Role Permissions', 'List'] -- ['user-roles_delete.md', 'User Roles', 'Delete'] -- ['user-roles_get.md', 'User Roles', 'Get'] -- ['user-roles_insert.md', 'User Roles', 'Insert'] -- ['user-roles_list.md', 'User Roles', 'List'] -- ['user-roles_patch.md', 'User Roles', 'Patch'] -- ['user-roles_update.md', 'User Roles', 'Update'] -- ['video-formats_get.md', 'Video Formats', 'Get'] -- ['video-formats_list.md', 'Video Formats', 'List'] +nav: +- Home: 'index.md' +- 'Account Active Ad Summaries': + - 'Get': 'account-active-ad-summaries_get.md' +- 'Account Permission Groups': + - 'Get': 'account-permission-groups_get.md' + - 'List': 'account-permission-groups_list.md' +- 'Account Permissions': + - 'Get': 'account-permissions_get.md' + - 'List': 'account-permissions_list.md' +- 'Account User Profiles': + - 'Get': 'account-user-profiles_get.md' + - 'Insert': 'account-user-profiles_insert.md' + - 'List': 'account-user-profiles_list.md' + - 'Patch': 'account-user-profiles_patch.md' + - 'Update': 'account-user-profiles_update.md' +- 'Accounts': + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Patch': 
'accounts_patch.md' + - 'Update': 'accounts_update.md' +- 'Ads': + - 'Get': 'ads_get.md' + - 'Insert': 'ads_insert.md' + - 'List': 'ads_list.md' + - 'Patch': 'ads_patch.md' + - 'Update': 'ads_update.md' +- 'Advertiser Groups': + - 'Delete': 'advertiser-groups_delete.md' + - 'Get': 'advertiser-groups_get.md' + - 'Insert': 'advertiser-groups_insert.md' + - 'List': 'advertiser-groups_list.md' + - 'Patch': 'advertiser-groups_patch.md' + - 'Update': 'advertiser-groups_update.md' +- 'Advertiser Landing Pages': + - 'Get': 'advertiser-landing-pages_get.md' + - 'Insert': 'advertiser-landing-pages_insert.md' + - 'List': 'advertiser-landing-pages_list.md' + - 'Patch': 'advertiser-landing-pages_patch.md' + - 'Update': 'advertiser-landing-pages_update.md' +- 'Advertisers': + - 'Get': 'advertisers_get.md' + - 'Insert': 'advertisers_insert.md' + - 'List': 'advertisers_list.md' + - 'Patch': 'advertisers_patch.md' + - 'Update': 'advertisers_update.md' +- 'Browsers': + - 'List': 'browsers_list.md' +- 'Campaign Creative Associations': + - 'Insert': 'campaign-creative-associations_insert.md' + - 'List': 'campaign-creative-associations_list.md' +- 'Campaigns': + - 'Get': 'campaigns_get.md' + - 'Insert': 'campaigns_insert.md' + - 'List': 'campaigns_list.md' + - 'Patch': 'campaigns_patch.md' + - 'Update': 'campaigns_update.md' +- 'Change Logs': + - 'Get': 'change-logs_get.md' + - 'List': 'change-logs_list.md' +- 'Cities': + - 'List': 'cities_list.md' +- 'Connection Types': + - 'Get': 'connection-types_get.md' + - 'List': 'connection-types_list.md' +- 'Content Categories': + - 'Delete': 'content-categories_delete.md' + - 'Get': 'content-categories_get.md' + - 'Insert': 'content-categories_insert.md' + - 'List': 'content-categories_list.md' + - 'Patch': 'content-categories_patch.md' + - 'Update': 'content-categories_update.md' +- 'Conversions': + - 'Batchinsert': 'conversions_batchinsert.md' + - 'Batchupdate': 'conversions_batchupdate.md' +- 'Countries': + - 'Get': 'countries_get.md' + - 
'List': 'countries_list.md' +- 'Creative Assets': + - 'Insert': 'creative-assets_insert.md' +- 'Creative Field Values': + - 'Delete': 'creative-field-values_delete.md' + - 'Get': 'creative-field-values_get.md' + - 'Insert': 'creative-field-values_insert.md' + - 'List': 'creative-field-values_list.md' + - 'Patch': 'creative-field-values_patch.md' + - 'Update': 'creative-field-values_update.md' +- 'Creative Fields': + - 'Delete': 'creative-fields_delete.md' + - 'Get': 'creative-fields_get.md' + - 'Insert': 'creative-fields_insert.md' + - 'List': 'creative-fields_list.md' + - 'Patch': 'creative-fields_patch.md' + - 'Update': 'creative-fields_update.md' +- 'Creative Groups': + - 'Get': 'creative-groups_get.md' + - 'Insert': 'creative-groups_insert.md' + - 'List': 'creative-groups_list.md' + - 'Patch': 'creative-groups_patch.md' + - 'Update': 'creative-groups_update.md' +- 'Creatives': + - 'Get': 'creatives_get.md' + - 'Insert': 'creatives_insert.md' + - 'List': 'creatives_list.md' + - 'Patch': 'creatives_patch.md' + - 'Update': 'creatives_update.md' +- 'Dimension Values': + - 'Query': 'dimension-values_query.md' +- 'Directory Site Contacts': + - 'Get': 'directory-site-contacts_get.md' + - 'List': 'directory-site-contacts_list.md' +- 'Directory Sites': + - 'Get': 'directory-sites_get.md' + - 'Insert': 'directory-sites_insert.md' + - 'List': 'directory-sites_list.md' +- 'Dynamic Targeting Keys': + - 'Delete': 'dynamic-targeting-keys_delete.md' + - 'Insert': 'dynamic-targeting-keys_insert.md' + - 'List': 'dynamic-targeting-keys_list.md' +- 'Event Tags': + - 'Delete': 'event-tags_delete.md' + - 'Get': 'event-tags_get.md' + - 'Insert': 'event-tags_insert.md' + - 'List': 'event-tags_list.md' + - 'Patch': 'event-tags_patch.md' + - 'Update': 'event-tags_update.md' +- 'Files': + - 'Get': 'files_get.md' + - 'List': 'files_list.md' +- 'Floodlight Activities': + - 'Delete': 'floodlight-activities_delete.md' + - 'Generatetag': 'floodlight-activities_generatetag.md' + - 'Get': 
'floodlight-activities_get.md' + - 'Insert': 'floodlight-activities_insert.md' + - 'List': 'floodlight-activities_list.md' + - 'Patch': 'floodlight-activities_patch.md' + - 'Update': 'floodlight-activities_update.md' +- 'Floodlight Activity Groups': + - 'Get': 'floodlight-activity-groups_get.md' + - 'Insert': 'floodlight-activity-groups_insert.md' + - 'List': 'floodlight-activity-groups_list.md' + - 'Patch': 'floodlight-activity-groups_patch.md' + - 'Update': 'floodlight-activity-groups_update.md' +- 'Floodlight Configurations': + - 'Get': 'floodlight-configurations_get.md' + - 'List': 'floodlight-configurations_list.md' + - 'Patch': 'floodlight-configurations_patch.md' + - 'Update': 'floodlight-configurations_update.md' +- 'Inventory Items': + - 'Get': 'inventory-items_get.md' + - 'List': 'inventory-items_list.md' +- 'Languages': + - 'List': 'languages_list.md' +- 'Metros': + - 'List': 'metros_list.md' +- 'Mobile Apps': + - 'Get': 'mobile-apps_get.md' + - 'List': 'mobile-apps_list.md' +- 'Mobile Carriers': + - 'Get': 'mobile-carriers_get.md' + - 'List': 'mobile-carriers_list.md' +- 'Operating System Versions': + - 'Get': 'operating-system-versions_get.md' + - 'List': 'operating-system-versions_list.md' +- 'Operating Systems': + - 'Get': 'operating-systems_get.md' + - 'List': 'operating-systems_list.md' +- 'Order Documents': + - 'Get': 'order-documents_get.md' + - 'List': 'order-documents_list.md' +- 'Orders': + - 'Get': 'orders_get.md' + - 'List': 'orders_list.md' +- 'Placement Groups': + - 'Get': 'placement-groups_get.md' + - 'Insert': 'placement-groups_insert.md' + - 'List': 'placement-groups_list.md' + - 'Patch': 'placement-groups_patch.md' + - 'Update': 'placement-groups_update.md' +- 'Placement Strategies': + - 'Delete': 'placement-strategies_delete.md' + - 'Get': 'placement-strategies_get.md' + - 'Insert': 'placement-strategies_insert.md' + - 'List': 'placement-strategies_list.md' + - 'Patch': 'placement-strategies_patch.md' + - 'Update': 
'placement-strategies_update.md' +- 'Placements': + - 'Generatetags': 'placements_generatetags.md' + - 'Get': 'placements_get.md' + - 'Insert': 'placements_insert.md' + - 'List': 'placements_list.md' + - 'Patch': 'placements_patch.md' + - 'Update': 'placements_update.md' +- 'Platform Types': + - 'Get': 'platform-types_get.md' + - 'List': 'platform-types_list.md' +- 'Postal Codes': + - 'Get': 'postal-codes_get.md' + - 'List': 'postal-codes_list.md' +- 'Projects': + - 'Get': 'projects_get.md' + - 'List': 'projects_list.md' +- 'Regions': + - 'List': 'regions_list.md' +- 'Remarketing List Shares': + - 'Get': 'remarketing-list-shares_get.md' + - 'Patch': 'remarketing-list-shares_patch.md' + - 'Update': 'remarketing-list-shares_update.md' +- 'Remarketing Lists': + - 'Get': 'remarketing-lists_get.md' + - 'Insert': 'remarketing-lists_insert.md' + - 'List': 'remarketing-lists_list.md' + - 'Patch': 'remarketing-lists_patch.md' + - 'Update': 'remarketing-lists_update.md' +- 'Reports': + - 'Compatible Fields Query': 'reports_compatible-fields-query.md' + - 'Delete': 'reports_delete.md' + - 'Files Get': 'reports_files-get.md' + - 'Files List': 'reports_files-list.md' + - 'Get': 'reports_get.md' + - 'Insert': 'reports_insert.md' + - 'List': 'reports_list.md' + - 'Patch': 'reports_patch.md' + - 'Run': 'reports_run.md' + - 'Update': 'reports_update.md' +- 'Sites': + - 'Get': 'sites_get.md' + - 'Insert': 'sites_insert.md' + - 'List': 'sites_list.md' + - 'Patch': 'sites_patch.md' + - 'Update': 'sites_update.md' +- 'Sizes': + - 'Get': 'sizes_get.md' + - 'Insert': 'sizes_insert.md' + - 'List': 'sizes_list.md' +- 'Subaccounts': + - 'Get': 'subaccounts_get.md' + - 'Insert': 'subaccounts_insert.md' + - 'List': 'subaccounts_list.md' + - 'Patch': 'subaccounts_patch.md' + - 'Update': 'subaccounts_update.md' +- 'Targetable Remarketing Lists': + - 'Get': 'targetable-remarketing-lists_get.md' + - 'List': 'targetable-remarketing-lists_list.md' +- 'Targeting Templates': + - 'Get': 
'targeting-templates_get.md' + - 'Insert': 'targeting-templates_insert.md' + - 'List': 'targeting-templates_list.md' + - 'Patch': 'targeting-templates_patch.md' + - 'Update': 'targeting-templates_update.md' +- 'User Profiles': + - 'Get': 'user-profiles_get.md' + - 'List': 'user-profiles_list.md' +- 'User Role Permission Groups': + - 'Get': 'user-role-permission-groups_get.md' + - 'List': 'user-role-permission-groups_list.md' +- 'User Role Permissions': + - 'Get': 'user-role-permissions_get.md' + - 'List': 'user-role-permissions_list.md' +- 'User Roles': + - 'Delete': 'user-roles_delete.md' + - 'Get': 'user-roles_get.md' + - 'Insert': 'user-roles_insert.md' + - 'List': 'user-roles_list.md' + - 'Patch': 'user-roles_patch.md' + - 'Update': 'user-roles_update.md' +- 'Video Formats': + - 'Get': 'video-formats_get.md' + - 'List': 'video-formats_list.md' theme: readthedocs diff --git a/gen/dfareporting3d2-cli/src/client.rs b/gen/dfareporting3d2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dfareporting3d2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dfareporting3d2-cli/src/main.rs b/gen/dfareporting3d2-cli/src/main.rs index 204a269ff0..a075f62fee 100644 --- a/gen/dfareporting3d2-cli/src/main.rs +++ b/gen/dfareporting3d2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dfareporting3d2::{api, Error, oauth2}; +use google_dfareporting3d2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -478,10 +477,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-role-id" => { - call = call.user_role_id(value.unwrap_or("")); + call = call.user_role_id( value.map(|v| arg_from_str(v, err, "user-role-id", "int64")).unwrap_or(-0)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -496,13 +495,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = 
call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -838,13 +837,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1317,10 +1316,10 @@ where call = call.add_type(value.unwrap_or("")); }, "ssl-required" => { - call = call.ssl_required(arg_from_str(value.unwrap_or("false"), err, "ssl-required", "boolean")); + call = call.ssl_required( value.map(|v| arg_from_str(v, err, "ssl-required", "boolean")).unwrap_or(false)); }, "ssl-compliant" => { - call = call.ssl_compliant(arg_from_str(value.unwrap_or("false"), err, "ssl-compliant", "boolean")); + call = call.ssl_compliant( value.map(|v| arg_from_str(v, err, "ssl-compliant", "boolean")).unwrap_or(false)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -1329,58 +1328,58 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "remarketing-list-ids" => { - call = call.add_remarketing_list_ids(value.unwrap_or("")); + call = call.add_remarketing_list_ids( value.map(|v| arg_from_str(v, err, "remarketing-list-ids", "int64")).unwrap_or(-0)); }, "placement-ids" => { - call = 
call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "landing-page-ids" => { - call = call.add_landing_page_ids(value.unwrap_or("")); + call = call.add_landing_page_ids( value.map(|v| arg_from_str(v, err, "landing-page-ids", "int64")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dynamic-click-tracker" => { - call = call.dynamic_click_tracker(arg_from_str(value.unwrap_or("false"), err, "dynamic-click-tracker", "boolean")); + call = call.dynamic_click_tracker( value.map(|v| arg_from_str(v, err, "dynamic-click-tracker", "boolean")).unwrap_or(false)); }, "creative-optimization-configuration-ids" => { - call = call.add_creative_optimization_configuration_ids(value.unwrap_or("")); + call = call.add_creative_optimization_configuration_ids( value.map(|v| arg_from_str(v, err, "creative-optimization-configuration-ids", "int64")).unwrap_or(-0)); }, "creative-ids" => { - call = call.add_creative_ids(value.unwrap_or("")); + call = call.add_creative_ids( value.map(|v| arg_from_str(v, err, "creative-ids", "int64")).unwrap_or(-0)); }, "compatibility" => { call = call.compatibility(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", 
"int64")).unwrap_or(-0)); }, "audience-segment-ids" => { - call = call.add_audience_segment_ids(value.unwrap_or("")); + call = call.add_audience_segment_ids( value.map(|v| arg_from_str(v, err, "audience-segment-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1932,10 +1931,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2309,7 +2308,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -2324,19 +2323,19 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - 
call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2733,7 +2732,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "status" => { call = call.status(value.unwrap_or("")); @@ -2751,22 +2750,22 @@ where call = call.page_token(value.unwrap_or("")); }, "only-parent" => { - call = call.only_parent(arg_from_str(value.unwrap_or("false"), err, "only-parent", "boolean")); + call = call.only_parent( value.map(|v| arg_from_str(v, err, "only-parent", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-advertisers-without-groups-only" => { - call = call.include_advertisers_without_groups_only(arg_from_str(value.unwrap_or("false"), err, "include-advertisers-without-groups-only", "boolean")); + call = call.include_advertisers_without_groups_only( value.map(|v| arg_from_str(v, err, "include-advertisers-without-groups-only", "boolean")).unwrap_or(false)); }, "ids" => { - call = 
call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-ids" => { - call = call.add_floodlight_configuration_ids(value.unwrap_or("")); + call = call.add_floodlight_configuration_ids( value.map(|v| arg_from_str(v, err, "floodlight-configuration-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3184,7 +3183,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3420,7 +3419,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -3435,28 +3434,28 @@ where call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "excluded-ids" => { - call = 
call.add_excluded_ids(value.unwrap_or("")); + call = call.add_excluded_ids( value.map(|v| arg_from_str(v, err, "excluded-ids", "int64")).unwrap_or(-0)); }, "at-least-one-optimization-activity" => { - call = call.at_least_one_optimization_activity(arg_from_str(value.unwrap_or("false"), err, "at-least-one-optimization-activity", "boolean")); + call = call.at_least_one_optimization_activity( value.map(|v| arg_from_str(v, err, "at-least-one-optimization-activity", "boolean")).unwrap_or(false)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3820,7 +3819,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-profile-ids" => { - call = call.add_user_profile_ids(value.unwrap_or("")); + call = call.add_user_profile_ids( value.map(|v| arg_from_str(v, err, "user-profile-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -3832,19 +3831,19 @@ where call = call.object_type(value.unwrap_or("")); }, "object-ids" => { - call = call.add_object_ids(value.unwrap_or("")); + call = call.add_object_ids( value.map(|v| arg_from_str(v, err, "object-ids", "int64")).unwrap_or(-0)); }, "min-change-time" => { call = call.min_change_time(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( 
value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-change-time" => { call = call.max_change_time(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "action" => { call = call.action(value.unwrap_or("")); @@ -3903,16 +3902,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "region-dart-ids" => { - call = call.add_region_dart_ids(value.unwrap_or("")); + call = call.add_region_dart_ids( value.map(|v| arg_from_str(v, err, "region-dart-ids", "int64")).unwrap_or(-0)); }, "name-prefix" => { call = call.name_prefix(value.unwrap_or("")); }, "dart-ids" => { - call = call.add_dart_ids(value.unwrap_or("")); + call = call.add_dart_ids( value.map(|v| arg_from_str(v, err, "dart-ids", "int64")).unwrap_or(-0)); }, "country-dart-ids" => { - call = call.add_country_dart_ids(value.unwrap_or("")); + call = call.add_country_dart_ids( value.map(|v| arg_from_str(v, err, "country-dart-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4268,10 +4267,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5079,10 +5078,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| 
arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5516,13 +5515,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5931,16 +5930,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-number" => { - call = call.group_number(arg_from_str(value.unwrap_or("-0"), err, "group-number", "integer")); + call = call.group_number( value.map(|v| arg_from_str(v, err, "group-number", "int32")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6410,7 +6409,7 @@ where call = call.add_types(value.unwrap_or("")); }, "studio-creative-id" => { - call = call.studio_creative_id(value.unwrap_or("")); + call = call.studio_creative_id( value.map(|v| arg_from_str(v, err, "studio-creative-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ 
-6419,40 +6418,40 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "rendering-ids" => { - call = call.add_rendering_ids(value.unwrap_or("")); + call = call.add_rendering_ids( value.map(|v| arg_from_str(v, err, "rendering-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "creative-field-ids" => { - call = call.add_creative_field_ids(value.unwrap_or("")); + call = call.add_creative_field_ids( value.map(|v| arg_from_str(v, err, "creative-field-ids", "int64")).unwrap_or(-0)); }, "companion-creative-ids" => { - call = call.add_companion_creative_ids(value.unwrap_or("")); + call = call.add_companion_creative_ids( value.map(|v| arg_from_str(v, err, "companion-creative-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = 
call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6877,7 +6876,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6997,13 +6996,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7232,34 +7231,34 @@ where call = call.search_string(value.unwrap_or("")); }, "parent-id" => { - call = call.parent_id(value.unwrap_or("")); + call = call.parent_id( value.map(|v| arg_from_str(v, err, "parent-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dfp-network-code" => { call = call.dfp_network_code(value.unwrap_or("")); }, "country-id" => { 
- call = call.country_id(value.unwrap_or("")); + call = call.country_id( value.map(|v| arg_from_str(v, err, "country-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7450,13 +7449,13 @@ where call = call.object_type(value.unwrap_or("")); }, "object-id" => { - call = call.object_id(value.unwrap_or("")); + call = call.object_id( value.map(|v| arg_from_str(v, err, "object-id", "int64")).unwrap_or(-0)); }, "names" => { call = call.add_names(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7729,25 +7728,25 @@ where call = call.search_string(value.unwrap_or("")); }, "ids" => { 
- call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "event-tag-types" => { call = call.add_event_tag_types(value.unwrap_or("")); }, "enabled" => { - call = call.enabled(arg_from_str(value.unwrap_or("false"), err, "enabled", "boolean")); + call = call.enabled( value.map(|v| arg_from_str(v, err, "enabled", "boolean")).unwrap_or(false)); }, "definitions-only" => { - call = call.definitions_only(arg_from_str(value.unwrap_or("false"), err, "definitions-only", "boolean")); + call = call.definitions_only( value.map(|v| arg_from_str(v, err, "definitions-only", "boolean")).unwrap_or(false)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "ad-id" => { - call = call.ad_id(value.unwrap_or("")); + call = call.ad_id( value.map(|v| arg_from_str(v, err, "ad-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8101,7 +8100,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8201,7 +8200,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8449,13 +8448,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "floodlight-activity-group-type" => { call = call.floodlight_activity_group_type(value.unwrap_or("")); @@ -8467,10 +8466,10 @@ where call = call.floodlight_activity_group_name(value.unwrap_or("")); }, "floodlight-activity-group-ids" => { - call = call.add_floodlight_activity_group_ids(value.unwrap_or("")); + call = call.add_floodlight_activity_group_ids( value.map(|v| arg_from_str(v, err, "floodlight-activity-group-ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8954,16 +8953,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + 
call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9293,7 +9292,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9634,22 +9633,22 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "in-plan" => { - call = call.in_plan(arg_from_str(value.unwrap_or("false"), err, "in-plan", "boolean")); + call = call.in_plan( value.map(|v| arg_from_str(v, err, "in-plan", "boolean")).unwrap_or(false)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9867,7 +9866,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { call = call.add_ids(value.unwrap_or("")); @@ -10299,7 +10298,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = 
call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10308,16 +10307,16 @@ where call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -10431,7 +10430,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10440,10 +10439,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10704,7 +10703,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| 
arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10713,7 +10712,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "placement-group-type" => { call = call.placement_group_type(value.unwrap_or("")); @@ -10731,28 +10730,28 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| 
arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11298,10 +11297,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11536,10 +11535,10 @@ where call = call.add_tag_formats(value.unwrap_or("")); }, "placement-ids" => { - call = call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11834,10 +11833,10 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -11846,7 +11845,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "payment-source" => { call = call.payment_source(value.unwrap_or("")); @@ -11864,34 +11863,34 @@ where call = 
call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-ids" => { - call = call.add_group_ids(value.unwrap_or("")); + call = call.add_group_ids( value.map(|v| arg_from_str(v, err, "group-ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "compatibilities" => { call = call.add_compatibilities(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12581,13 +12580,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = 
call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13090,13 +13089,13 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -13638,7 +13637,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13923,7 +13922,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14144,7 +14143,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "synchronous" => { - call = 
call.synchronous(arg_from_str(value.unwrap_or("false"), err, "synchronous", "boolean")); + call = call.synchronous( value.map(|v| arg_from_str(v, err, "synchronous", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14534,10 +14533,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unmapped-site" => { - call = call.unmapped_site(arg_from_str(value.unwrap_or("false"), err, "unmapped-site", "boolean")); + call = call.unmapped_site( value.map(|v| arg_from_str(v, err, "unmapped-site", "boolean")).unwrap_or(false)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -14552,31 +14551,31 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, "ad-words-site" => { - call = call.ad_words_site(arg_from_str(value.unwrap_or("false"), err, "ad-words-site", "boolean")); + call = 
call.ad_words_site( value.map(|v| arg_from_str(v, err, "ad-words-site", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15007,16 +15006,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "width" => { - call = call.width(arg_from_str(value.unwrap_or("-0"), err, "width", "integer")); + call = call.width( value.map(|v| arg_from_str(v, err, "width", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "iab-standard" => { - call = call.iab_standard(arg_from_str(value.unwrap_or("false"), err, "iab-standard", "boolean")); + call = call.iab_standard( value.map(|v| arg_from_str(v, err, "iab-standard", "boolean")).unwrap_or(false)); }, "height" => { - call = call.height(arg_from_str(value.unwrap_or("-0"), err, "height", "integer")); + call = call.height( value.map(|v| arg_from_str(v, err, "height", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15225,10 +15224,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15526,10 +15525,10 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15751,13 +15750,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16277,7 +16276,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; 
@@ -16520,7 +16519,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -16535,13 +16534,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "account-user-role-only" => { - call = call.account_user_role_only(arg_from_str(value.unwrap_or("false"), err, "account-user-role-only", "boolean")); + call = call.account_user_role_only( value.map(|v| arg_from_str(v, err, "account-user-role-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -23882,7 +23881,7 @@ async fn main() { let mut app = App::new("dfareporting3d2") .author("Sebastian Thiel ") - .version("4.0.1+20190531") + .version("5.0.2+20190531") .about("Manages your DoubleClick Campaign Manager ad campaigns and reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dfareporting3d2_cli") .arg(Arg::with_name("url") diff --git a/gen/dfareporting3d2/Cargo.toml b/gen/dfareporting3d2/Cargo.toml index 71068ae646..b6ba9065ca 100644 --- a/gen/dfareporting3d2/Cargo.toml +++ b/gen/dfareporting3d2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dfareporting3d2" -version = "5.0.2-beta-1+20190531" +version = "5.0.2+20190531" authors = ["Sebastian Thiel "] description = "A complete library to interact with dfareporting (protocol v3.2)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d2" homepage = "https://developers.google.com/doubleclick-advertisers/" -documentation = "https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531" +documentation = "https://docs.rs/google-dfareporting3d2/5.0.2+20190531" license = "MIT" keywords = ["dfareporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dfareporting3d2/README.md b/gen/dfareporting3d2/README.md index ff2116b379..32436447f6 100644 --- a/gen/dfareporting3d2/README.md +++ b/gen/dfareporting3d2/README.md @@ -5,146 +5,146 @@ DO NOT EDIT ! --> The `google-dfareporting3d2` library allows access to all features of the *Google dfareporting* service. -This documentation was generated from *dfareporting* crate version *5.0.2-beta-1+20190531*, where *20190531* is the exact revision of the *dfareporting:v3.2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *dfareporting* crate version *5.0.2+20190531*, where *20190531* is the exact revision of the *dfareporting:v3.2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *dfareporting* *v3d2* API can be found at the [official documentation site](https://developers.google.com/doubleclick-advertisers/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/Dfareporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/Dfareporting) ... 
-* [account active ad summaries](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountActiveAdSummary) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountActiveAdSummaryGetCall) -* [account permission groups](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountPermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountPermissionGroupListCall) -* [account permissions](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountPermission) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountPermissionListCall) -* [account user profiles](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountUserProfile) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountUserProfileUpdateCall) -* [accounts](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Account) - * 
[*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AccountUpdateCall) -* [ads](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Ad) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdUpdateCall) -* [advertiser groups](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserGroup) - * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserGroupPatchCall) and 
[*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserGroupUpdateCall) +* [account active ad summaries](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountActiveAdSummary) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountActiveAdSummaryGetCall) +* [account permission groups](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountPermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountPermissionGroupListCall) +* [account permissions](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountPermission) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountPermissionListCall) +* [account user profiles](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountUserProfile) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountUserProfileUpdateCall) +* [accounts](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Account) + * 
[*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AccountUpdateCall) +* [ads](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Ad) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdUpdateCall) +* [advertiser groups](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserGroup) + * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserGroupUpdateCall) * advertiser landing pages - * 
[*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserLandingPageUpdateCall) -* [advertisers](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Advertiser) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::AdvertiserUpdateCall) -* [browsers](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Browser) - * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::BrowserListCall) -* [campaign creative associations](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CampaignCreativeAssociation) - * [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CampaignCreativeAssociationInsertCall) and 
[*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CampaignCreativeAssociationListCall) -* [campaigns](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Campaign) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CampaignUpdateCall) -* [change logs](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ChangeLog) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ChangeLogListCall) -* [cities](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::City) - * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CityListCall) -* [connection types](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ConnectionType) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ConnectionTypeListCall) -* [content categories](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ContentCategory) - * 
[*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ContentCategoryUpdateCall) -* [conversions](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Conversion) - * [*batchinsert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ConversionBatchupdateCall) -* [countries](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Country) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CountryListCall) -* [creative assets](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeAsset) - * [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeAssetInsertCall) -* [creative field values](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldValue) - * 
[*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldValueUpdateCall) -* [creative fields](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeField) - * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeFieldUpdateCall) -* [creative groups](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeGroup) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeGroupGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeGroupUpdateCall) -* [creatives](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Creative) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeUpdateCall) -* [dimension values](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DimensionValue) - * [*query*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DimensionValueQueryCall) -* [directory site contacts](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DirectorySiteContact) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DirectorySiteContactGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DirectorySiteContactListCall) -* [directory sites](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DirectorySite) - * 
[*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DirectorySiteListCall) -* [dynamic targeting keys](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DynamicTargetingKey) - * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DynamicTargetingKeyDeleteCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::DynamicTargetingKeyListCall) -* [event tags](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::EventTag) - * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::EventTagUpdateCall) -* [files](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::File) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FileGetCall) and 
[*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FileListCall) -* [floodlight activities](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivity) - * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityUpdateCall) -* [floodlight activity groups](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityGroup) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightActivityGroupUpdateCall) -* 
[floodlight configurations](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightConfiguration) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FloodlightConfigurationUpdateCall) -* [inventory items](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::InventoryItem) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::InventoryItemListCall) -* [languages](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Language) - * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::LanguageListCall) -* [metros](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Metro) - * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::MetroListCall) -* [mobile apps](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::MobileApp) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::MobileAppGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::MobileAppListCall) -* [mobile carriers](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::MobileCarrier) - * 
[*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::MobileCarrierListCall) -* [operating system versions](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OperatingSystemVersion) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OperatingSystemVersionListCall) -* [operating systems](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OperatingSystem) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OperatingSystemListCall) -* [order documents](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OrderDocument) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OrderDocumentListCall) -* [orders](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Order) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::OrderListCall) -* [placement groups](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementGroup) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementGroupGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementGroupUpdateCall) -* [placement strategies](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementStrategy) - * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementStrategyUpdateCall) -* [placements](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Placement) - * [*generatetags*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlacementUpdateCall) -* [platform types](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlatformType) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PlatformTypeListCall) -* [postal codes](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PostalCode) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::PostalCodeListCall) -* [projects](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Project) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ProjectListCall) -* [regions](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Region) - * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RegionListCall) -* [remarketing list shares](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListShare) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListShareGetCall), 
[*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListShareUpdateCall) -* [remarketing lists](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingList) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::RemarketingListUpdateCall) -* [reports](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Report) - * [*compatible fields query*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportUpdateCall) -* [sites](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Site) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SiteUpdateCall) -* [sizes](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Size) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SizeListCall) -* [subaccounts](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::Subaccount) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SubaccountInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::SubaccountUpdateCall) -* [targetable remarketing lists](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetableRemarketingList) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetableRemarketingListListCall) -* [targeting templates](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetingTemplate) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::TargetingTemplateUpdateCall) -* [user profiles](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserProfile) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserProfileListCall) -* [user role permission 
groups](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRolePermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRolePermissionGroupListCall) -* [user role permissions](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRolePermission) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRolePermissionListCall) -* [user roles](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRole) - * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::UserRoleUpdateCall) -* [video formats](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::VideoFormat) - * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::VideoFormatListCall) + * 
[*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserLandingPageUpdateCall) +* [advertisers](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Advertiser) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::AdvertiserUpdateCall) +* [browsers](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Browser) + * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::BrowserListCall) +* [campaign creative associations](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CampaignCreativeAssociation) + * [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CampaignCreativeAssociationListCall) +* 
[campaigns](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Campaign) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CampaignUpdateCall) +* [change logs](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ChangeLog) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ChangeLogListCall) +* [cities](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::City) + * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CityListCall) +* [connection types](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ConnectionType) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ConnectionTypeListCall) +* [content categories](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ContentCategory) + * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ContentCategoryGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ContentCategoryUpdateCall) +* [conversions](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Conversion) + * [*batchinsert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ConversionBatchupdateCall) +* [countries](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Country) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CountryListCall) +* [creative assets](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeAsset) + * [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeAssetInsertCall) +* [creative field values](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldValue) + * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldValueInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldValueUpdateCall) +* [creative fields](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeField) + * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeFieldUpdateCall) +* [creative groups](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeGroup) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeGroupUpdateCall) +* 
[creatives](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Creative) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeUpdateCall) +* [dimension values](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DimensionValue) + * [*query*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DimensionValueQueryCall) +* [directory site contacts](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DirectorySiteContact) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DirectorySiteContactGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DirectorySiteContactListCall) +* [directory sites](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DirectorySite) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DirectorySiteListCall) +* [dynamic targeting keys](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DynamicTargetingKey) + * 
[*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DynamicTargetingKeyDeleteCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::DynamicTargetingKeyListCall) +* [event tags](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::EventTag) + * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::EventTagUpdateCall) +* [files](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::File) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FileListCall) +* [floodlight activities](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivity) + * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityUpdateCall) +* [floodlight activity groups](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityGroup) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightActivityGroupUpdateCall) +* [floodlight configurations](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightConfiguration) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FloodlightConfigurationUpdateCall) +* [inventory 
items](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::InventoryItem) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::InventoryItemListCall) +* [languages](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Language) + * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::LanguageListCall) +* [metros](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Metro) + * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::MetroListCall) +* [mobile apps](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::MobileApp) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::MobileAppGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::MobileAppListCall) +* [mobile carriers](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::MobileCarrier) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::MobileCarrierListCall) +* [operating system versions](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OperatingSystemVersion) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OperatingSystemVersionListCall) +* [operating systems](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OperatingSystem) + * 
[*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OperatingSystemListCall) +* [order documents](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OrderDocument) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OrderDocumentListCall) +* [orders](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Order) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::OrderListCall) +* [placement groups](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementGroup) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementGroupUpdateCall) +* [placement strategies](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementStrategy) + * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementStrategyGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementStrategyUpdateCall) +* [placements](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Placement) + * [*generatetags*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlacementUpdateCall) +* [platform types](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlatformType) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PlatformTypeListCall) +* [postal codes](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PostalCode) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::PostalCodeListCall) +* 
[projects](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Project) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ProjectListCall) +* [regions](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Region) + * [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RegionListCall) +* [remarketing list shares](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListShare) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListShareUpdateCall) +* [remarketing lists](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingList) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::RemarketingListUpdateCall) +* [reports](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Report) + * [*compatible fields 
query*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportUpdateCall) +* [sites](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Site) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SiteUpdateCall) +* [sizes](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Size) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SizeGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SizeListCall) +* [subaccounts](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::Subaccount) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::SubaccountUpdateCall) +* [targetable remarketing lists](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetableRemarketingList) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetableRemarketingListListCall) +* [targeting templates](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetingTemplate) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetingTemplatePatchCall) and 
[*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::TargetingTemplateUpdateCall) +* [user profiles](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserProfile) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserProfileListCall) +* [user role permission groups](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRolePermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRolePermissionGroupListCall) +* [user role permissions](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRolePermission) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRolePermissionListCall) +* [user roles](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRole) + * [*delete*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRolePatchCall) and 
[*update*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::UserRoleUpdateCall) +* [video formats](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::VideoFormat) + * [*get*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::VideoFormatListCall) Upload supported by ... -* [*insert creative assets*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::CreativeAssetInsertCall) +* [*insert creative assets*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::CreativeAssetInsertCall) Download supported by ... -* [*get files*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::FileGetCall) -* [*files get reports*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/api::ReportFileGetCall) +* [*get files*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::FileGetCall) +* [*files get reports*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/api::ReportFileGetCall) @@ -152,17 +152,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/Dfareporting)** +* **[Hub](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/Dfareporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::CallBuilder) -* **[Resources](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::CallBuilder) +* **[Resources](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::Part)** + * **[Parts](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -260,17 +260,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -280,29 +280,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::Delegate) to the -[Method Builder](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::Delegate) to the +[Method Builder](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::RequestValue) and -[decodable](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::RequestValue) and +[decodable](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dfareporting3d2/5.0.2-beta-1+20190531/google_dfareporting3d2/client::RequestValue) are moved +* [request values](https://docs.rs/google-dfareporting3d2/5.0.2+20190531/google_dfareporting3d2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/dfareporting3d2/src/api.rs b/gen/dfareporting3d2/src/api.rs index dfef855627..0980010dd0 100644 --- a/gen/dfareporting3d2/src/api.rs +++ b/gen/dfareporting3d2/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> Dfareporting { Dfareporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/dfareporting/v3.2/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -322,7 +322,7 @@ impl<'a, S> Dfareporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dfareporting3d2/src/client.rs b/gen/dfareporting3d2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dfareporting3d2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dfareporting3d2/src/lib.rs b/gen/dfareporting3d2/src/lib.rs index 919d656b46..1eb85814aa 100644 --- a/gen/dfareporting3d2/src/lib.rs +++ b/gen/dfareporting3d2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *dfareporting* crate version *5.0.2-beta-1+20190531*, where *20190531* is the exact revision of the *dfareporting:v3.2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *dfareporting* crate version *5.0.2+20190531*, where *20190531* is the exact revision of the *dfareporting:v3.2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *dfareporting* *v3d2* API can be found at the //! [official documentation site](https://developers.google.com/doubleclick-advertisers/). diff --git a/gen/dfareporting3d3-cli/Cargo.toml b/gen/dfareporting3d3-cli/Cargo.toml index 4e45bbb559..8d81e2a030 100644 --- a/gen/dfareporting3d3-cli/Cargo.toml +++ b/gen/dfareporting3d3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dfareporting3d3-cli" -version = "4.0.1+20220104" +version = "5.0.2+20220104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dfareporting (protocol v3.3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d3-cli" @@ -20,13 +20,13 @@ name = "dfareporting3d3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dfareporting3d3] path = "../dfareporting3d3" -version = "4.0.1+20220104" +version = "5.0.2+20220104" + diff --git a/gen/dfareporting3d3-cli/README.md b/gen/dfareporting3d3-cli/README.md index 2f058a79db..f94c0a6664 100644 --- a/gen/dfareporting3d3-cli/README.md +++ b/gen/dfareporting3d3-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dfareporting* API at revision *20220104*. The CLI is at version *4.0.1*. +This documentation was generated from the *Dfareporting* API at revision *20220104*. The CLI is at version *5.0.2*. ```bash dfareporting3d3 [options] diff --git a/gen/dfareporting3d3-cli/mkdocs.yml b/gen/dfareporting3d3-cli/mkdocs.yml index c882e59b4f..df06fd21aa 100644 --- a/gen/dfareporting3d3-cli/mkdocs.yml +++ b/gen/dfareporting3d3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dfareporting v4.0.1+20220104 +site_name: Dfareporting v5.0.2+20220104 site_url: http://byron.github.io/google-apis-rs/google-dfareporting3d3-cli site_description: A complete library to interact with Dfareporting (protocol v3.3) @@ -7,212 +7,272 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d3- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['account-active-ad-summaries_get.md', 'Account Active Ad Summaries', 'Get'] -- ['account-permission-groups_get.md', 'Account Permission Groups', 'Get'] -- ['account-permission-groups_list.md', 'Account Permission Groups', 'List'] -- ['account-permissions_get.md', 'Account Permissions', 'Get'] -- ['account-permissions_list.md', 'Account Permissions', 'List'] -- ['account-user-profiles_get.md', 'Account User Profiles', 'Get'] -- ['account-user-profiles_insert.md', 'Account User Profiles', 'Insert'] -- ['account-user-profiles_list.md', 'Account User Profiles', 'List'] -- ['account-user-profiles_patch.md', 'Account User Profiles', 'Patch'] -- ['account-user-profiles_update.md', 'Account User Profiles', 'Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['ads_get.md', 'Ads', 'Get'] -- ['ads_insert.md', 'Ads', 'Insert'] -- ['ads_list.md', 'Ads', 'List'] 
-- ['ads_patch.md', 'Ads', 'Patch'] -- ['ads_update.md', 'Ads', 'Update'] -- ['advertiser-groups_delete.md', 'Advertiser Groups', 'Delete'] -- ['advertiser-groups_get.md', 'Advertiser Groups', 'Get'] -- ['advertiser-groups_insert.md', 'Advertiser Groups', 'Insert'] -- ['advertiser-groups_list.md', 'Advertiser Groups', 'List'] -- ['advertiser-groups_patch.md', 'Advertiser Groups', 'Patch'] -- ['advertiser-groups_update.md', 'Advertiser Groups', 'Update'] -- ['advertiser-landing-pages_get.md', 'Advertiser Landing Pages', 'Get'] -- ['advertiser-landing-pages_insert.md', 'Advertiser Landing Pages', 'Insert'] -- ['advertiser-landing-pages_list.md', 'Advertiser Landing Pages', 'List'] -- ['advertiser-landing-pages_patch.md', 'Advertiser Landing Pages', 'Patch'] -- ['advertiser-landing-pages_update.md', 'Advertiser Landing Pages', 'Update'] -- ['advertisers_get.md', 'Advertisers', 'Get'] -- ['advertisers_insert.md', 'Advertisers', 'Insert'] -- ['advertisers_list.md', 'Advertisers', 'List'] -- ['advertisers_patch.md', 'Advertisers', 'Patch'] -- ['advertisers_update.md', 'Advertisers', 'Update'] -- ['browsers_list.md', 'Browsers', 'List'] -- ['campaign-creative-associations_insert.md', 'Campaign Creative Associations', 'Insert'] -- ['campaign-creative-associations_list.md', 'Campaign Creative Associations', 'List'] -- ['campaigns_get.md', 'Campaigns', 'Get'] -- ['campaigns_insert.md', 'Campaigns', 'Insert'] -- ['campaigns_list.md', 'Campaigns', 'List'] -- ['campaigns_patch.md', 'Campaigns', 'Patch'] -- ['campaigns_update.md', 'Campaigns', 'Update'] -- ['change-logs_get.md', 'Change Logs', 'Get'] -- ['change-logs_list.md', 'Change Logs', 'List'] -- ['cities_list.md', 'Cities', 'List'] -- ['connection-types_get.md', 'Connection Types', 'Get'] -- ['connection-types_list.md', 'Connection Types', 'List'] -- ['content-categories_delete.md', 'Content Categories', 'Delete'] -- ['content-categories_get.md', 'Content Categories', 'Get'] -- ['content-categories_insert.md', 'Content 
Categories', 'Insert'] -- ['content-categories_list.md', 'Content Categories', 'List'] -- ['content-categories_patch.md', 'Content Categories', 'Patch'] -- ['content-categories_update.md', 'Content Categories', 'Update'] -- ['conversions_batchinsert.md', 'Conversions', 'Batchinsert'] -- ['conversions_batchupdate.md', 'Conversions', 'Batchupdate'] -- ['countries_get.md', 'Countries', 'Get'] -- ['countries_list.md', 'Countries', 'List'] -- ['creative-assets_insert.md', 'Creative Assets', 'Insert'] -- ['creative-field-values_delete.md', 'Creative Field Values', 'Delete'] -- ['creative-field-values_get.md', 'Creative Field Values', 'Get'] -- ['creative-field-values_insert.md', 'Creative Field Values', 'Insert'] -- ['creative-field-values_list.md', 'Creative Field Values', 'List'] -- ['creative-field-values_patch.md', 'Creative Field Values', 'Patch'] -- ['creative-field-values_update.md', 'Creative Field Values', 'Update'] -- ['creative-fields_delete.md', 'Creative Fields', 'Delete'] -- ['creative-fields_get.md', 'Creative Fields', 'Get'] -- ['creative-fields_insert.md', 'Creative Fields', 'Insert'] -- ['creative-fields_list.md', 'Creative Fields', 'List'] -- ['creative-fields_patch.md', 'Creative Fields', 'Patch'] -- ['creative-fields_update.md', 'Creative Fields', 'Update'] -- ['creative-groups_get.md', 'Creative Groups', 'Get'] -- ['creative-groups_insert.md', 'Creative Groups', 'Insert'] -- ['creative-groups_list.md', 'Creative Groups', 'List'] -- ['creative-groups_patch.md', 'Creative Groups', 'Patch'] -- ['creative-groups_update.md', 'Creative Groups', 'Update'] -- ['creatives_get.md', 'Creatives', 'Get'] -- ['creatives_insert.md', 'Creatives', 'Insert'] -- ['creatives_list.md', 'Creatives', 'List'] -- ['creatives_patch.md', 'Creatives', 'Patch'] -- ['creatives_update.md', 'Creatives', 'Update'] -- ['dimension-values_query.md', 'Dimension Values', 'Query'] -- ['directory-sites_get.md', 'Directory Sites', 'Get'] -- ['directory-sites_insert.md', 'Directory Sites', 
'Insert'] -- ['directory-sites_list.md', 'Directory Sites', 'List'] -- ['dynamic-targeting-keys_delete.md', 'Dynamic Targeting Keys', 'Delete'] -- ['dynamic-targeting-keys_insert.md', 'Dynamic Targeting Keys', 'Insert'] -- ['dynamic-targeting-keys_list.md', 'Dynamic Targeting Keys', 'List'] -- ['event-tags_delete.md', 'Event Tags', 'Delete'] -- ['event-tags_get.md', 'Event Tags', 'Get'] -- ['event-tags_insert.md', 'Event Tags', 'Insert'] -- ['event-tags_list.md', 'Event Tags', 'List'] -- ['event-tags_patch.md', 'Event Tags', 'Patch'] -- ['event-tags_update.md', 'Event Tags', 'Update'] -- ['files_get.md', 'Files', 'Get'] -- ['files_list.md', 'Files', 'List'] -- ['floodlight-activities_delete.md', 'Floodlight Activities', 'Delete'] -- ['floodlight-activities_generatetag.md', 'Floodlight Activities', 'Generatetag'] -- ['floodlight-activities_get.md', 'Floodlight Activities', 'Get'] -- ['floodlight-activities_insert.md', 'Floodlight Activities', 'Insert'] -- ['floodlight-activities_list.md', 'Floodlight Activities', 'List'] -- ['floodlight-activities_patch.md', 'Floodlight Activities', 'Patch'] -- ['floodlight-activities_update.md', 'Floodlight Activities', 'Update'] -- ['floodlight-activity-groups_get.md', 'Floodlight Activity Groups', 'Get'] -- ['floodlight-activity-groups_insert.md', 'Floodlight Activity Groups', 'Insert'] -- ['floodlight-activity-groups_list.md', 'Floodlight Activity Groups', 'List'] -- ['floodlight-activity-groups_patch.md', 'Floodlight Activity Groups', 'Patch'] -- ['floodlight-activity-groups_update.md', 'Floodlight Activity Groups', 'Update'] -- ['floodlight-configurations_get.md', 'Floodlight Configurations', 'Get'] -- ['floodlight-configurations_list.md', 'Floodlight Configurations', 'List'] -- ['floodlight-configurations_patch.md', 'Floodlight Configurations', 'Patch'] -- ['floodlight-configurations_update.md', 'Floodlight Configurations', 'Update'] -- ['inventory-items_get.md', 'Inventory Items', 'Get'] -- ['inventory-items_list.md', 
'Inventory Items', 'List'] -- ['languages_list.md', 'Languages', 'List'] -- ['metros_list.md', 'Metros', 'List'] -- ['mobile-apps_get.md', 'Mobile Apps', 'Get'] -- ['mobile-apps_list.md', 'Mobile Apps', 'List'] -- ['mobile-carriers_get.md', 'Mobile Carriers', 'Get'] -- ['mobile-carriers_list.md', 'Mobile Carriers', 'List'] -- ['operating-system-versions_get.md', 'Operating System Versions', 'Get'] -- ['operating-system-versions_list.md', 'Operating System Versions', 'List'] -- ['operating-systems_get.md', 'Operating Systems', 'Get'] -- ['operating-systems_list.md', 'Operating Systems', 'List'] -- ['order-documents_get.md', 'Order Documents', 'Get'] -- ['order-documents_list.md', 'Order Documents', 'List'] -- ['orders_get.md', 'Orders', 'Get'] -- ['orders_list.md', 'Orders', 'List'] -- ['placement-groups_get.md', 'Placement Groups', 'Get'] -- ['placement-groups_insert.md', 'Placement Groups', 'Insert'] -- ['placement-groups_list.md', 'Placement Groups', 'List'] -- ['placement-groups_patch.md', 'Placement Groups', 'Patch'] -- ['placement-groups_update.md', 'Placement Groups', 'Update'] -- ['placement-strategies_delete.md', 'Placement Strategies', 'Delete'] -- ['placement-strategies_get.md', 'Placement Strategies', 'Get'] -- ['placement-strategies_insert.md', 'Placement Strategies', 'Insert'] -- ['placement-strategies_list.md', 'Placement Strategies', 'List'] -- ['placement-strategies_patch.md', 'Placement Strategies', 'Patch'] -- ['placement-strategies_update.md', 'Placement Strategies', 'Update'] -- ['placements_generatetags.md', 'Placements', 'Generatetags'] -- ['placements_get.md', 'Placements', 'Get'] -- ['placements_insert.md', 'Placements', 'Insert'] -- ['placements_list.md', 'Placements', 'List'] -- ['placements_patch.md', 'Placements', 'Patch'] -- ['placements_update.md', 'Placements', 'Update'] -- ['platform-types_get.md', 'Platform Types', 'Get'] -- ['platform-types_list.md', 'Platform Types', 'List'] -- ['postal-codes_get.md', 'Postal Codes', 'Get'] -- 
['postal-codes_list.md', 'Postal Codes', 'List'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_list.md', 'Projects', 'List'] -- ['regions_list.md', 'Regions', 'List'] -- ['remarketing-list-shares_get.md', 'Remarketing List Shares', 'Get'] -- ['remarketing-list-shares_patch.md', 'Remarketing List Shares', 'Patch'] -- ['remarketing-list-shares_update.md', 'Remarketing List Shares', 'Update'] -- ['remarketing-lists_get.md', 'Remarketing Lists', 'Get'] -- ['remarketing-lists_insert.md', 'Remarketing Lists', 'Insert'] -- ['remarketing-lists_list.md', 'Remarketing Lists', 'List'] -- ['remarketing-lists_patch.md', 'Remarketing Lists', 'Patch'] -- ['remarketing-lists_update.md', 'Remarketing Lists', 'Update'] -- ['reports_compatible-fields-query.md', 'Reports', 'Compatible Fields Query'] -- ['reports_delete.md', 'Reports', 'Delete'] -- ['reports_files-get.md', 'Reports', 'Files Get'] -- ['reports_files-list.md', 'Reports', 'Files List'] -- ['reports_get.md', 'Reports', 'Get'] -- ['reports_insert.md', 'Reports', 'Insert'] -- ['reports_list.md', 'Reports', 'List'] -- ['reports_patch.md', 'Reports', 'Patch'] -- ['reports_run.md', 'Reports', 'Run'] -- ['reports_update.md', 'Reports', 'Update'] -- ['sites_get.md', 'Sites', 'Get'] -- ['sites_insert.md', 'Sites', 'Insert'] -- ['sites_list.md', 'Sites', 'List'] -- ['sites_patch.md', 'Sites', 'Patch'] -- ['sites_update.md', 'Sites', 'Update'] -- ['sizes_get.md', 'Sizes', 'Get'] -- ['sizes_insert.md', 'Sizes', 'Insert'] -- ['sizes_list.md', 'Sizes', 'List'] -- ['subaccounts_get.md', 'Subaccounts', 'Get'] -- ['subaccounts_insert.md', 'Subaccounts', 'Insert'] -- ['subaccounts_list.md', 'Subaccounts', 'List'] -- ['subaccounts_patch.md', 'Subaccounts', 'Patch'] -- ['subaccounts_update.md', 'Subaccounts', 'Update'] -- ['targetable-remarketing-lists_get.md', 'Targetable Remarketing Lists', 'Get'] -- ['targetable-remarketing-lists_list.md', 'Targetable Remarketing Lists', 'List'] -- ['targeting-templates_get.md', 'Targeting 
Templates', 'Get'] -- ['targeting-templates_insert.md', 'Targeting Templates', 'Insert'] -- ['targeting-templates_list.md', 'Targeting Templates', 'List'] -- ['targeting-templates_patch.md', 'Targeting Templates', 'Patch'] -- ['targeting-templates_update.md', 'Targeting Templates', 'Update'] -- ['user-profiles_get.md', 'User Profiles', 'Get'] -- ['user-profiles_list.md', 'User Profiles', 'List'] -- ['user-role-permission-groups_get.md', 'User Role Permission Groups', 'Get'] -- ['user-role-permission-groups_list.md', 'User Role Permission Groups', 'List'] -- ['user-role-permissions_get.md', 'User Role Permissions', 'Get'] -- ['user-role-permissions_list.md', 'User Role Permissions', 'List'] -- ['user-roles_delete.md', 'User Roles', 'Delete'] -- ['user-roles_get.md', 'User Roles', 'Get'] -- ['user-roles_insert.md', 'User Roles', 'Insert'] -- ['user-roles_list.md', 'User Roles', 'List'] -- ['user-roles_patch.md', 'User Roles', 'Patch'] -- ['user-roles_update.md', 'User Roles', 'Update'] -- ['video-formats_get.md', 'Video Formats', 'Get'] -- ['video-formats_list.md', 'Video Formats', 'List'] +nav: +- Home: 'index.md' +- 'Account Active Ad Summaries': + - 'Get': 'account-active-ad-summaries_get.md' +- 'Account Permission Groups': + - 'Get': 'account-permission-groups_get.md' + - 'List': 'account-permission-groups_list.md' +- 'Account Permissions': + - 'Get': 'account-permissions_get.md' + - 'List': 'account-permissions_list.md' +- 'Account User Profiles': + - 'Get': 'account-user-profiles_get.md' + - 'Insert': 'account-user-profiles_insert.md' + - 'List': 'account-user-profiles_list.md' + - 'Patch': 'account-user-profiles_patch.md' + - 'Update': 'account-user-profiles_update.md' +- 'Accounts': + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' + - 'Update': 'accounts_update.md' +- 'Ads': + - 'Get': 'ads_get.md' + - 'Insert': 'ads_insert.md' + - 'List': 'ads_list.md' + - 'Patch': 'ads_patch.md' + - 'Update': 'ads_update.md' +- 
'Advertiser Groups': + - 'Delete': 'advertiser-groups_delete.md' + - 'Get': 'advertiser-groups_get.md' + - 'Insert': 'advertiser-groups_insert.md' + - 'List': 'advertiser-groups_list.md' + - 'Patch': 'advertiser-groups_patch.md' + - 'Update': 'advertiser-groups_update.md' +- 'Advertiser Landing Pages': + - 'Get': 'advertiser-landing-pages_get.md' + - 'Insert': 'advertiser-landing-pages_insert.md' + - 'List': 'advertiser-landing-pages_list.md' + - 'Patch': 'advertiser-landing-pages_patch.md' + - 'Update': 'advertiser-landing-pages_update.md' +- 'Advertisers': + - 'Get': 'advertisers_get.md' + - 'Insert': 'advertisers_insert.md' + - 'List': 'advertisers_list.md' + - 'Patch': 'advertisers_patch.md' + - 'Update': 'advertisers_update.md' +- 'Browsers': + - 'List': 'browsers_list.md' +- 'Campaign Creative Associations': + - 'Insert': 'campaign-creative-associations_insert.md' + - 'List': 'campaign-creative-associations_list.md' +- 'Campaigns': + - 'Get': 'campaigns_get.md' + - 'Insert': 'campaigns_insert.md' + - 'List': 'campaigns_list.md' + - 'Patch': 'campaigns_patch.md' + - 'Update': 'campaigns_update.md' +- 'Change Logs': + - 'Get': 'change-logs_get.md' + - 'List': 'change-logs_list.md' +- 'Cities': + - 'List': 'cities_list.md' +- 'Connection Types': + - 'Get': 'connection-types_get.md' + - 'List': 'connection-types_list.md' +- 'Content Categories': + - 'Delete': 'content-categories_delete.md' + - 'Get': 'content-categories_get.md' + - 'Insert': 'content-categories_insert.md' + - 'List': 'content-categories_list.md' + - 'Patch': 'content-categories_patch.md' + - 'Update': 'content-categories_update.md' +- 'Conversions': + - 'Batchinsert': 'conversions_batchinsert.md' + - 'Batchupdate': 'conversions_batchupdate.md' +- 'Countries': + - 'Get': 'countries_get.md' + - 'List': 'countries_list.md' +- 'Creative Assets': + - 'Insert': 'creative-assets_insert.md' +- 'Creative Field Values': + - 'Delete': 'creative-field-values_delete.md' + - 'Get': 
'creative-field-values_get.md' + - 'Insert': 'creative-field-values_insert.md' + - 'List': 'creative-field-values_list.md' + - 'Patch': 'creative-field-values_patch.md' + - 'Update': 'creative-field-values_update.md' +- 'Creative Fields': + - 'Delete': 'creative-fields_delete.md' + - 'Get': 'creative-fields_get.md' + - 'Insert': 'creative-fields_insert.md' + - 'List': 'creative-fields_list.md' + - 'Patch': 'creative-fields_patch.md' + - 'Update': 'creative-fields_update.md' +- 'Creative Groups': + - 'Get': 'creative-groups_get.md' + - 'Insert': 'creative-groups_insert.md' + - 'List': 'creative-groups_list.md' + - 'Patch': 'creative-groups_patch.md' + - 'Update': 'creative-groups_update.md' +- 'Creatives': + - 'Get': 'creatives_get.md' + - 'Insert': 'creatives_insert.md' + - 'List': 'creatives_list.md' + - 'Patch': 'creatives_patch.md' + - 'Update': 'creatives_update.md' +- 'Dimension Values': + - 'Query': 'dimension-values_query.md' +- 'Directory Sites': + - 'Get': 'directory-sites_get.md' + - 'Insert': 'directory-sites_insert.md' + - 'List': 'directory-sites_list.md' +- 'Dynamic Targeting Keys': + - 'Delete': 'dynamic-targeting-keys_delete.md' + - 'Insert': 'dynamic-targeting-keys_insert.md' + - 'List': 'dynamic-targeting-keys_list.md' +- 'Event Tags': + - 'Delete': 'event-tags_delete.md' + - 'Get': 'event-tags_get.md' + - 'Insert': 'event-tags_insert.md' + - 'List': 'event-tags_list.md' + - 'Patch': 'event-tags_patch.md' + - 'Update': 'event-tags_update.md' +- 'Files': + - 'Get': 'files_get.md' + - 'List': 'files_list.md' +- 'Floodlight Activities': + - 'Delete': 'floodlight-activities_delete.md' + - 'Generatetag': 'floodlight-activities_generatetag.md' + - 'Get': 'floodlight-activities_get.md' + - 'Insert': 'floodlight-activities_insert.md' + - 'List': 'floodlight-activities_list.md' + - 'Patch': 'floodlight-activities_patch.md' + - 'Update': 'floodlight-activities_update.md' +- 'Floodlight Activity Groups': + - 'Get': 'floodlight-activity-groups_get.md' + - 
'Insert': 'floodlight-activity-groups_insert.md' + - 'List': 'floodlight-activity-groups_list.md' + - 'Patch': 'floodlight-activity-groups_patch.md' + - 'Update': 'floodlight-activity-groups_update.md' +- 'Floodlight Configurations': + - 'Get': 'floodlight-configurations_get.md' + - 'List': 'floodlight-configurations_list.md' + - 'Patch': 'floodlight-configurations_patch.md' + - 'Update': 'floodlight-configurations_update.md' +- 'Inventory Items': + - 'Get': 'inventory-items_get.md' + - 'List': 'inventory-items_list.md' +- 'Languages': + - 'List': 'languages_list.md' +- 'Metros': + - 'List': 'metros_list.md' +- 'Mobile Apps': + - 'Get': 'mobile-apps_get.md' + - 'List': 'mobile-apps_list.md' +- 'Mobile Carriers': + - 'Get': 'mobile-carriers_get.md' + - 'List': 'mobile-carriers_list.md' +- 'Operating System Versions': + - 'Get': 'operating-system-versions_get.md' + - 'List': 'operating-system-versions_list.md' +- 'Operating Systems': + - 'Get': 'operating-systems_get.md' + - 'List': 'operating-systems_list.md' +- 'Order Documents': + - 'Get': 'order-documents_get.md' + - 'List': 'order-documents_list.md' +- 'Orders': + - 'Get': 'orders_get.md' + - 'List': 'orders_list.md' +- 'Placement Groups': + - 'Get': 'placement-groups_get.md' + - 'Insert': 'placement-groups_insert.md' + - 'List': 'placement-groups_list.md' + - 'Patch': 'placement-groups_patch.md' + - 'Update': 'placement-groups_update.md' +- 'Placement Strategies': + - 'Delete': 'placement-strategies_delete.md' + - 'Get': 'placement-strategies_get.md' + - 'Insert': 'placement-strategies_insert.md' + - 'List': 'placement-strategies_list.md' + - 'Patch': 'placement-strategies_patch.md' + - 'Update': 'placement-strategies_update.md' +- 'Placements': + - 'Generatetags': 'placements_generatetags.md' + - 'Get': 'placements_get.md' + - 'Insert': 'placements_insert.md' + - 'List': 'placements_list.md' + - 'Patch': 'placements_patch.md' + - 'Update': 'placements_update.md' +- 'Platform Types': + - 'Get': 
'platform-types_get.md' + - 'List': 'platform-types_list.md' +- 'Postal Codes': + - 'Get': 'postal-codes_get.md' + - 'List': 'postal-codes_list.md' +- 'Projects': + - 'Get': 'projects_get.md' + - 'List': 'projects_list.md' +- 'Regions': + - 'List': 'regions_list.md' +- 'Remarketing List Shares': + - 'Get': 'remarketing-list-shares_get.md' + - 'Patch': 'remarketing-list-shares_patch.md' + - 'Update': 'remarketing-list-shares_update.md' +- 'Remarketing Lists': + - 'Get': 'remarketing-lists_get.md' + - 'Insert': 'remarketing-lists_insert.md' + - 'List': 'remarketing-lists_list.md' + - 'Patch': 'remarketing-lists_patch.md' + - 'Update': 'remarketing-lists_update.md' +- 'Reports': + - 'Compatible Fields Query': 'reports_compatible-fields-query.md' + - 'Delete': 'reports_delete.md' + - 'Files Get': 'reports_files-get.md' + - 'Files List': 'reports_files-list.md' + - 'Get': 'reports_get.md' + - 'Insert': 'reports_insert.md' + - 'List': 'reports_list.md' + - 'Patch': 'reports_patch.md' + - 'Run': 'reports_run.md' + - 'Update': 'reports_update.md' +- 'Sites': + - 'Get': 'sites_get.md' + - 'Insert': 'sites_insert.md' + - 'List': 'sites_list.md' + - 'Patch': 'sites_patch.md' + - 'Update': 'sites_update.md' +- 'Sizes': + - 'Get': 'sizes_get.md' + - 'Insert': 'sizes_insert.md' + - 'List': 'sizes_list.md' +- 'Subaccounts': + - 'Get': 'subaccounts_get.md' + - 'Insert': 'subaccounts_insert.md' + - 'List': 'subaccounts_list.md' + - 'Patch': 'subaccounts_patch.md' + - 'Update': 'subaccounts_update.md' +- 'Targetable Remarketing Lists': + - 'Get': 'targetable-remarketing-lists_get.md' + - 'List': 'targetable-remarketing-lists_list.md' +- 'Targeting Templates': + - 'Get': 'targeting-templates_get.md' + - 'Insert': 'targeting-templates_insert.md' + - 'List': 'targeting-templates_list.md' + - 'Patch': 'targeting-templates_patch.md' + - 'Update': 'targeting-templates_update.md' +- 'User Profiles': + - 'Get': 'user-profiles_get.md' + - 'List': 'user-profiles_list.md' +- 'User Role 
Permission Groups': + - 'Get': 'user-role-permission-groups_get.md' + - 'List': 'user-role-permission-groups_list.md' +- 'User Role Permissions': + - 'Get': 'user-role-permissions_get.md' + - 'List': 'user-role-permissions_list.md' +- 'User Roles': + - 'Delete': 'user-roles_delete.md' + - 'Get': 'user-roles_get.md' + - 'Insert': 'user-roles_insert.md' + - 'List': 'user-roles_list.md' + - 'Patch': 'user-roles_patch.md' + - 'Update': 'user-roles_update.md' +- 'Video Formats': + - 'Get': 'video-formats_get.md' + - 'List': 'video-formats_list.md' theme: readthedocs diff --git a/gen/dfareporting3d3-cli/src/client.rs b/gen/dfareporting3d3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dfareporting3d3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dfareporting3d3-cli/src/main.rs b/gen/dfareporting3d3-cli/src/main.rs index 12ec2f49b6..a49402a6a2 100644 --- a/gen/dfareporting3d3-cli/src/main.rs +++ b/gen/dfareporting3d3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dfareporting3d3::{api, Error, oauth2}; +use google_dfareporting3d3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -478,10 +477,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-role-id" => { - call = call.user_role_id(value.unwrap_or("")); + call = call.user_role_id( value.map(|v| arg_from_str(v, err, "user-role-id", "int64")).unwrap_or(-0)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -496,13 +495,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = 
call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -838,13 +837,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1317,10 +1316,10 @@ where call = call.add_type(value.unwrap_or("")); }, "ssl-required" => { - call = call.ssl_required(arg_from_str(value.unwrap_or("false"), err, "ssl-required", "boolean")); + call = call.ssl_required( value.map(|v| arg_from_str(v, err, "ssl-required", "boolean")).unwrap_or(false)); }, "ssl-compliant" => { - call = call.ssl_compliant(arg_from_str(value.unwrap_or("false"), err, "ssl-compliant", "boolean")); + call = call.ssl_compliant( value.map(|v| arg_from_str(v, err, "ssl-compliant", "boolean")).unwrap_or(false)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -1329,58 +1328,58 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "remarketing-list-ids" => { - call = call.add_remarketing_list_ids(value.unwrap_or("")); + call = call.add_remarketing_list_ids( value.map(|v| arg_from_str(v, err, "remarketing-list-ids", "int64")).unwrap_or(-0)); }, "placement-ids" => { - call = 
call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "landing-page-ids" => { - call = call.add_landing_page_ids(value.unwrap_or("")); + call = call.add_landing_page_ids( value.map(|v| arg_from_str(v, err, "landing-page-ids", "int64")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dynamic-click-tracker" => { - call = call.dynamic_click_tracker(arg_from_str(value.unwrap_or("false"), err, "dynamic-click-tracker", "boolean")); + call = call.dynamic_click_tracker( value.map(|v| arg_from_str(v, err, "dynamic-click-tracker", "boolean")).unwrap_or(false)); }, "creative-optimization-configuration-ids" => { - call = call.add_creative_optimization_configuration_ids(value.unwrap_or("")); + call = call.add_creative_optimization_configuration_ids( value.map(|v| arg_from_str(v, err, "creative-optimization-configuration-ids", "int64")).unwrap_or(-0)); }, "creative-ids" => { - call = call.add_creative_ids(value.unwrap_or("")); + call = call.add_creative_ids( value.map(|v| arg_from_str(v, err, "creative-ids", "int64")).unwrap_or(-0)); }, "compatibility" => { call = call.compatibility(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", 
"int64")).unwrap_or(-0)); }, "audience-segment-ids" => { - call = call.add_audience_segment_ids(value.unwrap_or("")); + call = call.add_audience_segment_ids( value.map(|v| arg_from_str(v, err, "audience-segment-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1932,10 +1931,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2309,7 +2308,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -2324,19 +2323,19 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - 
call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2733,7 +2732,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "status" => { call = call.status(value.unwrap_or("")); @@ -2751,22 +2750,22 @@ where call = call.page_token(value.unwrap_or("")); }, "only-parent" => { - call = call.only_parent(arg_from_str(value.unwrap_or("false"), err, "only-parent", "boolean")); + call = call.only_parent( value.map(|v| arg_from_str(v, err, "only-parent", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-advertisers-without-groups-only" => { - call = call.include_advertisers_without_groups_only(arg_from_str(value.unwrap_or("false"), err, "include-advertisers-without-groups-only", "boolean")); + call = call.include_advertisers_without_groups_only( value.map(|v| arg_from_str(v, err, "include-advertisers-without-groups-only", "boolean")).unwrap_or(false)); }, "ids" => { - call = 
call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-ids" => { - call = call.add_floodlight_configuration_ids(value.unwrap_or("")); + call = call.add_floodlight_configuration_ids( value.map(|v| arg_from_str(v, err, "floodlight-configuration-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3184,7 +3183,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3418,7 +3417,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -3433,28 +3432,28 @@ where call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "excluded-ids" => { - call = 
call.add_excluded_ids(value.unwrap_or("")); + call = call.add_excluded_ids( value.map(|v| arg_from_str(v, err, "excluded-ids", "int64")).unwrap_or(-0)); }, "at-least-one-optimization-activity" => { - call = call.at_least_one_optimization_activity(arg_from_str(value.unwrap_or("false"), err, "at-least-one-optimization-activity", "boolean")); + call = call.at_least_one_optimization_activity( value.map(|v| arg_from_str(v, err, "at-least-one-optimization-activity", "boolean")).unwrap_or(false)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3814,7 +3813,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-profile-ids" => { - call = call.add_user_profile_ids(value.unwrap_or("")); + call = call.add_user_profile_ids( value.map(|v| arg_from_str(v, err, "user-profile-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -3826,19 +3825,19 @@ where call = call.object_type(value.unwrap_or("")); }, "object-ids" => { - call = call.add_object_ids(value.unwrap_or("")); + call = call.add_object_ids( value.map(|v| arg_from_str(v, err, "object-ids", "int64")).unwrap_or(-0)); }, "min-change-time" => { call = call.min_change_time(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( 
value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-change-time" => { call = call.max_change_time(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "action" => { call = call.action(value.unwrap_or("")); @@ -3897,16 +3896,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "region-dart-ids" => { - call = call.add_region_dart_ids(value.unwrap_or("")); + call = call.add_region_dart_ids( value.map(|v| arg_from_str(v, err, "region-dart-ids", "int64")).unwrap_or(-0)); }, "name-prefix" => { call = call.name_prefix(value.unwrap_or("")); }, "dart-ids" => { - call = call.add_dart_ids(value.unwrap_or("")); + call = call.add_dart_ids( value.map(|v| arg_from_str(v, err, "dart-ids", "int64")).unwrap_or(-0)); }, "country-dart-ids" => { - call = call.add_country_dart_ids(value.unwrap_or("")); + call = call.add_country_dart_ids( value.map(|v| arg_from_str(v, err, "country-dart-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4262,10 +4261,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5073,10 +5072,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| 
arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5510,13 +5509,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5925,16 +5924,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-number" => { - call = call.group_number(arg_from_str(value.unwrap_or("-0"), err, "group-number", "integer")); + call = call.group_number( value.map(|v| arg_from_str(v, err, "group-number", "int32")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6403,7 +6402,7 @@ where call = call.add_types(value.unwrap_or("")); }, "studio-creative-id" => { - call = call.studio_creative_id(value.unwrap_or("")); + call = call.studio_creative_id( value.map(|v| arg_from_str(v, err, "studio-creative-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ 
-6412,40 +6411,40 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "rendering-ids" => { - call = call.add_rendering_ids(value.unwrap_or("")); + call = call.add_rendering_ids( value.map(|v| arg_from_str(v, err, "rendering-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "creative-field-ids" => { - call = call.add_creative_field_ids(value.unwrap_or("")); + call = call.add_creative_field_ids( value.map(|v| arg_from_str(v, err, "creative-field-ids", "int64")).unwrap_or(-0)); }, "companion-creative-ids" => { - call = call.add_companion_creative_ids(value.unwrap_or("")); + call = call.add_companion_creative_ids( value.map(|v| arg_from_str(v, err, "companion-creative-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = 
call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6868,7 +6867,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7093,25 +7092,25 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dfp-network-code" => { call = call.dfp_network_code(value.unwrap_or("")); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = 
call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7302,13 +7301,13 @@ where call = call.object_type(value.unwrap_or("")); }, "object-id" => { - call = call.object_id(value.unwrap_or("")); + call = call.object_id( value.map(|v| arg_from_str(v, err, "object-id", "int64")).unwrap_or(-0)); }, "names" => { call = call.add_names(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7581,25 +7580,25 @@ where call = call.search_string(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "event-tag-types" => { call = call.add_event_tag_types(value.unwrap_or("")); }, "enabled" => { - call = call.enabled(arg_from_str(value.unwrap_or("false"), err, "enabled", "boolean")); + call = call.enabled( value.map(|v| arg_from_str(v, err, "enabled", "boolean")).unwrap_or(false)); }, "definitions-only" => { - call = call.definitions_only(arg_from_str(value.unwrap_or("false"), err, "definitions-only", "boolean")); + call = call.definitions_only( value.map(|v| arg_from_str(v, err, "definitions-only", "boolean")).unwrap_or(false)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "ad-id" => { - call = 
call.ad_id(value.unwrap_or("")); + call = call.ad_id( value.map(|v| arg_from_str(v, err, "ad-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7953,7 +7952,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8053,7 +8052,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8301,13 +8300,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "floodlight-activity-group-type" => { call = call.floodlight_activity_group_type(value.unwrap_or("")); @@ -8319,10 +8318,10 @@ where call = call.floodlight_activity_group_name(value.unwrap_or("")); }, "floodlight-activity-group-ids" => { - call = call.add_floodlight_activity_group_ids(value.unwrap_or("")); + call = call.add_floodlight_activity_group_ids( value.map(|v| arg_from_str(v, err, "floodlight-activity-group-ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = 
call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8806,16 +8805,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9145,7 +9144,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9498,22 +9497,22 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, 
err, "max-results", "int32")).unwrap_or(-0)); }, "in-plan" => { - call = call.in_plan(arg_from_str(value.unwrap_or("false"), err, "in-plan", "boolean")); + call = call.in_plan( value.map(|v| arg_from_str(v, err, "in-plan", "boolean")).unwrap_or(false)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9731,7 +9730,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { call = call.add_ids(value.unwrap_or("")); @@ -10163,7 +10162,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10172,16 +10171,16 @@ where call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -10295,7 +10294,7 @@ where call = 
call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10304,10 +10303,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10568,7 +10567,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10577,7 +10576,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "placement-group-type" => { call = call.placement_group_type(value.unwrap_or("")); @@ -10595,28 +10594,28 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, 
"directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11162,10 +11161,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11400,10 +11399,10 @@ where call = call.add_tag_formats(value.unwrap_or("")); }, "placement-ids" => { - call = call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = 
false; @@ -11698,10 +11697,10 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -11710,7 +11709,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "payment-source" => { call = call.payment_source(value.unwrap_or("")); @@ -11728,34 +11727,34 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-ids" => { - call = call.add_group_ids(value.unwrap_or("")); + call = call.add_group_ids( value.map(|v| arg_from_str(v, err, "group-ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, 
"compatibilities" => { call = call.add_compatibilities(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12445,13 +12444,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12954,13 +12953,13 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = 
call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -13502,7 +13501,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13787,7 +13786,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14008,7 +14007,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "synchronous" => { - call = call.synchronous(arg_from_str(value.unwrap_or("false"), err, "synchronous", "boolean")); + call = call.synchronous( value.map(|v| arg_from_str(v, err, "synchronous", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14407,10 +14406,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unmapped-site" => { - call = call.unmapped_site(arg_from_str(value.unwrap_or("false"), err, "unmapped-site", "boolean")); + call = call.unmapped_site( value.map(|v| arg_from_str(v, err, "unmapped-site", "boolean")).unwrap_or(false)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -14425,31 +14424,31 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", 
"int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, "ad-words-site" => { - call = call.ad_words_site(arg_from_str(value.unwrap_or("false"), err, "ad-words-site", "boolean")); + call = call.ad_words_site( value.map(|v| arg_from_str(v, err, "ad-words-site", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14898,16 +14897,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "width" => { - call = call.width(arg_from_str(value.unwrap_or("-0"), err, "width", "integer")); + call = call.width( value.map(|v| arg_from_str(v, err, "width", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "iab-standard" => { - call = call.iab_standard(arg_from_str(value.unwrap_or("false"), err, "iab-standard", "boolean")); + call = call.iab_standard( value.map(|v| arg_from_str(v, err, "iab-standard", "boolean")).unwrap_or(false)); }, "height" => { - call = call.height(arg_from_str(value.unwrap_or("-0"), err, "height", "integer")); + call = call.height( value.map(|v| arg_from_str(v, err, "height", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15116,10 +15115,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15417,10 +15416,10 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15642,13 +15641,13 @@ where 
call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16168,7 +16167,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16411,7 +16410,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -16426,13 +16425,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "account-user-role-only" => { - call = call.account_user_role_only(arg_from_str(value.unwrap_or("false"), err, "account-user-role-only", "boolean")); + call = call.account_user_role_only( value.map(|v| arg_from_str(v, err, "account-user-role-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = 
false; @@ -23709,7 +23708,7 @@ async fn main() { let mut app = App::new("dfareporting3d3") .author("Sebastian Thiel ") - .version("4.0.1+20220104") + .version("5.0.2+20220104") .about("Build applications to efficiently manage large or complex trafficking, reporting, and attribution workflows for Campaign Manager 360.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dfareporting3d3_cli") .arg(Arg::with_name("url") diff --git a/gen/dfareporting3d3/Cargo.toml b/gen/dfareporting3d3/Cargo.toml index e8bc33e014..c7fb002b5e 100644 --- a/gen/dfareporting3d3/Cargo.toml +++ b/gen/dfareporting3d3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dfareporting3d3" -version = "5.0.2-beta-1+20220104" +version = "5.0.2+20220104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dfareporting (protocol v3.3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d3" homepage = "https://developers.google.com/doubleclick-advertisers/" -documentation = "https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104" +documentation = "https://docs.rs/google-dfareporting3d3/5.0.2+20220104" license = "MIT" keywords = ["dfareporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dfareporting3d3/README.md b/gen/dfareporting3d3/README.md index 3847e14894..f04baba844 100644 --- a/gen/dfareporting3d3/README.md +++ b/gen/dfareporting3d3/README.md @@ -5,144 +5,144 @@ DO NOT EDIT ! --> The `google-dfareporting3d3` library allows access to all features of the *Google Dfareporting* service. -This documentation was generated from *Dfareporting* crate version *5.0.2-beta-1+20220104*, where *20220104* is the exact revision of the *dfareporting:v3.3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Dfareporting* crate version *5.0.2+20220104*, where *20220104* is the exact revision of the *dfareporting:v3.3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dfareporting* *v3d3* API can be found at the [official documentation site](https://developers.google.com/doubleclick-advertisers/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/Dfareporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/Dfareporting) ... -* [account active ad summaries](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountActiveAdSummary) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountActiveAdSummaryGetCall) -* [account permission groups](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountPermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountPermissionGroupListCall) -* [account permissions](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountPermission) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountPermissionListCall) -* [account user profiles](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountUserProfile) - * 
[*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountUserProfileUpdateCall) -* [accounts](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Account) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AccountUpdateCall) -* [ads](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Ad) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdUpdateCall) -* [advertiser 
groups](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserGroup) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserGroupUpdateCall) +* [account active ad summaries](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountActiveAdSummary) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountActiveAdSummaryGetCall) +* [account permission groups](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountPermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountPermissionGroupListCall) +* [account permissions](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountPermission) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountPermissionListCall) +* [account user 
profiles](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountUserProfile) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountUserProfileUpdateCall) +* [accounts](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Account) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AccountUpdateCall) +* [ads](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Ad) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdUpdateCall) +* [advertiser 
groups](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserGroup) + * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserGroupUpdateCall) * advertiser landing pages - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserLandingPageUpdateCall) -* [advertisers](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Advertiser) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserListCall), 
[*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::AdvertiserUpdateCall) -* [browsers](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Browser) - * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::BrowserListCall) -* [campaign creative associations](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CampaignCreativeAssociation) - * [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CampaignCreativeAssociationListCall) -* [campaigns](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Campaign) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CampaignUpdateCall) -* [change logs](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ChangeLog) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ChangeLogListCall) -* 
[cities](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::City) - * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CityListCall) -* [connection types](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ConnectionType) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ConnectionTypeListCall) -* [content categories](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ContentCategory) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ContentCategoryUpdateCall) -* [conversions](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Conversion) - * [*batchinsert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ConversionBatchupdateCall) -* [countries](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Country) - * 
[*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CountryListCall) -* [creative assets](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeAsset) - * [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeAssetInsertCall) -* [creative field values](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldValue) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldValueUpdateCall) -* [creative fields](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeField) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeFieldUpdateCall) -* [creative groups](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeGroup) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeGroupUpdateCall) -* [creatives](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Creative) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeUpdateCall) -* [dimension values](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DimensionValue) - * 
[*query*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DimensionValueQueryCall) -* [directory sites](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DirectorySite) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DirectorySiteListCall) -* [dynamic targeting keys](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DynamicTargetingKey) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DynamicTargetingKeyDeleteCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::DynamicTargetingKeyListCall) -* [event tags](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::EventTag) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::EventTagUpdateCall) -* 
[files](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::File) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FileListCall) -* [floodlight activities](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivity) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityUpdateCall) -* [floodlight activity groups](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityGroup) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityGroupListCall), 
[*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightActivityGroupUpdateCall) -* [floodlight configurations](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightConfiguration) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FloodlightConfigurationUpdateCall) -* [inventory items](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::InventoryItem) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::InventoryItemListCall) -* [languages](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Language) - * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::LanguageListCall) -* [metros](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Metro) - * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::MetroListCall) -* [mobile apps](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::MobileApp) - * 
[*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::MobileAppGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::MobileAppListCall) -* [mobile carriers](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::MobileCarrier) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::MobileCarrierListCall) -* [operating system versions](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OperatingSystemVersion) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OperatingSystemVersionListCall) -* [operating systems](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OperatingSystem) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OperatingSystemListCall) -* [order documents](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OrderDocument) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OrderDocumentListCall) -* [orders](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Order) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OrderGetCall) and 
[*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::OrderListCall) -* [placement groups](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementGroup) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementGroupUpdateCall) -* [placement strategies](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementStrategy) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementStrategyUpdateCall) -* [placements](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Placement) - * 
[*generatetags*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlacementUpdateCall) -* [platform types](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlatformType) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PlatformTypeListCall) -* [postal codes](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PostalCode) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::PostalCodeListCall) -* [projects](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Project) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ProjectListCall) -* [regions](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Region) - * 
[*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RegionListCall) -* [remarketing list shares](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListShare) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListShareUpdateCall) -* [remarketing lists](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingList) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::RemarketingListUpdateCall) -* [reports](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Report) - * [*compatible fields query*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportFileGetCall), [*files 
list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportUpdateCall) -* [sites](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Site) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SiteUpdateCall) -* [sizes](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Size) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SizeListCall) -* 
[subaccounts](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::Subaccount) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::SubaccountUpdateCall) -* [targetable remarketing lists](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetableRemarketingList) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetableRemarketingListListCall) -* [targeting templates](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetingTemplate) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::TargetingTemplateUpdateCall) -* [user 
profiles](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserProfile) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserProfileListCall) -* [user role permission groups](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRolePermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRolePermissionGroupListCall) -* [user role permissions](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRolePermission) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRolePermissionListCall) -* [user roles](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRole) - * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::UserRoleUpdateCall) 
-* [video formats](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::VideoFormat) - * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::VideoFormatListCall) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserLandingPageUpdateCall) +* [advertisers](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Advertiser) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::AdvertiserUpdateCall) +* [browsers](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Browser) + * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::BrowserListCall) +* [campaign creative 
associations](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CampaignCreativeAssociation) + * [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CampaignCreativeAssociationListCall) +* [campaigns](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Campaign) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CampaignUpdateCall) +* [change logs](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ChangeLog) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ChangeLogListCall) +* [cities](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::City) + * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CityListCall) +* [connection types](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ConnectionType) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ConnectionTypeListCall) +* [content 
categories](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ContentCategory) + * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ContentCategoryUpdateCall) +* [conversions](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Conversion) + * [*batchinsert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ConversionBatchupdateCall) +* [countries](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Country) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CountryListCall) +* [creative assets](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeAsset) + * [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeAssetInsertCall) +* [creative field values](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldValue) + * 
[*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldValueUpdateCall) +* [creative fields](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeField) + * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeFieldUpdateCall) +* [creative groups](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeGroup) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeGroupInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeGroupUpdateCall) +* [creatives](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Creative) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeUpdateCall) +* [dimension values](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DimensionValue) + * [*query*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DimensionValueQueryCall) +* [directory sites](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DirectorySite) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DirectorySiteListCall) +* [dynamic targeting keys](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DynamicTargetingKey) + * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DynamicTargetingKeyDeleteCall), 
[*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::DynamicTargetingKeyListCall) +* [event tags](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::EventTag) + * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::EventTagUpdateCall) +* [files](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::File) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FileListCall) +* [floodlight activities](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivity) + * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityUpdateCall) +* [floodlight activity groups](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityGroup) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightActivityGroupUpdateCall) +* [floodlight configurations](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightConfiguration) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FloodlightConfigurationUpdateCall) +* [inventory items](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::InventoryItem) + * 
[*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::InventoryItemListCall) +* [languages](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Language) + * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::LanguageListCall) +* [metros](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Metro) + * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::MetroListCall) +* [mobile apps](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::MobileApp) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::MobileAppGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::MobileAppListCall) +* [mobile carriers](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::MobileCarrier) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::MobileCarrierListCall) +* [operating system versions](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OperatingSystemVersion) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OperatingSystemVersionListCall) +* [operating systems](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OperatingSystem) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OperatingSystemGetCall) and 
[*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OperatingSystemListCall) +* [order documents](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OrderDocument) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OrderDocumentListCall) +* [orders](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Order) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::OrderListCall) +* [placement groups](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementGroup) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementGroupUpdateCall) +* [placement strategies](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementStrategy) + * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementStrategyInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementStrategyUpdateCall) +* [placements](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Placement) + * [*generatetags*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlacementUpdateCall) +* [platform types](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlatformType) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PlatformTypeListCall) +* [postal codes](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PostalCode) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::PostalCodeListCall) +* [projects](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Project) + * 
[*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ProjectListCall) +* [regions](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Region) + * [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RegionListCall) +* [remarketing list shares](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListShare) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListShareUpdateCall) +* [remarketing lists](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingList) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::RemarketingListUpdateCall) +* [reports](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Report) + * [*compatible fields query*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportCompatibleFieldQueryCall), 
[*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportUpdateCall) +* [sites](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Site) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SiteUpdateCall) +* [sizes](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Size) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SizeInsertCall) and 
[*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SizeListCall) +* [subaccounts](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::Subaccount) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::SubaccountUpdateCall) +* [targetable remarketing lists](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetableRemarketingList) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetableRemarketingListListCall) +* [targeting templates](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetingTemplate) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::TargetingTemplateUpdateCall) +* [user 
profiles](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserProfile) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserProfileListCall) +* [user role permission groups](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRolePermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRolePermissionGroupListCall) +* [user role permissions](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRolePermission) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRolePermissionListCall) +* [user roles](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRole) + * [*delete*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::UserRoleUpdateCall) +* [video 
formats](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::VideoFormat) + * [*get*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::VideoFormatListCall) Upload supported by ... -* [*insert creative assets*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::CreativeAssetInsertCall) +* [*insert creative assets*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::CreativeAssetInsertCall) Download supported by ... -* [*get files*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::FileGetCall) -* [*files get reports*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/api::ReportFileGetCall) +* [*get files*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::FileGetCall) +* [*files get reports*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/api::ReportFileGetCall) @@ -150,17 +150,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/Dfareporting)** +* **[Hub](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/Dfareporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::CallBuilder) -* **[Resources](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::CallBuilder) +* **[Resources](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::Part)** + * **[Parts](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -258,17 +258,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -278,29 +278,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::Delegate) to the -[Method Builder](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::Delegate) to the +[Method Builder](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::RequestValue) and -[decodable](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::RequestValue) and +[decodable](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dfareporting3d3/5.0.2-beta-1+20220104/google_dfareporting3d3/client::RequestValue) are moved +* [request values](https://docs.rs/google-dfareporting3d3/5.0.2+20220104/google_dfareporting3d3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/dfareporting3d3/src/api.rs b/gen/dfareporting3d3/src/api.rs index 8a02712981..18df16de8b 100644 --- a/gen/dfareporting3d3/src/api.rs +++ b/gen/dfareporting3d3/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> Dfareporting { Dfareporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dfareporting.googleapis.com/dfareporting/v3.3/".to_string(), _root_url: "https://dfareporting.googleapis.com/".to_string(), } @@ -319,7 +319,7 @@ impl<'a, S> Dfareporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dfareporting3d3/src/client.rs b/gen/dfareporting3d3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dfareporting3d3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dfareporting3d3/src/lib.rs b/gen/dfareporting3d3/src/lib.rs index 96ad2e4d3f..70b80d5caa 100644 --- a/gen/dfareporting3d3/src/lib.rs +++ b/gen/dfareporting3d3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dfareporting* crate version *5.0.2-beta-1+20220104*, where *20220104* is the exact revision of the *dfareporting:v3.3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dfareporting* crate version *5.0.2+20220104*, where *20220104* is the exact revision of the *dfareporting:v3.3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dfareporting* *v3d3* API can be found at the //! [official documentation site](https://developers.google.com/doubleclick-advertisers/). diff --git a/gen/dfareporting3d4-cli/Cargo.toml b/gen/dfareporting3d4-cli/Cargo.toml index d0f5e5b147..f7d5be8151 100644 --- a/gen/dfareporting3d4-cli/Cargo.toml +++ b/gen/dfareporting3d4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dfareporting3d4-cli" -version = "4.0.1+20220104" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dfareporting (protocol v3.4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d4-cli" @@ -20,13 +20,13 @@ name = "dfareporting3d4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dfareporting3d4] path = "../dfareporting3d4" -version = "4.0.1+20220104" +version = "5.0.2+20230118" + diff --git a/gen/dfareporting3d4-cli/README.md b/gen/dfareporting3d4-cli/README.md index 393782b168..3733ff8bf5 100644 --- a/gen/dfareporting3d4-cli/README.md +++ b/gen/dfareporting3d4-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dfareporting* API at revision *20220104*. The CLI is at version *4.0.1*. +This documentation was generated from the *Dfareporting* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash dfareporting3d4 [options] diff --git a/gen/dfareporting3d4-cli/mkdocs.yml b/gen/dfareporting3d4-cli/mkdocs.yml index 733beab2ca..62869e657b 100644 --- a/gen/dfareporting3d4-cli/mkdocs.yml +++ b/gen/dfareporting3d4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dfareporting v4.0.1+20220104 +site_name: Dfareporting v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-dfareporting3d4-cli site_description: A complete library to interact with Dfareporting (protocol v3.4) @@ -7,213 +7,274 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d4- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['account-active-ad-summaries_get.md', 'Account Active Ad Summaries', 'Get'] -- ['account-permission-groups_get.md', 'Account Permission Groups', 'Get'] -- ['account-permission-groups_list.md', 'Account Permission Groups', 'List'] -- ['account-permissions_get.md', 'Account Permissions', 'Get'] -- ['account-permissions_list.md', 'Account Permissions', 'List'] -- ['account-user-profiles_get.md', 'Account User Profiles', 'Get'] -- ['account-user-profiles_insert.md', 'Account User Profiles', 'Insert'] -- ['account-user-profiles_list.md', 'Account User Profiles', 'List'] -- ['account-user-profiles_patch.md', 'Account User Profiles', 'Patch'] -- ['account-user-profiles_update.md', 'Account User Profiles', 'Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['ads_get.md', 'Ads', 'Get'] -- ['ads_insert.md', 'Ads', 'Insert'] -- ['ads_list.md', 'Ads', 'List'] 
-- ['ads_patch.md', 'Ads', 'Patch'] -- ['ads_update.md', 'Ads', 'Update'] -- ['advertiser-groups_delete.md', 'Advertiser Groups', 'Delete'] -- ['advertiser-groups_get.md', 'Advertiser Groups', 'Get'] -- ['advertiser-groups_insert.md', 'Advertiser Groups', 'Insert'] -- ['advertiser-groups_list.md', 'Advertiser Groups', 'List'] -- ['advertiser-groups_patch.md', 'Advertiser Groups', 'Patch'] -- ['advertiser-groups_update.md', 'Advertiser Groups', 'Update'] -- ['advertiser-landing-pages_get.md', 'Advertiser Landing Pages', 'Get'] -- ['advertiser-landing-pages_insert.md', 'Advertiser Landing Pages', 'Insert'] -- ['advertiser-landing-pages_list.md', 'Advertiser Landing Pages', 'List'] -- ['advertiser-landing-pages_patch.md', 'Advertiser Landing Pages', 'Patch'] -- ['advertiser-landing-pages_update.md', 'Advertiser Landing Pages', 'Update'] -- ['advertisers_get.md', 'Advertisers', 'Get'] -- ['advertisers_insert.md', 'Advertisers', 'Insert'] -- ['advertisers_list.md', 'Advertisers', 'List'] -- ['advertisers_patch.md', 'Advertisers', 'Patch'] -- ['advertisers_update.md', 'Advertisers', 'Update'] -- ['browsers_list.md', 'Browsers', 'List'] -- ['campaign-creative-associations_insert.md', 'Campaign Creative Associations', 'Insert'] -- ['campaign-creative-associations_list.md', 'Campaign Creative Associations', 'List'] -- ['campaigns_get.md', 'Campaigns', 'Get'] -- ['campaigns_insert.md', 'Campaigns', 'Insert'] -- ['campaigns_list.md', 'Campaigns', 'List'] -- ['campaigns_patch.md', 'Campaigns', 'Patch'] -- ['campaigns_update.md', 'Campaigns', 'Update'] -- ['change-logs_get.md', 'Change Logs', 'Get'] -- ['change-logs_list.md', 'Change Logs', 'List'] -- ['cities_list.md', 'Cities', 'List'] -- ['connection-types_get.md', 'Connection Types', 'Get'] -- ['connection-types_list.md', 'Connection Types', 'List'] -- ['content-categories_delete.md', 'Content Categories', 'Delete'] -- ['content-categories_get.md', 'Content Categories', 'Get'] -- ['content-categories_insert.md', 'Content 
Categories', 'Insert'] -- ['content-categories_list.md', 'Content Categories', 'List'] -- ['content-categories_patch.md', 'Content Categories', 'Patch'] -- ['content-categories_update.md', 'Content Categories', 'Update'] -- ['conversions_batchinsert.md', 'Conversions', 'Batchinsert'] -- ['conversions_batchupdate.md', 'Conversions', 'Batchupdate'] -- ['countries_get.md', 'Countries', 'Get'] -- ['countries_list.md', 'Countries', 'List'] -- ['creative-assets_insert.md', 'Creative Assets', 'Insert'] -- ['creative-field-values_delete.md', 'Creative Field Values', 'Delete'] -- ['creative-field-values_get.md', 'Creative Field Values', 'Get'] -- ['creative-field-values_insert.md', 'Creative Field Values', 'Insert'] -- ['creative-field-values_list.md', 'Creative Field Values', 'List'] -- ['creative-field-values_patch.md', 'Creative Field Values', 'Patch'] -- ['creative-field-values_update.md', 'Creative Field Values', 'Update'] -- ['creative-fields_delete.md', 'Creative Fields', 'Delete'] -- ['creative-fields_get.md', 'Creative Fields', 'Get'] -- ['creative-fields_insert.md', 'Creative Fields', 'Insert'] -- ['creative-fields_list.md', 'Creative Fields', 'List'] -- ['creative-fields_patch.md', 'Creative Fields', 'Patch'] -- ['creative-fields_update.md', 'Creative Fields', 'Update'] -- ['creative-groups_get.md', 'Creative Groups', 'Get'] -- ['creative-groups_insert.md', 'Creative Groups', 'Insert'] -- ['creative-groups_list.md', 'Creative Groups', 'List'] -- ['creative-groups_patch.md', 'Creative Groups', 'Patch'] -- ['creative-groups_update.md', 'Creative Groups', 'Update'] -- ['creatives_get.md', 'Creatives', 'Get'] -- ['creatives_insert.md', 'Creatives', 'Insert'] -- ['creatives_list.md', 'Creatives', 'List'] -- ['creatives_patch.md', 'Creatives', 'Patch'] -- ['creatives_update.md', 'Creatives', 'Update'] -- ['custom-events_batchinsert.md', 'Custom Events', 'Batchinsert'] -- ['dimension-values_query.md', 'Dimension Values', 'Query'] -- ['directory-sites_get.md', 'Directory 
Sites', 'Get'] -- ['directory-sites_insert.md', 'Directory Sites', 'Insert'] -- ['directory-sites_list.md', 'Directory Sites', 'List'] -- ['dynamic-targeting-keys_delete.md', 'Dynamic Targeting Keys', 'Delete'] -- ['dynamic-targeting-keys_insert.md', 'Dynamic Targeting Keys', 'Insert'] -- ['dynamic-targeting-keys_list.md', 'Dynamic Targeting Keys', 'List'] -- ['event-tags_delete.md', 'Event Tags', 'Delete'] -- ['event-tags_get.md', 'Event Tags', 'Get'] -- ['event-tags_insert.md', 'Event Tags', 'Insert'] -- ['event-tags_list.md', 'Event Tags', 'List'] -- ['event-tags_patch.md', 'Event Tags', 'Patch'] -- ['event-tags_update.md', 'Event Tags', 'Update'] -- ['files_get.md', 'Files', 'Get'] -- ['files_list.md', 'Files', 'List'] -- ['floodlight-activities_delete.md', 'Floodlight Activities', 'Delete'] -- ['floodlight-activities_generatetag.md', 'Floodlight Activities', 'Generatetag'] -- ['floodlight-activities_get.md', 'Floodlight Activities', 'Get'] -- ['floodlight-activities_insert.md', 'Floodlight Activities', 'Insert'] -- ['floodlight-activities_list.md', 'Floodlight Activities', 'List'] -- ['floodlight-activities_patch.md', 'Floodlight Activities', 'Patch'] -- ['floodlight-activities_update.md', 'Floodlight Activities', 'Update'] -- ['floodlight-activity-groups_get.md', 'Floodlight Activity Groups', 'Get'] -- ['floodlight-activity-groups_insert.md', 'Floodlight Activity Groups', 'Insert'] -- ['floodlight-activity-groups_list.md', 'Floodlight Activity Groups', 'List'] -- ['floodlight-activity-groups_patch.md', 'Floodlight Activity Groups', 'Patch'] -- ['floodlight-activity-groups_update.md', 'Floodlight Activity Groups', 'Update'] -- ['floodlight-configurations_get.md', 'Floodlight Configurations', 'Get'] -- ['floodlight-configurations_list.md', 'Floodlight Configurations', 'List'] -- ['floodlight-configurations_patch.md', 'Floodlight Configurations', 'Patch'] -- ['floodlight-configurations_update.md', 'Floodlight Configurations', 'Update'] -- 
['inventory-items_get.md', 'Inventory Items', 'Get'] -- ['inventory-items_list.md', 'Inventory Items', 'List'] -- ['languages_list.md', 'Languages', 'List'] -- ['metros_list.md', 'Metros', 'List'] -- ['mobile-apps_get.md', 'Mobile Apps', 'Get'] -- ['mobile-apps_list.md', 'Mobile Apps', 'List'] -- ['mobile-carriers_get.md', 'Mobile Carriers', 'Get'] -- ['mobile-carriers_list.md', 'Mobile Carriers', 'List'] -- ['operating-system-versions_get.md', 'Operating System Versions', 'Get'] -- ['operating-system-versions_list.md', 'Operating System Versions', 'List'] -- ['operating-systems_get.md', 'Operating Systems', 'Get'] -- ['operating-systems_list.md', 'Operating Systems', 'List'] -- ['order-documents_get.md', 'Order Documents', 'Get'] -- ['order-documents_list.md', 'Order Documents', 'List'] -- ['orders_get.md', 'Orders', 'Get'] -- ['orders_list.md', 'Orders', 'List'] -- ['placement-groups_get.md', 'Placement Groups', 'Get'] -- ['placement-groups_insert.md', 'Placement Groups', 'Insert'] -- ['placement-groups_list.md', 'Placement Groups', 'List'] -- ['placement-groups_patch.md', 'Placement Groups', 'Patch'] -- ['placement-groups_update.md', 'Placement Groups', 'Update'] -- ['placement-strategies_delete.md', 'Placement Strategies', 'Delete'] -- ['placement-strategies_get.md', 'Placement Strategies', 'Get'] -- ['placement-strategies_insert.md', 'Placement Strategies', 'Insert'] -- ['placement-strategies_list.md', 'Placement Strategies', 'List'] -- ['placement-strategies_patch.md', 'Placement Strategies', 'Patch'] -- ['placement-strategies_update.md', 'Placement Strategies', 'Update'] -- ['placements_generatetags.md', 'Placements', 'Generatetags'] -- ['placements_get.md', 'Placements', 'Get'] -- ['placements_insert.md', 'Placements', 'Insert'] -- ['placements_list.md', 'Placements', 'List'] -- ['placements_patch.md', 'Placements', 'Patch'] -- ['placements_update.md', 'Placements', 'Update'] -- ['platform-types_get.md', 'Platform Types', 'Get'] -- 
['platform-types_list.md', 'Platform Types', 'List'] -- ['postal-codes_get.md', 'Postal Codes', 'Get'] -- ['postal-codes_list.md', 'Postal Codes', 'List'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_list.md', 'Projects', 'List'] -- ['regions_list.md', 'Regions', 'List'] -- ['remarketing-list-shares_get.md', 'Remarketing List Shares', 'Get'] -- ['remarketing-list-shares_patch.md', 'Remarketing List Shares', 'Patch'] -- ['remarketing-list-shares_update.md', 'Remarketing List Shares', 'Update'] -- ['remarketing-lists_get.md', 'Remarketing Lists', 'Get'] -- ['remarketing-lists_insert.md', 'Remarketing Lists', 'Insert'] -- ['remarketing-lists_list.md', 'Remarketing Lists', 'List'] -- ['remarketing-lists_patch.md', 'Remarketing Lists', 'Patch'] -- ['remarketing-lists_update.md', 'Remarketing Lists', 'Update'] -- ['reports_compatible-fields-query.md', 'Reports', 'Compatible Fields Query'] -- ['reports_delete.md', 'Reports', 'Delete'] -- ['reports_files-get.md', 'Reports', 'Files Get'] -- ['reports_files-list.md', 'Reports', 'Files List'] -- ['reports_get.md', 'Reports', 'Get'] -- ['reports_insert.md', 'Reports', 'Insert'] -- ['reports_list.md', 'Reports', 'List'] -- ['reports_patch.md', 'Reports', 'Patch'] -- ['reports_run.md', 'Reports', 'Run'] -- ['reports_update.md', 'Reports', 'Update'] -- ['sites_get.md', 'Sites', 'Get'] -- ['sites_insert.md', 'Sites', 'Insert'] -- ['sites_list.md', 'Sites', 'List'] -- ['sites_patch.md', 'Sites', 'Patch'] -- ['sites_update.md', 'Sites', 'Update'] -- ['sizes_get.md', 'Sizes', 'Get'] -- ['sizes_insert.md', 'Sizes', 'Insert'] -- ['sizes_list.md', 'Sizes', 'List'] -- ['subaccounts_get.md', 'Subaccounts', 'Get'] -- ['subaccounts_insert.md', 'Subaccounts', 'Insert'] -- ['subaccounts_list.md', 'Subaccounts', 'List'] -- ['subaccounts_patch.md', 'Subaccounts', 'Patch'] -- ['subaccounts_update.md', 'Subaccounts', 'Update'] -- ['targetable-remarketing-lists_get.md', 'Targetable Remarketing Lists', 'Get'] -- 
['targetable-remarketing-lists_list.md', 'Targetable Remarketing Lists', 'List'] -- ['targeting-templates_get.md', 'Targeting Templates', 'Get'] -- ['targeting-templates_insert.md', 'Targeting Templates', 'Insert'] -- ['targeting-templates_list.md', 'Targeting Templates', 'List'] -- ['targeting-templates_patch.md', 'Targeting Templates', 'Patch'] -- ['targeting-templates_update.md', 'Targeting Templates', 'Update'] -- ['user-profiles_get.md', 'User Profiles', 'Get'] -- ['user-profiles_list.md', 'User Profiles', 'List'] -- ['user-role-permission-groups_get.md', 'User Role Permission Groups', 'Get'] -- ['user-role-permission-groups_list.md', 'User Role Permission Groups', 'List'] -- ['user-role-permissions_get.md', 'User Role Permissions', 'Get'] -- ['user-role-permissions_list.md', 'User Role Permissions', 'List'] -- ['user-roles_delete.md', 'User Roles', 'Delete'] -- ['user-roles_get.md', 'User Roles', 'Get'] -- ['user-roles_insert.md', 'User Roles', 'Insert'] -- ['user-roles_list.md', 'User Roles', 'List'] -- ['user-roles_patch.md', 'User Roles', 'Patch'] -- ['user-roles_update.md', 'User Roles', 'Update'] -- ['video-formats_get.md', 'Video Formats', 'Get'] -- ['video-formats_list.md', 'Video Formats', 'List'] +nav: +- Home: 'index.md' +- 'Account Active Ad Summaries': + - 'Get': 'account-active-ad-summaries_get.md' +- 'Account Permission Groups': + - 'Get': 'account-permission-groups_get.md' + - 'List': 'account-permission-groups_list.md' +- 'Account Permissions': + - 'Get': 'account-permissions_get.md' + - 'List': 'account-permissions_list.md' +- 'Account User Profiles': + - 'Get': 'account-user-profiles_get.md' + - 'Insert': 'account-user-profiles_insert.md' + - 'List': 'account-user-profiles_list.md' + - 'Patch': 'account-user-profiles_patch.md' + - 'Update': 'account-user-profiles_update.md' +- 'Accounts': + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' + - 'Update': 'accounts_update.md' +- 'Ads': + - 'Get': 
'ads_get.md' + - 'Insert': 'ads_insert.md' + - 'List': 'ads_list.md' + - 'Patch': 'ads_patch.md' + - 'Update': 'ads_update.md' +- 'Advertiser Groups': + - 'Delete': 'advertiser-groups_delete.md' + - 'Get': 'advertiser-groups_get.md' + - 'Insert': 'advertiser-groups_insert.md' + - 'List': 'advertiser-groups_list.md' + - 'Patch': 'advertiser-groups_patch.md' + - 'Update': 'advertiser-groups_update.md' +- 'Advertiser Landing Pages': + - 'Get': 'advertiser-landing-pages_get.md' + - 'Insert': 'advertiser-landing-pages_insert.md' + - 'List': 'advertiser-landing-pages_list.md' + - 'Patch': 'advertiser-landing-pages_patch.md' + - 'Update': 'advertiser-landing-pages_update.md' +- 'Advertisers': + - 'Get': 'advertisers_get.md' + - 'Insert': 'advertisers_insert.md' + - 'List': 'advertisers_list.md' + - 'Patch': 'advertisers_patch.md' + - 'Update': 'advertisers_update.md' +- 'Browsers': + - 'List': 'browsers_list.md' +- 'Campaign Creative Associations': + - 'Insert': 'campaign-creative-associations_insert.md' + - 'List': 'campaign-creative-associations_list.md' +- 'Campaigns': + - 'Get': 'campaigns_get.md' + - 'Insert': 'campaigns_insert.md' + - 'List': 'campaigns_list.md' + - 'Patch': 'campaigns_patch.md' + - 'Update': 'campaigns_update.md' +- 'Change Logs': + - 'Get': 'change-logs_get.md' + - 'List': 'change-logs_list.md' +- 'Cities': + - 'List': 'cities_list.md' +- 'Connection Types': + - 'Get': 'connection-types_get.md' + - 'List': 'connection-types_list.md' +- 'Content Categories': + - 'Delete': 'content-categories_delete.md' + - 'Get': 'content-categories_get.md' + - 'Insert': 'content-categories_insert.md' + - 'List': 'content-categories_list.md' + - 'Patch': 'content-categories_patch.md' + - 'Update': 'content-categories_update.md' +- 'Conversions': + - 'Batchinsert': 'conversions_batchinsert.md' + - 'Batchupdate': 'conversions_batchupdate.md' +- 'Countries': + - 'Get': 'countries_get.md' + - 'List': 'countries_list.md' +- 'Creative Assets': + - 'Insert': 
'creative-assets_insert.md' +- 'Creative Field Values': + - 'Delete': 'creative-field-values_delete.md' + - 'Get': 'creative-field-values_get.md' + - 'Insert': 'creative-field-values_insert.md' + - 'List': 'creative-field-values_list.md' + - 'Patch': 'creative-field-values_patch.md' + - 'Update': 'creative-field-values_update.md' +- 'Creative Fields': + - 'Delete': 'creative-fields_delete.md' + - 'Get': 'creative-fields_get.md' + - 'Insert': 'creative-fields_insert.md' + - 'List': 'creative-fields_list.md' + - 'Patch': 'creative-fields_patch.md' + - 'Update': 'creative-fields_update.md' +- 'Creative Groups': + - 'Get': 'creative-groups_get.md' + - 'Insert': 'creative-groups_insert.md' + - 'List': 'creative-groups_list.md' + - 'Patch': 'creative-groups_patch.md' + - 'Update': 'creative-groups_update.md' +- 'Creatives': + - 'Get': 'creatives_get.md' + - 'Insert': 'creatives_insert.md' + - 'List': 'creatives_list.md' + - 'Patch': 'creatives_patch.md' + - 'Update': 'creatives_update.md' +- 'Custom Events': + - 'Batchinsert': 'custom-events_batchinsert.md' +- 'Dimension Values': + - 'Query': 'dimension-values_query.md' +- 'Directory Sites': + - 'Get': 'directory-sites_get.md' + - 'Insert': 'directory-sites_insert.md' + - 'List': 'directory-sites_list.md' +- 'Dynamic Targeting Keys': + - 'Delete': 'dynamic-targeting-keys_delete.md' + - 'Insert': 'dynamic-targeting-keys_insert.md' + - 'List': 'dynamic-targeting-keys_list.md' +- 'Event Tags': + - 'Delete': 'event-tags_delete.md' + - 'Get': 'event-tags_get.md' + - 'Insert': 'event-tags_insert.md' + - 'List': 'event-tags_list.md' + - 'Patch': 'event-tags_patch.md' + - 'Update': 'event-tags_update.md' +- 'Files': + - 'Get': 'files_get.md' + - 'List': 'files_list.md' +- 'Floodlight Activities': + - 'Delete': 'floodlight-activities_delete.md' + - 'Generatetag': 'floodlight-activities_generatetag.md' + - 'Get': 'floodlight-activities_get.md' + - 'Insert': 'floodlight-activities_insert.md' + - 'List': 
'floodlight-activities_list.md' + - 'Patch': 'floodlight-activities_patch.md' + - 'Update': 'floodlight-activities_update.md' +- 'Floodlight Activity Groups': + - 'Get': 'floodlight-activity-groups_get.md' + - 'Insert': 'floodlight-activity-groups_insert.md' + - 'List': 'floodlight-activity-groups_list.md' + - 'Patch': 'floodlight-activity-groups_patch.md' + - 'Update': 'floodlight-activity-groups_update.md' +- 'Floodlight Configurations': + - 'Get': 'floodlight-configurations_get.md' + - 'List': 'floodlight-configurations_list.md' + - 'Patch': 'floodlight-configurations_patch.md' + - 'Update': 'floodlight-configurations_update.md' +- 'Inventory Items': + - 'Get': 'inventory-items_get.md' + - 'List': 'inventory-items_list.md' +- 'Languages': + - 'List': 'languages_list.md' +- 'Metros': + - 'List': 'metros_list.md' +- 'Mobile Apps': + - 'Get': 'mobile-apps_get.md' + - 'List': 'mobile-apps_list.md' +- 'Mobile Carriers': + - 'Get': 'mobile-carriers_get.md' + - 'List': 'mobile-carriers_list.md' +- 'Operating System Versions': + - 'Get': 'operating-system-versions_get.md' + - 'List': 'operating-system-versions_list.md' +- 'Operating Systems': + - 'Get': 'operating-systems_get.md' + - 'List': 'operating-systems_list.md' +- 'Order Documents': + - 'Get': 'order-documents_get.md' + - 'List': 'order-documents_list.md' +- 'Orders': + - 'Get': 'orders_get.md' + - 'List': 'orders_list.md' +- 'Placement Groups': + - 'Get': 'placement-groups_get.md' + - 'Insert': 'placement-groups_insert.md' + - 'List': 'placement-groups_list.md' + - 'Patch': 'placement-groups_patch.md' + - 'Update': 'placement-groups_update.md' +- 'Placement Strategies': + - 'Delete': 'placement-strategies_delete.md' + - 'Get': 'placement-strategies_get.md' + - 'Insert': 'placement-strategies_insert.md' + - 'List': 'placement-strategies_list.md' + - 'Patch': 'placement-strategies_patch.md' + - 'Update': 'placement-strategies_update.md' +- 'Placements': + - 'Generatetags': 'placements_generatetags.md' + - 'Get': 
'placements_get.md' + - 'Insert': 'placements_insert.md' + - 'List': 'placements_list.md' + - 'Patch': 'placements_patch.md' + - 'Update': 'placements_update.md' +- 'Platform Types': + - 'Get': 'platform-types_get.md' + - 'List': 'platform-types_list.md' +- 'Postal Codes': + - 'Get': 'postal-codes_get.md' + - 'List': 'postal-codes_list.md' +- 'Projects': + - 'Get': 'projects_get.md' + - 'List': 'projects_list.md' +- 'Regions': + - 'List': 'regions_list.md' +- 'Remarketing List Shares': + - 'Get': 'remarketing-list-shares_get.md' + - 'Patch': 'remarketing-list-shares_patch.md' + - 'Update': 'remarketing-list-shares_update.md' +- 'Remarketing Lists': + - 'Get': 'remarketing-lists_get.md' + - 'Insert': 'remarketing-lists_insert.md' + - 'List': 'remarketing-lists_list.md' + - 'Patch': 'remarketing-lists_patch.md' + - 'Update': 'remarketing-lists_update.md' +- 'Reports': + - 'Compatible Fields Query': 'reports_compatible-fields-query.md' + - 'Delete': 'reports_delete.md' + - 'Files Get': 'reports_files-get.md' + - 'Files List': 'reports_files-list.md' + - 'Get': 'reports_get.md' + - 'Insert': 'reports_insert.md' + - 'List': 'reports_list.md' + - 'Patch': 'reports_patch.md' + - 'Run': 'reports_run.md' + - 'Update': 'reports_update.md' +- 'Sites': + - 'Get': 'sites_get.md' + - 'Insert': 'sites_insert.md' + - 'List': 'sites_list.md' + - 'Patch': 'sites_patch.md' + - 'Update': 'sites_update.md' +- 'Sizes': + - 'Get': 'sizes_get.md' + - 'Insert': 'sizes_insert.md' + - 'List': 'sizes_list.md' +- 'Subaccounts': + - 'Get': 'subaccounts_get.md' + - 'Insert': 'subaccounts_insert.md' + - 'List': 'subaccounts_list.md' + - 'Patch': 'subaccounts_patch.md' + - 'Update': 'subaccounts_update.md' +- 'Targetable Remarketing Lists': + - 'Get': 'targetable-remarketing-lists_get.md' + - 'List': 'targetable-remarketing-lists_list.md' +- 'Targeting Templates': + - 'Get': 'targeting-templates_get.md' + - 'Insert': 'targeting-templates_insert.md' + - 'List': 'targeting-templates_list.md' + - 
'Patch': 'targeting-templates_patch.md' + - 'Update': 'targeting-templates_update.md' +- 'User Profiles': + - 'Get': 'user-profiles_get.md' + - 'List': 'user-profiles_list.md' +- 'User Role Permission Groups': + - 'Get': 'user-role-permission-groups_get.md' + - 'List': 'user-role-permission-groups_list.md' +- 'User Role Permissions': + - 'Get': 'user-role-permissions_get.md' + - 'List': 'user-role-permissions_list.md' +- 'User Roles': + - 'Delete': 'user-roles_delete.md' + - 'Get': 'user-roles_get.md' + - 'Insert': 'user-roles_insert.md' + - 'List': 'user-roles_list.md' + - 'Patch': 'user-roles_patch.md' + - 'Update': 'user-roles_update.md' +- 'Video Formats': + - 'Get': 'video-formats_get.md' + - 'List': 'video-formats_list.md' theme: readthedocs diff --git a/gen/dfareporting3d4-cli/src/client.rs b/gen/dfareporting3d4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dfareporting3d4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dfareporting3d4-cli/src/main.rs b/gen/dfareporting3d4-cli/src/main.rs index 4da0700955..51d4fff555 100644 --- a/gen/dfareporting3d4-cli/src/main.rs +++ b/gen/dfareporting3d4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dfareporting3d4::{api, Error, oauth2}; +use google_dfareporting3d4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -478,10 +477,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-role-id" => { - call = call.user_role_id(value.unwrap_or("")); + call = call.user_role_id( value.map(|v| arg_from_str(v, err, "user-role-id", "int64")).unwrap_or(-0)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -496,13 +495,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = 
call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -838,13 +837,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1317,10 +1316,10 @@ where call = call.add_type(value.unwrap_or("")); }, "ssl-required" => { - call = call.ssl_required(arg_from_str(value.unwrap_or("false"), err, "ssl-required", "boolean")); + call = call.ssl_required( value.map(|v| arg_from_str(v, err, "ssl-required", "boolean")).unwrap_or(false)); }, "ssl-compliant" => { - call = call.ssl_compliant(arg_from_str(value.unwrap_or("false"), err, "ssl-compliant", "boolean")); + call = call.ssl_compliant( value.map(|v| arg_from_str(v, err, "ssl-compliant", "boolean")).unwrap_or(false)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -1329,58 +1328,58 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "remarketing-list-ids" => { - call = call.add_remarketing_list_ids(value.unwrap_or("")); + call = call.add_remarketing_list_ids( value.map(|v| arg_from_str(v, err, "remarketing-list-ids", "int64")).unwrap_or(-0)); }, "placement-ids" => { - call = 
call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "landing-page-ids" => { - call = call.add_landing_page_ids(value.unwrap_or("")); + call = call.add_landing_page_ids( value.map(|v| arg_from_str(v, err, "landing-page-ids", "int64")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dynamic-click-tracker" => { - call = call.dynamic_click_tracker(arg_from_str(value.unwrap_or("false"), err, "dynamic-click-tracker", "boolean")); + call = call.dynamic_click_tracker( value.map(|v| arg_from_str(v, err, "dynamic-click-tracker", "boolean")).unwrap_or(false)); }, "creative-optimization-configuration-ids" => { - call = call.add_creative_optimization_configuration_ids(value.unwrap_or("")); + call = call.add_creative_optimization_configuration_ids( value.map(|v| arg_from_str(v, err, "creative-optimization-configuration-ids", "int64")).unwrap_or(-0)); }, "creative-ids" => { - call = call.add_creative_ids(value.unwrap_or("")); + call = call.add_creative_ids( value.map(|v| arg_from_str(v, err, "creative-ids", "int64")).unwrap_or(-0)); }, "compatibility" => { call = call.compatibility(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", 
"int64")).unwrap_or(-0)); }, "audience-segment-ids" => { - call = call.add_audience_segment_ids(value.unwrap_or("")); + call = call.add_audience_segment_ids( value.map(|v| arg_from_str(v, err, "audience-segment-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1932,10 +1931,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2309,7 +2308,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -2324,19 +2323,19 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - 
call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2733,7 +2732,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "status" => { call = call.status(value.unwrap_or("")); @@ -2751,22 +2750,22 @@ where call = call.page_token(value.unwrap_or("")); }, "only-parent" => { - call = call.only_parent(arg_from_str(value.unwrap_or("false"), err, "only-parent", "boolean")); + call = call.only_parent( value.map(|v| arg_from_str(v, err, "only-parent", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-advertisers-without-groups-only" => { - call = call.include_advertisers_without_groups_only(arg_from_str(value.unwrap_or("false"), err, "include-advertisers-without-groups-only", "boolean")); + call = call.include_advertisers_without_groups_only( value.map(|v| arg_from_str(v, err, "include-advertisers-without-groups-only", "boolean")).unwrap_or(false)); }, "ids" => { - call = 
call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-ids" => { - call = call.add_floodlight_configuration_ids(value.unwrap_or("")); + call = call.add_floodlight_configuration_ids( value.map(|v| arg_from_str(v, err, "floodlight-configuration-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3184,7 +3183,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3418,7 +3417,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -3433,28 +3432,28 @@ where call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "excluded-ids" => { - call = 
call.add_excluded_ids(value.unwrap_or("")); + call = call.add_excluded_ids( value.map(|v| arg_from_str(v, err, "excluded-ids", "int64")).unwrap_or(-0)); }, "at-least-one-optimization-activity" => { - call = call.at_least_one_optimization_activity(arg_from_str(value.unwrap_or("false"), err, "at-least-one-optimization-activity", "boolean")); + call = call.at_least_one_optimization_activity( value.map(|v| arg_from_str(v, err, "at-least-one-optimization-activity", "boolean")).unwrap_or(false)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3814,7 +3813,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-profile-ids" => { - call = call.add_user_profile_ids(value.unwrap_or("")); + call = call.add_user_profile_ids( value.map(|v| arg_from_str(v, err, "user-profile-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -3826,19 +3825,19 @@ where call = call.object_type(value.unwrap_or("")); }, "object-ids" => { - call = call.add_object_ids(value.unwrap_or("")); + call = call.add_object_ids( value.map(|v| arg_from_str(v, err, "object-ids", "int64")).unwrap_or(-0)); }, "min-change-time" => { call = call.min_change_time(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( 
value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-change-time" => { call = call.max_change_time(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "action" => { call = call.action(value.unwrap_or("")); @@ -3897,16 +3896,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "region-dart-ids" => { - call = call.add_region_dart_ids(value.unwrap_or("")); + call = call.add_region_dart_ids( value.map(|v| arg_from_str(v, err, "region-dart-ids", "int64")).unwrap_or(-0)); }, "name-prefix" => { call = call.name_prefix(value.unwrap_or("")); }, "dart-ids" => { - call = call.add_dart_ids(value.unwrap_or("")); + call = call.add_dart_ids( value.map(|v| arg_from_str(v, err, "dart-ids", "int64")).unwrap_or(-0)); }, "country-dart-ids" => { - call = call.add_country_dart_ids(value.unwrap_or("")); + call = call.add_country_dart_ids( value.map(|v| arg_from_str(v, err, "country-dart-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4262,10 +4261,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5074,10 +5073,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| 
arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5511,13 +5510,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5926,16 +5925,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-number" => { - call = call.group_number(arg_from_str(value.unwrap_or("-0"), err, "group-number", "integer")); + call = call.group_number( value.map(|v| arg_from_str(v, err, "group-number", "int32")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6416,7 +6415,7 @@ where call = call.add_types(value.unwrap_or("")); }, "studio-creative-id" => { - call = call.studio_creative_id(value.unwrap_or("")); + call = call.studio_creative_id( value.map(|v| arg_from_str(v, err, "studio-creative-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ 
-6425,40 +6424,40 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "rendering-ids" => { - call = call.add_rendering_ids(value.unwrap_or("")); + call = call.add_rendering_ids( value.map(|v| arg_from_str(v, err, "rendering-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "creative-field-ids" => { - call = call.add_creative_field_ids(value.unwrap_or("")); + call = call.add_creative_field_ids( value.map(|v| arg_from_str(v, err, "creative-field-ids", "int64")).unwrap_or(-0)); }, "companion-creative-ids" => { - call = call.add_companion_creative_ids(value.unwrap_or("")); + call = call.add_companion_creative_ids( value.map(|v| arg_from_str(v, err, "companion-creative-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = 
call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6990,7 +6989,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7214,25 +7213,25 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dfp-network-code" => { call = call.dfp_network_code(value.unwrap_or("")); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = 
call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7423,13 +7422,13 @@ where call = call.object_type(value.unwrap_or("")); }, "object-id" => { - call = call.object_id(value.unwrap_or("")); + call = call.object_id( value.map(|v| arg_from_str(v, err, "object-id", "int64")).unwrap_or(-0)); }, "names" => { call = call.add_names(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7702,25 +7701,25 @@ where call = call.search_string(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "event-tag-types" => { call = call.add_event_tag_types(value.unwrap_or("")); }, "enabled" => { - call = call.enabled(arg_from_str(value.unwrap_or("false"), err, "enabled", "boolean")); + call = call.enabled( value.map(|v| arg_from_str(v, err, "enabled", "boolean")).unwrap_or(false)); }, "definitions-only" => { - call = call.definitions_only(arg_from_str(value.unwrap_or("false"), err, "definitions-only", "boolean")); + call = call.definitions_only( value.map(|v| arg_from_str(v, err, "definitions-only", "boolean")).unwrap_or(false)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "ad-id" => { - call = 
call.ad_id(value.unwrap_or("")); + call = call.ad_id( value.map(|v| arg_from_str(v, err, "ad-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8074,7 +8073,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8174,7 +8173,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8423,13 +8422,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "floodlight-activity-group-type" => { call = call.floodlight_activity_group_type(value.unwrap_or("")); @@ -8441,10 +8440,10 @@ where call = call.floodlight_activity_group_name(value.unwrap_or("")); }, "floodlight-activity-group-ids" => { - call = call.add_floodlight_activity_group_ids(value.unwrap_or("")); + call = call.add_floodlight_activity_group_ids( value.map(|v| arg_from_str(v, err, "floodlight-activity-group-ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = 
call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8930,16 +8929,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9269,7 +9268,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9622,22 +9621,22 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, 
err, "max-results", "int32")).unwrap_or(-0)); }, "in-plan" => { - call = call.in_plan(arg_from_str(value.unwrap_or("false"), err, "in-plan", "boolean")); + call = call.in_plan( value.map(|v| arg_from_str(v, err, "in-plan", "boolean")).unwrap_or(false)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9855,7 +9854,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { call = call.add_ids(value.unwrap_or("")); @@ -10287,7 +10286,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10296,16 +10295,16 @@ where call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -10419,7 +10418,7 @@ where call = 
call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10428,10 +10427,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10691,7 +10690,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10700,7 +10699,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "placement-group-type" => { call = call.placement_group_type(value.unwrap_or("")); @@ -10718,28 +10717,28 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, 
"directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11283,10 +11282,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11521,10 +11520,10 @@ where call = call.add_tag_formats(value.unwrap_or("")); }, "placement-ids" => { - call = call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = 
false; @@ -11831,10 +11830,10 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -11843,7 +11842,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "payment-source" => { call = call.payment_source(value.unwrap_or("")); @@ -11861,34 +11860,34 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-ids" => { - call = call.add_group_ids(value.unwrap_or("")); + call = call.add_group_ids( value.map(|v| arg_from_str(v, err, "group-ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, 
"compatibilities" => { call = call.add_compatibilities(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12602,13 +12601,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13111,13 +13110,13 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = 
call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -13687,7 +13686,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14000,7 +13999,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14249,7 +14248,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "synchronous" => { - call = call.synchronous(arg_from_str(value.unwrap_or("false"), err, "synchronous", "boolean")); + call = call.synchronous( value.map(|v| arg_from_str(v, err, "synchronous", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14689,10 +14688,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unmapped-site" => { - call = call.unmapped_site(arg_from_str(value.unwrap_or("false"), err, "unmapped-site", "boolean")); + call = call.unmapped_site( value.map(|v| arg_from_str(v, err, "unmapped-site", "boolean")).unwrap_or(false)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -14707,31 +14706,31 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", 
"int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, "ad-words-site" => { - call = call.ad_words_site(arg_from_str(value.unwrap_or("false"), err, "ad-words-site", "boolean")); + call = call.ad_words_site( value.map(|v| arg_from_str(v, err, "ad-words-site", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15206,16 +15205,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "width" => { - call = call.width(arg_from_str(value.unwrap_or("-0"), err, "width", "integer")); + call = call.width( value.map(|v| arg_from_str(v, err, "width", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "iab-standard" => { - call = call.iab_standard(arg_from_str(value.unwrap_or("false"), err, "iab-standard", "boolean")); + call = call.iab_standard( value.map(|v| arg_from_str(v, err, "iab-standard", "boolean")).unwrap_or(false)); }, "height" => { - call = call.height(arg_from_str(value.unwrap_or("-0"), err, "height", "integer")); + call = call.height( value.map(|v| arg_from_str(v, err, "height", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15424,10 +15423,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15725,10 +15724,10 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15950,13 +15949,13 @@ where 
call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16476,7 +16475,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16719,7 +16718,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -16734,13 +16733,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "account-user-role-only" => { - call = call.account_user_role_only(arg_from_str(value.unwrap_or("false"), err, "account-user-role-only", "boolean")); + call = call.account_user_role_only( value.map(|v| arg_from_str(v, err, "account-user-role-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = 
false; @@ -24059,7 +24058,7 @@ async fn main() { let mut app = App::new("dfareporting3d4") .author("Sebastian Thiel ") - .version("4.0.1+20220104") + .version("5.0.2+20230118") .about("Build applications to efficiently manage large or complex trafficking, reporting, and attribution workflows for Campaign Manager 360.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dfareporting3d4_cli") .arg(Arg::with_name("url") diff --git a/gen/dfareporting3d4/Cargo.toml b/gen/dfareporting3d4/Cargo.toml index f09d812cb1..9f65d97061 100644 --- a/gen/dfareporting3d4/Cargo.toml +++ b/gen/dfareporting3d4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dfareporting3d4" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dfareporting (protocol v3.4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d4" homepage = "https://developers.google.com/doubleclick-advertisers/" -documentation = "https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-dfareporting3d4/5.0.2+20230118" license = "MIT" keywords = ["dfareporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dfareporting3d4/README.md b/gen/dfareporting3d4/README.md index 75e510e030..5449c91030 100644 --- a/gen/dfareporting3d4/README.md +++ b/gen/dfareporting3d4/README.md @@ -5,146 +5,146 @@ DO NOT EDIT ! --> The `google-dfareporting3d4` library allows access to all features of the *Google Dfareporting* service. -This documentation was generated from *Dfareporting* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *dfareporting:v3.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Dfareporting* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *dfareporting:v3.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dfareporting* *v3d4* API can be found at the [official documentation site](https://developers.google.com/doubleclick-advertisers/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/Dfareporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/Dfareporting) ... -* [account active ad summaries](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountActiveAdSummary) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountActiveAdSummaryGetCall) -* [account permission groups](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountPermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountPermissionGroupListCall) -* [account permissions](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountPermission) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountPermissionListCall) -* [account user profiles](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountUserProfile) - * 
[*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountUserProfileUpdateCall) -* [accounts](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Account) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AccountUpdateCall) -* [ads](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Ad) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdUpdateCall) -* [advertiser 
groups](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserGroup) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserGroupUpdateCall) +* [account active ad summaries](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountActiveAdSummary) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountActiveAdSummaryGetCall) +* [account permission groups](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountPermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountPermissionGroupListCall) +* [account permissions](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountPermission) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountPermissionListCall) +* [account user 
profiles](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountUserProfile) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountUserProfileUpdateCall) +* [accounts](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Account) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AccountUpdateCall) +* [ads](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Ad) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdUpdateCall) +* [advertiser 
groups](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserGroup) + * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserGroupUpdateCall) * advertiser landing pages - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserLandingPageUpdateCall) -* [advertisers](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Advertiser) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserListCall), 
[*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::AdvertiserUpdateCall) -* [browsers](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Browser) - * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::BrowserListCall) -* [campaign creative associations](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CampaignCreativeAssociation) - * [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CampaignCreativeAssociationListCall) -* [campaigns](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Campaign) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CampaignUpdateCall) -* [change logs](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ChangeLog) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ChangeLogListCall) -* 
[cities](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::City) - * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CityListCall) -* [connection types](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ConnectionType) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ConnectionTypeListCall) -* [content categories](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ContentCategory) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ContentCategoryUpdateCall) -* [conversions](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Conversion) - * [*batchinsert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ConversionBatchupdateCall) -* [countries](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Country) - * 
[*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CountryListCall) -* [creative assets](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeAsset) - * [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeAssetInsertCall) -* [creative field values](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldValue) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldValueUpdateCall) -* [creative fields](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeField) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeFieldUpdateCall) -* [creative groups](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeGroup) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeGroupUpdateCall) -* [creatives](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Creative) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeUpdateCall) -* [custom events](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CustomEvent) - * 
[*batchinsert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CustomEventBatchinsertCall) -* [dimension values](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DimensionValue) - * [*query*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DimensionValueQueryCall) -* [directory sites](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DirectorySite) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DirectorySiteListCall) -* [dynamic targeting keys](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DynamicTargetingKey) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DynamicTargetingKeyDeleteCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::DynamicTargetingKeyListCall) -* [event tags](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::EventTag) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::EventTagInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::EventTagUpdateCall) -* [files](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::File) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FileListCall) -* [floodlight activities](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivity) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityUpdateCall) -* [floodlight activity groups](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityGroup) - * 
[*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightActivityGroupUpdateCall) -* [floodlight configurations](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightConfiguration) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FloodlightConfigurationUpdateCall) -* [inventory items](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::InventoryItem) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::InventoryItemListCall) -* [languages](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Language) - * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::LanguageListCall) -* 
[metros](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Metro) - * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::MetroListCall) -* [mobile apps](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::MobileApp) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::MobileAppGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::MobileAppListCall) -* [mobile carriers](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::MobileCarrier) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::MobileCarrierListCall) -* [operating system versions](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OperatingSystemVersion) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OperatingSystemVersionListCall) -* [operating systems](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OperatingSystem) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OperatingSystemListCall) -* [order documents](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OrderDocument) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OrderDocumentGetCall) and 
[*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OrderDocumentListCall) -* [orders](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Order) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::OrderListCall) -* [placement groups](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementGroup) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementGroupUpdateCall) -* [placement strategies](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementStrategy) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementStrategyPatchCall) and 
[*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementStrategyUpdateCall) -* [placements](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Placement) - * [*generatetags*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlacementUpdateCall) -* [platform types](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlatformType) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PlatformTypeListCall) -* [postal codes](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PostalCode) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::PostalCodeListCall) -* [projects](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Project) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ProjectGetCall) and 
[*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ProjectListCall) -* [regions](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Region) - * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RegionListCall) -* [remarketing list shares](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListShare) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListShareUpdateCall) -* [remarketing lists](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingList) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::RemarketingListUpdateCall) -* [reports](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Report) - * [*compatible fields query*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportCompatibleFieldQueryCall), 
[*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportUpdateCall) -* [sites](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Site) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SiteUpdateCall) -* [sizes](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Size) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SizeGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SizeListCall) -* [subaccounts](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::Subaccount) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::SubaccountUpdateCall) -* [targetable remarketing lists](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetableRemarketingList) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetableRemarketingListListCall) -* [targeting templates](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetingTemplate) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetingTemplateListCall), 
[*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::TargetingTemplateUpdateCall) -* [user profiles](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserProfile) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserProfileListCall) -* [user role permission groups](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRolePermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRolePermissionGroupListCall) -* [user role permissions](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRolePermission) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRolePermissionListCall) -* [user roles](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRole) - * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRoleInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::UserRoleUpdateCall) -* [video formats](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::VideoFormat) - * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::VideoFormatListCall) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserLandingPageUpdateCall) +* [advertisers](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Advertiser) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserPatchCall) and 
[*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::AdvertiserUpdateCall) +* [browsers](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Browser) + * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::BrowserListCall) +* [campaign creative associations](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CampaignCreativeAssociation) + * [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CampaignCreativeAssociationListCall) +* [campaigns](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Campaign) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CampaignUpdateCall) +* [change logs](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ChangeLog) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ChangeLogListCall) +* [cities](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::City) + * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CityListCall) +* [connection 
types](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ConnectionType) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ConnectionTypeListCall) +* [content categories](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ContentCategory) + * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ContentCategoryUpdateCall) +* [conversions](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Conversion) + * [*batchinsert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ConversionBatchupdateCall) +* [countries](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Country) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CountryListCall) +* [creative assets](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeAsset) 
+ * [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeAssetInsertCall) +* [creative field values](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldValue) + * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldValueUpdateCall) +* [creative fields](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeField) + * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeFieldUpdateCall) +* [creative groups](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeGroup) + * 
[*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeGroupUpdateCall) +* [creatives](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Creative) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeUpdateCall) +* [custom events](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CustomEvent) + * [*batchinsert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CustomEventBatchinsertCall) +* [dimension values](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DimensionValue) + * [*query*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DimensionValueQueryCall) +* [directory sites](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DirectorySite) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DirectorySiteGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DirectorySiteListCall) +* [dynamic targeting keys](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DynamicTargetingKey) + * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DynamicTargetingKeyDeleteCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::DynamicTargetingKeyListCall) +* [event tags](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::EventTag) + * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::EventTagUpdateCall) +* [files](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::File) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FileListCall) +* [floodlight activities](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivity) + * 
[*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityUpdateCall) +* [floodlight activity groups](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityGroup) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightActivityGroupUpdateCall) +* [floodlight configurations](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightConfiguration) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightConfigurationGetCall), 
[*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FloodlightConfigurationUpdateCall) +* [inventory items](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::InventoryItem) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::InventoryItemListCall) +* [languages](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Language) + * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::LanguageListCall) +* [metros](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Metro) + * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::MetroListCall) +* [mobile apps](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::MobileApp) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::MobileAppGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::MobileAppListCall) +* [mobile carriers](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::MobileCarrier) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::MobileCarrierListCall) +* [operating system versions](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OperatingSystemVersion) + * 
[*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OperatingSystemVersionListCall) +* [operating systems](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OperatingSystem) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OperatingSystemListCall) +* [order documents](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OrderDocument) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OrderDocumentListCall) +* [orders](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Order) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::OrderListCall) +* [placement groups](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementGroup) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementGroupUpdateCall) +* 
[placement strategies](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementStrategy) + * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementStrategyUpdateCall) +* [placements](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Placement) + * [*generatetags*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlacementUpdateCall) +* [platform types](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlatformType) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlatformTypeGetCall) and 
[*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PlatformTypeListCall) +* [postal codes](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PostalCode) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::PostalCodeListCall) +* [projects](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Project) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ProjectListCall) +* [regions](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Region) + * [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RegionListCall) +* [remarketing list shares](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListShare) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListShareUpdateCall) +* [remarketing lists](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingList) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListListCall), 
[*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::RemarketingListUpdateCall) +* [reports](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Report) + * [*compatible fields query*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportUpdateCall) +* [sites](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Site) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SiteListCall), 
[*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SiteUpdateCall) +* [sizes](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Size) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SizeListCall) +* [subaccounts](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::Subaccount) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::SubaccountUpdateCall) +* [targetable remarketing lists](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetableRemarketingList) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetableRemarketingListListCall) +* [targeting templates](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetingTemplate) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetingTemplateGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::TargetingTemplateUpdateCall) +* [user profiles](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserProfile) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserProfileListCall) +* [user role permission groups](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRolePermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRolePermissionGroupListCall) +* [user role permissions](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRolePermission) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRolePermissionListCall) +* [user roles](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRole) + * [*delete*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRoleGetCall), 
[*insert*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::UserRoleUpdateCall) +* [video formats](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::VideoFormat) + * [*get*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::VideoFormatListCall) Upload supported by ... -* [*insert creative assets*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::CreativeAssetInsertCall) +* [*insert creative assets*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::CreativeAssetInsertCall) Download supported by ... -* [*get files*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::FileGetCall) -* [*files get reports*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/api::ReportFileGetCall) +* [*get files*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::FileGetCall) +* [*files get reports*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/api::ReportFileGetCall) @@ -152,17 +152,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/Dfareporting)** +* **[Hub](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/Dfareporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::CallBuilder) -* **[Resources](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::CallBuilder) +* **[Resources](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::Part)** + * **[Parts](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -260,17 +260,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -280,29 +280,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::Delegate) to the -[Method Builder](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::Delegate) to the +[Method Builder](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::RequestValue) and -[decodable](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::RequestValue) and +[decodable](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dfareporting3d4/5.0.2-beta-1+20230118/google_dfareporting3d4/client::RequestValue) are moved +* [request values](https://docs.rs/google-dfareporting3d4/5.0.2+20230118/google_dfareporting3d4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/dfareporting3d4/src/api.rs b/gen/dfareporting3d4/src/api.rs index 05a8a8a6d3..807d974de7 100644 --- a/gen/dfareporting3d4/src/api.rs +++ b/gen/dfareporting3d4/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> Dfareporting { Dfareporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dfareporting.googleapis.com/dfareporting/v3.4/".to_string(), _root_url: "https://dfareporting.googleapis.com/".to_string(), } @@ -322,7 +322,7 @@ impl<'a, S> Dfareporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dfareporting3d4/src/client.rs b/gen/dfareporting3d4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dfareporting3d4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dfareporting3d4/src/lib.rs b/gen/dfareporting3d4/src/lib.rs index f93a9adbf7..74fc4e1449 100644 --- a/gen/dfareporting3d4/src/lib.rs +++ b/gen/dfareporting3d4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dfareporting* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *dfareporting:v3.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dfareporting* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *dfareporting:v3.4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dfareporting* *v3d4* API can be found at the //! [official documentation site](https://developers.google.com/doubleclick-advertisers/). diff --git a/gen/dfareporting3d5-cli/Cargo.toml b/gen/dfareporting3d5-cli/Cargo.toml index 00a57e82a7..71fc3ddc4e 100644 --- a/gen/dfareporting3d5-cli/Cargo.toml +++ b/gen/dfareporting3d5-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dfareporting3d5-cli" -version = "4.0.1+20220104" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dfareporting (protocol v3.5)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d5-cli" @@ -20,13 +20,13 @@ name = "dfareporting3d5" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dfareporting3d5] path = "../dfareporting3d5" -version = "4.0.1+20220104" +version = "5.0.2+20230118" + diff --git a/gen/dfareporting3d5-cli/README.md b/gen/dfareporting3d5-cli/README.md index cc4eee98c6..ee0a0b33d2 100644 --- a/gen/dfareporting3d5-cli/README.md +++ b/gen/dfareporting3d5-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dfareporting* API at revision *20220104*. The CLI is at version *4.0.1*. +This documentation was generated from the *Dfareporting* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash dfareporting3d5 [options] diff --git a/gen/dfareporting3d5-cli/mkdocs.yml b/gen/dfareporting3d5-cli/mkdocs.yml index 6f18e1c771..59cbed42cf 100644 --- a/gen/dfareporting3d5-cli/mkdocs.yml +++ b/gen/dfareporting3d5-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dfareporting v4.0.1+20220104 +site_name: Dfareporting v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-dfareporting3d5-cli site_description: A complete library to interact with Dfareporting (protocol v3.5) @@ -7,212 +7,272 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d5- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['account-active-ad-summaries_get.md', 'Account Active Ad Summaries', 'Get'] -- ['account-permission-groups_get.md', 'Account Permission Groups', 'Get'] -- ['account-permission-groups_list.md', 'Account Permission Groups', 'List'] -- ['account-permissions_get.md', 'Account Permissions', 'Get'] -- ['account-permissions_list.md', 'Account Permissions', 'List'] -- ['account-user-profiles_get.md', 'Account User Profiles', 'Get'] -- ['account-user-profiles_insert.md', 'Account User Profiles', 'Insert'] -- ['account-user-profiles_list.md', 'Account User Profiles', 'List'] -- ['account-user-profiles_patch.md', 'Account User Profiles', 'Patch'] -- ['account-user-profiles_update.md', 'Account User Profiles', 'Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['ads_get.md', 'Ads', 'Get'] -- ['ads_insert.md', 'Ads', 'Insert'] -- ['ads_list.md', 'Ads', 'List'] 
-- ['ads_patch.md', 'Ads', 'Patch'] -- ['ads_update.md', 'Ads', 'Update'] -- ['advertiser-groups_delete.md', 'Advertiser Groups', 'Delete'] -- ['advertiser-groups_get.md', 'Advertiser Groups', 'Get'] -- ['advertiser-groups_insert.md', 'Advertiser Groups', 'Insert'] -- ['advertiser-groups_list.md', 'Advertiser Groups', 'List'] -- ['advertiser-groups_patch.md', 'Advertiser Groups', 'Patch'] -- ['advertiser-groups_update.md', 'Advertiser Groups', 'Update'] -- ['advertiser-landing-pages_get.md', 'Advertiser Landing Pages', 'Get'] -- ['advertiser-landing-pages_insert.md', 'Advertiser Landing Pages', 'Insert'] -- ['advertiser-landing-pages_list.md', 'Advertiser Landing Pages', 'List'] -- ['advertiser-landing-pages_patch.md', 'Advertiser Landing Pages', 'Patch'] -- ['advertiser-landing-pages_update.md', 'Advertiser Landing Pages', 'Update'] -- ['advertisers_get.md', 'Advertisers', 'Get'] -- ['advertisers_insert.md', 'Advertisers', 'Insert'] -- ['advertisers_list.md', 'Advertisers', 'List'] -- ['advertisers_patch.md', 'Advertisers', 'Patch'] -- ['advertisers_update.md', 'Advertisers', 'Update'] -- ['browsers_list.md', 'Browsers', 'List'] -- ['campaign-creative-associations_insert.md', 'Campaign Creative Associations', 'Insert'] -- ['campaign-creative-associations_list.md', 'Campaign Creative Associations', 'List'] -- ['campaigns_get.md', 'Campaigns', 'Get'] -- ['campaigns_insert.md', 'Campaigns', 'Insert'] -- ['campaigns_list.md', 'Campaigns', 'List'] -- ['campaigns_patch.md', 'Campaigns', 'Patch'] -- ['campaigns_update.md', 'Campaigns', 'Update'] -- ['change-logs_get.md', 'Change Logs', 'Get'] -- ['change-logs_list.md', 'Change Logs', 'List'] -- ['cities_list.md', 'Cities', 'List'] -- ['connection-types_get.md', 'Connection Types', 'Get'] -- ['connection-types_list.md', 'Connection Types', 'List'] -- ['content-categories_delete.md', 'Content Categories', 'Delete'] -- ['content-categories_get.md', 'Content Categories', 'Get'] -- ['content-categories_insert.md', 'Content 
Categories', 'Insert'] -- ['content-categories_list.md', 'Content Categories', 'List'] -- ['content-categories_patch.md', 'Content Categories', 'Patch'] -- ['content-categories_update.md', 'Content Categories', 'Update'] -- ['conversions_batchinsert.md', 'Conversions', 'Batchinsert'] -- ['conversions_batchupdate.md', 'Conversions', 'Batchupdate'] -- ['countries_get.md', 'Countries', 'Get'] -- ['countries_list.md', 'Countries', 'List'] -- ['creative-assets_insert.md', 'Creative Assets', 'Insert'] -- ['creative-field-values_delete.md', 'Creative Field Values', 'Delete'] -- ['creative-field-values_get.md', 'Creative Field Values', 'Get'] -- ['creative-field-values_insert.md', 'Creative Field Values', 'Insert'] -- ['creative-field-values_list.md', 'Creative Field Values', 'List'] -- ['creative-field-values_patch.md', 'Creative Field Values', 'Patch'] -- ['creative-field-values_update.md', 'Creative Field Values', 'Update'] -- ['creative-fields_delete.md', 'Creative Fields', 'Delete'] -- ['creative-fields_get.md', 'Creative Fields', 'Get'] -- ['creative-fields_insert.md', 'Creative Fields', 'Insert'] -- ['creative-fields_list.md', 'Creative Fields', 'List'] -- ['creative-fields_patch.md', 'Creative Fields', 'Patch'] -- ['creative-fields_update.md', 'Creative Fields', 'Update'] -- ['creative-groups_get.md', 'Creative Groups', 'Get'] -- ['creative-groups_insert.md', 'Creative Groups', 'Insert'] -- ['creative-groups_list.md', 'Creative Groups', 'List'] -- ['creative-groups_patch.md', 'Creative Groups', 'Patch'] -- ['creative-groups_update.md', 'Creative Groups', 'Update'] -- ['creatives_get.md', 'Creatives', 'Get'] -- ['creatives_insert.md', 'Creatives', 'Insert'] -- ['creatives_list.md', 'Creatives', 'List'] -- ['creatives_patch.md', 'Creatives', 'Patch'] -- ['creatives_update.md', 'Creatives', 'Update'] -- ['dimension-values_query.md', 'Dimension Values', 'Query'] -- ['directory-sites_get.md', 'Directory Sites', 'Get'] -- ['directory-sites_insert.md', 'Directory Sites', 
'Insert'] -- ['directory-sites_list.md', 'Directory Sites', 'List'] -- ['dynamic-targeting-keys_delete.md', 'Dynamic Targeting Keys', 'Delete'] -- ['dynamic-targeting-keys_insert.md', 'Dynamic Targeting Keys', 'Insert'] -- ['dynamic-targeting-keys_list.md', 'Dynamic Targeting Keys', 'List'] -- ['event-tags_delete.md', 'Event Tags', 'Delete'] -- ['event-tags_get.md', 'Event Tags', 'Get'] -- ['event-tags_insert.md', 'Event Tags', 'Insert'] -- ['event-tags_list.md', 'Event Tags', 'List'] -- ['event-tags_patch.md', 'Event Tags', 'Patch'] -- ['event-tags_update.md', 'Event Tags', 'Update'] -- ['files_get.md', 'Files', 'Get'] -- ['files_list.md', 'Files', 'List'] -- ['floodlight-activities_delete.md', 'Floodlight Activities', 'Delete'] -- ['floodlight-activities_generatetag.md', 'Floodlight Activities', 'Generatetag'] -- ['floodlight-activities_get.md', 'Floodlight Activities', 'Get'] -- ['floodlight-activities_insert.md', 'Floodlight Activities', 'Insert'] -- ['floodlight-activities_list.md', 'Floodlight Activities', 'List'] -- ['floodlight-activities_patch.md', 'Floodlight Activities', 'Patch'] -- ['floodlight-activities_update.md', 'Floodlight Activities', 'Update'] -- ['floodlight-activity-groups_get.md', 'Floodlight Activity Groups', 'Get'] -- ['floodlight-activity-groups_insert.md', 'Floodlight Activity Groups', 'Insert'] -- ['floodlight-activity-groups_list.md', 'Floodlight Activity Groups', 'List'] -- ['floodlight-activity-groups_patch.md', 'Floodlight Activity Groups', 'Patch'] -- ['floodlight-activity-groups_update.md', 'Floodlight Activity Groups', 'Update'] -- ['floodlight-configurations_get.md', 'Floodlight Configurations', 'Get'] -- ['floodlight-configurations_list.md', 'Floodlight Configurations', 'List'] -- ['floodlight-configurations_patch.md', 'Floodlight Configurations', 'Patch'] -- ['floodlight-configurations_update.md', 'Floodlight Configurations', 'Update'] -- ['inventory-items_get.md', 'Inventory Items', 'Get'] -- ['inventory-items_list.md', 
'Inventory Items', 'List'] -- ['languages_list.md', 'Languages', 'List'] -- ['metros_list.md', 'Metros', 'List'] -- ['mobile-apps_get.md', 'Mobile Apps', 'Get'] -- ['mobile-apps_list.md', 'Mobile Apps', 'List'] -- ['mobile-carriers_get.md', 'Mobile Carriers', 'Get'] -- ['mobile-carriers_list.md', 'Mobile Carriers', 'List'] -- ['operating-system-versions_get.md', 'Operating System Versions', 'Get'] -- ['operating-system-versions_list.md', 'Operating System Versions', 'List'] -- ['operating-systems_get.md', 'Operating Systems', 'Get'] -- ['operating-systems_list.md', 'Operating Systems', 'List'] -- ['order-documents_get.md', 'Order Documents', 'Get'] -- ['order-documents_list.md', 'Order Documents', 'List'] -- ['orders_get.md', 'Orders', 'Get'] -- ['orders_list.md', 'Orders', 'List'] -- ['placement-groups_get.md', 'Placement Groups', 'Get'] -- ['placement-groups_insert.md', 'Placement Groups', 'Insert'] -- ['placement-groups_list.md', 'Placement Groups', 'List'] -- ['placement-groups_patch.md', 'Placement Groups', 'Patch'] -- ['placement-groups_update.md', 'Placement Groups', 'Update'] -- ['placement-strategies_delete.md', 'Placement Strategies', 'Delete'] -- ['placement-strategies_get.md', 'Placement Strategies', 'Get'] -- ['placement-strategies_insert.md', 'Placement Strategies', 'Insert'] -- ['placement-strategies_list.md', 'Placement Strategies', 'List'] -- ['placement-strategies_patch.md', 'Placement Strategies', 'Patch'] -- ['placement-strategies_update.md', 'Placement Strategies', 'Update'] -- ['placements_generatetags.md', 'Placements', 'Generatetags'] -- ['placements_get.md', 'Placements', 'Get'] -- ['placements_insert.md', 'Placements', 'Insert'] -- ['placements_list.md', 'Placements', 'List'] -- ['placements_patch.md', 'Placements', 'Patch'] -- ['placements_update.md', 'Placements', 'Update'] -- ['platform-types_get.md', 'Platform Types', 'Get'] -- ['platform-types_list.md', 'Platform Types', 'List'] -- ['postal-codes_get.md', 'Postal Codes', 'Get'] -- 
['postal-codes_list.md', 'Postal Codes', 'List'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_list.md', 'Projects', 'List'] -- ['regions_list.md', 'Regions', 'List'] -- ['remarketing-list-shares_get.md', 'Remarketing List Shares', 'Get'] -- ['remarketing-list-shares_patch.md', 'Remarketing List Shares', 'Patch'] -- ['remarketing-list-shares_update.md', 'Remarketing List Shares', 'Update'] -- ['remarketing-lists_get.md', 'Remarketing Lists', 'Get'] -- ['remarketing-lists_insert.md', 'Remarketing Lists', 'Insert'] -- ['remarketing-lists_list.md', 'Remarketing Lists', 'List'] -- ['remarketing-lists_patch.md', 'Remarketing Lists', 'Patch'] -- ['remarketing-lists_update.md', 'Remarketing Lists', 'Update'] -- ['reports_compatible-fields-query.md', 'Reports', 'Compatible Fields Query'] -- ['reports_delete.md', 'Reports', 'Delete'] -- ['reports_files-get.md', 'Reports', 'Files Get'] -- ['reports_files-list.md', 'Reports', 'Files List'] -- ['reports_get.md', 'Reports', 'Get'] -- ['reports_insert.md', 'Reports', 'Insert'] -- ['reports_list.md', 'Reports', 'List'] -- ['reports_patch.md', 'Reports', 'Patch'] -- ['reports_run.md', 'Reports', 'Run'] -- ['reports_update.md', 'Reports', 'Update'] -- ['sites_get.md', 'Sites', 'Get'] -- ['sites_insert.md', 'Sites', 'Insert'] -- ['sites_list.md', 'Sites', 'List'] -- ['sites_patch.md', 'Sites', 'Patch'] -- ['sites_update.md', 'Sites', 'Update'] -- ['sizes_get.md', 'Sizes', 'Get'] -- ['sizes_insert.md', 'Sizes', 'Insert'] -- ['sizes_list.md', 'Sizes', 'List'] -- ['subaccounts_get.md', 'Subaccounts', 'Get'] -- ['subaccounts_insert.md', 'Subaccounts', 'Insert'] -- ['subaccounts_list.md', 'Subaccounts', 'List'] -- ['subaccounts_patch.md', 'Subaccounts', 'Patch'] -- ['subaccounts_update.md', 'Subaccounts', 'Update'] -- ['targetable-remarketing-lists_get.md', 'Targetable Remarketing Lists', 'Get'] -- ['targetable-remarketing-lists_list.md', 'Targetable Remarketing Lists', 'List'] -- ['targeting-templates_get.md', 'Targeting 
Templates', 'Get'] -- ['targeting-templates_insert.md', 'Targeting Templates', 'Insert'] -- ['targeting-templates_list.md', 'Targeting Templates', 'List'] -- ['targeting-templates_patch.md', 'Targeting Templates', 'Patch'] -- ['targeting-templates_update.md', 'Targeting Templates', 'Update'] -- ['user-profiles_get.md', 'User Profiles', 'Get'] -- ['user-profiles_list.md', 'User Profiles', 'List'] -- ['user-role-permission-groups_get.md', 'User Role Permission Groups', 'Get'] -- ['user-role-permission-groups_list.md', 'User Role Permission Groups', 'List'] -- ['user-role-permissions_get.md', 'User Role Permissions', 'Get'] -- ['user-role-permissions_list.md', 'User Role Permissions', 'List'] -- ['user-roles_delete.md', 'User Roles', 'Delete'] -- ['user-roles_get.md', 'User Roles', 'Get'] -- ['user-roles_insert.md', 'User Roles', 'Insert'] -- ['user-roles_list.md', 'User Roles', 'List'] -- ['user-roles_patch.md', 'User Roles', 'Patch'] -- ['user-roles_update.md', 'User Roles', 'Update'] -- ['video-formats_get.md', 'Video Formats', 'Get'] -- ['video-formats_list.md', 'Video Formats', 'List'] +nav: +- Home: 'index.md' +- 'Account Active Ad Summaries': + - 'Get': 'account-active-ad-summaries_get.md' +- 'Account Permission Groups': + - 'Get': 'account-permission-groups_get.md' + - 'List': 'account-permission-groups_list.md' +- 'Account Permissions': + - 'Get': 'account-permissions_get.md' + - 'List': 'account-permissions_list.md' +- 'Account User Profiles': + - 'Get': 'account-user-profiles_get.md' + - 'Insert': 'account-user-profiles_insert.md' + - 'List': 'account-user-profiles_list.md' + - 'Patch': 'account-user-profiles_patch.md' + - 'Update': 'account-user-profiles_update.md' +- 'Accounts': + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' + - 'Update': 'accounts_update.md' +- 'Ads': + - 'Get': 'ads_get.md' + - 'Insert': 'ads_insert.md' + - 'List': 'ads_list.md' + - 'Patch': 'ads_patch.md' + - 'Update': 'ads_update.md' +- 
'Advertiser Groups': + - 'Delete': 'advertiser-groups_delete.md' + - 'Get': 'advertiser-groups_get.md' + - 'Insert': 'advertiser-groups_insert.md' + - 'List': 'advertiser-groups_list.md' + - 'Patch': 'advertiser-groups_patch.md' + - 'Update': 'advertiser-groups_update.md' +- 'Advertiser Landing Pages': + - 'Get': 'advertiser-landing-pages_get.md' + - 'Insert': 'advertiser-landing-pages_insert.md' + - 'List': 'advertiser-landing-pages_list.md' + - 'Patch': 'advertiser-landing-pages_patch.md' + - 'Update': 'advertiser-landing-pages_update.md' +- 'Advertisers': + - 'Get': 'advertisers_get.md' + - 'Insert': 'advertisers_insert.md' + - 'List': 'advertisers_list.md' + - 'Patch': 'advertisers_patch.md' + - 'Update': 'advertisers_update.md' +- 'Browsers': + - 'List': 'browsers_list.md' +- 'Campaign Creative Associations': + - 'Insert': 'campaign-creative-associations_insert.md' + - 'List': 'campaign-creative-associations_list.md' +- 'Campaigns': + - 'Get': 'campaigns_get.md' + - 'Insert': 'campaigns_insert.md' + - 'List': 'campaigns_list.md' + - 'Patch': 'campaigns_patch.md' + - 'Update': 'campaigns_update.md' +- 'Change Logs': + - 'Get': 'change-logs_get.md' + - 'List': 'change-logs_list.md' +- 'Cities': + - 'List': 'cities_list.md' +- 'Connection Types': + - 'Get': 'connection-types_get.md' + - 'List': 'connection-types_list.md' +- 'Content Categories': + - 'Delete': 'content-categories_delete.md' + - 'Get': 'content-categories_get.md' + - 'Insert': 'content-categories_insert.md' + - 'List': 'content-categories_list.md' + - 'Patch': 'content-categories_patch.md' + - 'Update': 'content-categories_update.md' +- 'Conversions': + - 'Batchinsert': 'conversions_batchinsert.md' + - 'Batchupdate': 'conversions_batchupdate.md' +- 'Countries': + - 'Get': 'countries_get.md' + - 'List': 'countries_list.md' +- 'Creative Assets': + - 'Insert': 'creative-assets_insert.md' +- 'Creative Field Values': + - 'Delete': 'creative-field-values_delete.md' + - 'Get': 
'creative-field-values_get.md' + - 'Insert': 'creative-field-values_insert.md' + - 'List': 'creative-field-values_list.md' + - 'Patch': 'creative-field-values_patch.md' + - 'Update': 'creative-field-values_update.md' +- 'Creative Fields': + - 'Delete': 'creative-fields_delete.md' + - 'Get': 'creative-fields_get.md' + - 'Insert': 'creative-fields_insert.md' + - 'List': 'creative-fields_list.md' + - 'Patch': 'creative-fields_patch.md' + - 'Update': 'creative-fields_update.md' +- 'Creative Groups': + - 'Get': 'creative-groups_get.md' + - 'Insert': 'creative-groups_insert.md' + - 'List': 'creative-groups_list.md' + - 'Patch': 'creative-groups_patch.md' + - 'Update': 'creative-groups_update.md' +- 'Creatives': + - 'Get': 'creatives_get.md' + - 'Insert': 'creatives_insert.md' + - 'List': 'creatives_list.md' + - 'Patch': 'creatives_patch.md' + - 'Update': 'creatives_update.md' +- 'Dimension Values': + - 'Query': 'dimension-values_query.md' +- 'Directory Sites': + - 'Get': 'directory-sites_get.md' + - 'Insert': 'directory-sites_insert.md' + - 'List': 'directory-sites_list.md' +- 'Dynamic Targeting Keys': + - 'Delete': 'dynamic-targeting-keys_delete.md' + - 'Insert': 'dynamic-targeting-keys_insert.md' + - 'List': 'dynamic-targeting-keys_list.md' +- 'Event Tags': + - 'Delete': 'event-tags_delete.md' + - 'Get': 'event-tags_get.md' + - 'Insert': 'event-tags_insert.md' + - 'List': 'event-tags_list.md' + - 'Patch': 'event-tags_patch.md' + - 'Update': 'event-tags_update.md' +- 'Files': + - 'Get': 'files_get.md' + - 'List': 'files_list.md' +- 'Floodlight Activities': + - 'Delete': 'floodlight-activities_delete.md' + - 'Generatetag': 'floodlight-activities_generatetag.md' + - 'Get': 'floodlight-activities_get.md' + - 'Insert': 'floodlight-activities_insert.md' + - 'List': 'floodlight-activities_list.md' + - 'Patch': 'floodlight-activities_patch.md' + - 'Update': 'floodlight-activities_update.md' +- 'Floodlight Activity Groups': + - 'Get': 'floodlight-activity-groups_get.md' + - 
'Insert': 'floodlight-activity-groups_insert.md' + - 'List': 'floodlight-activity-groups_list.md' + - 'Patch': 'floodlight-activity-groups_patch.md' + - 'Update': 'floodlight-activity-groups_update.md' +- 'Floodlight Configurations': + - 'Get': 'floodlight-configurations_get.md' + - 'List': 'floodlight-configurations_list.md' + - 'Patch': 'floodlight-configurations_patch.md' + - 'Update': 'floodlight-configurations_update.md' +- 'Inventory Items': + - 'Get': 'inventory-items_get.md' + - 'List': 'inventory-items_list.md' +- 'Languages': + - 'List': 'languages_list.md' +- 'Metros': + - 'List': 'metros_list.md' +- 'Mobile Apps': + - 'Get': 'mobile-apps_get.md' + - 'List': 'mobile-apps_list.md' +- 'Mobile Carriers': + - 'Get': 'mobile-carriers_get.md' + - 'List': 'mobile-carriers_list.md' +- 'Operating System Versions': + - 'Get': 'operating-system-versions_get.md' + - 'List': 'operating-system-versions_list.md' +- 'Operating Systems': + - 'Get': 'operating-systems_get.md' + - 'List': 'operating-systems_list.md' +- 'Order Documents': + - 'Get': 'order-documents_get.md' + - 'List': 'order-documents_list.md' +- 'Orders': + - 'Get': 'orders_get.md' + - 'List': 'orders_list.md' +- 'Placement Groups': + - 'Get': 'placement-groups_get.md' + - 'Insert': 'placement-groups_insert.md' + - 'List': 'placement-groups_list.md' + - 'Patch': 'placement-groups_patch.md' + - 'Update': 'placement-groups_update.md' +- 'Placement Strategies': + - 'Delete': 'placement-strategies_delete.md' + - 'Get': 'placement-strategies_get.md' + - 'Insert': 'placement-strategies_insert.md' + - 'List': 'placement-strategies_list.md' + - 'Patch': 'placement-strategies_patch.md' + - 'Update': 'placement-strategies_update.md' +- 'Placements': + - 'Generatetags': 'placements_generatetags.md' + - 'Get': 'placements_get.md' + - 'Insert': 'placements_insert.md' + - 'List': 'placements_list.md' + - 'Patch': 'placements_patch.md' + - 'Update': 'placements_update.md' +- 'Platform Types': + - 'Get': 
'platform-types_get.md' + - 'List': 'platform-types_list.md' +- 'Postal Codes': + - 'Get': 'postal-codes_get.md' + - 'List': 'postal-codes_list.md' +- 'Projects': + - 'Get': 'projects_get.md' + - 'List': 'projects_list.md' +- 'Regions': + - 'List': 'regions_list.md' +- 'Remarketing List Shares': + - 'Get': 'remarketing-list-shares_get.md' + - 'Patch': 'remarketing-list-shares_patch.md' + - 'Update': 'remarketing-list-shares_update.md' +- 'Remarketing Lists': + - 'Get': 'remarketing-lists_get.md' + - 'Insert': 'remarketing-lists_insert.md' + - 'List': 'remarketing-lists_list.md' + - 'Patch': 'remarketing-lists_patch.md' + - 'Update': 'remarketing-lists_update.md' +- 'Reports': + - 'Compatible Fields Query': 'reports_compatible-fields-query.md' + - 'Delete': 'reports_delete.md' + - 'Files Get': 'reports_files-get.md' + - 'Files List': 'reports_files-list.md' + - 'Get': 'reports_get.md' + - 'Insert': 'reports_insert.md' + - 'List': 'reports_list.md' + - 'Patch': 'reports_patch.md' + - 'Run': 'reports_run.md' + - 'Update': 'reports_update.md' +- 'Sites': + - 'Get': 'sites_get.md' + - 'Insert': 'sites_insert.md' + - 'List': 'sites_list.md' + - 'Patch': 'sites_patch.md' + - 'Update': 'sites_update.md' +- 'Sizes': + - 'Get': 'sizes_get.md' + - 'Insert': 'sizes_insert.md' + - 'List': 'sizes_list.md' +- 'Subaccounts': + - 'Get': 'subaccounts_get.md' + - 'Insert': 'subaccounts_insert.md' + - 'List': 'subaccounts_list.md' + - 'Patch': 'subaccounts_patch.md' + - 'Update': 'subaccounts_update.md' +- 'Targetable Remarketing Lists': + - 'Get': 'targetable-remarketing-lists_get.md' + - 'List': 'targetable-remarketing-lists_list.md' +- 'Targeting Templates': + - 'Get': 'targeting-templates_get.md' + - 'Insert': 'targeting-templates_insert.md' + - 'List': 'targeting-templates_list.md' + - 'Patch': 'targeting-templates_patch.md' + - 'Update': 'targeting-templates_update.md' +- 'User Profiles': + - 'Get': 'user-profiles_get.md' + - 'List': 'user-profiles_list.md' +- 'User Role 
Permission Groups': + - 'Get': 'user-role-permission-groups_get.md' + - 'List': 'user-role-permission-groups_list.md' +- 'User Role Permissions': + - 'Get': 'user-role-permissions_get.md' + - 'List': 'user-role-permissions_list.md' +- 'User Roles': + - 'Delete': 'user-roles_delete.md' + - 'Get': 'user-roles_get.md' + - 'Insert': 'user-roles_insert.md' + - 'List': 'user-roles_list.md' + - 'Patch': 'user-roles_patch.md' + - 'Update': 'user-roles_update.md' +- 'Video Formats': + - 'Get': 'video-formats_get.md' + - 'List': 'video-formats_list.md' theme: readthedocs diff --git a/gen/dfareporting3d5-cli/src/client.rs b/gen/dfareporting3d5-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dfareporting3d5-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dfareporting3d5-cli/src/main.rs b/gen/dfareporting3d5-cli/src/main.rs index 6cfcad612e..d4f323c791 100644 --- a/gen/dfareporting3d5-cli/src/main.rs +++ b/gen/dfareporting3d5-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dfareporting3d5::{api, Error, oauth2}; +use google_dfareporting3d5::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -478,10 +477,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-role-id" => { - call = call.user_role_id(value.unwrap_or("")); + call = call.user_role_id( value.map(|v| arg_from_str(v, err, "user-role-id", "int64")).unwrap_or(-0)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -496,13 +495,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = 
call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -838,13 +837,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1317,10 +1316,10 @@ where call = call.add_type(value.unwrap_or("")); }, "ssl-required" => { - call = call.ssl_required(arg_from_str(value.unwrap_or("false"), err, "ssl-required", "boolean")); + call = call.ssl_required( value.map(|v| arg_from_str(v, err, "ssl-required", "boolean")).unwrap_or(false)); }, "ssl-compliant" => { - call = call.ssl_compliant(arg_from_str(value.unwrap_or("false"), err, "ssl-compliant", "boolean")); + call = call.ssl_compliant( value.map(|v| arg_from_str(v, err, "ssl-compliant", "boolean")).unwrap_or(false)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -1329,58 +1328,58 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "remarketing-list-ids" => { - call = call.add_remarketing_list_ids(value.unwrap_or("")); + call = call.add_remarketing_list_ids( value.map(|v| arg_from_str(v, err, "remarketing-list-ids", "int64")).unwrap_or(-0)); }, "placement-ids" => { - call = 
call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "landing-page-ids" => { - call = call.add_landing_page_ids(value.unwrap_or("")); + call = call.add_landing_page_ids( value.map(|v| arg_from_str(v, err, "landing-page-ids", "int64")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dynamic-click-tracker" => { - call = call.dynamic_click_tracker(arg_from_str(value.unwrap_or("false"), err, "dynamic-click-tracker", "boolean")); + call = call.dynamic_click_tracker( value.map(|v| arg_from_str(v, err, "dynamic-click-tracker", "boolean")).unwrap_or(false)); }, "creative-optimization-configuration-ids" => { - call = call.add_creative_optimization_configuration_ids(value.unwrap_or("")); + call = call.add_creative_optimization_configuration_ids( value.map(|v| arg_from_str(v, err, "creative-optimization-configuration-ids", "int64")).unwrap_or(-0)); }, "creative-ids" => { - call = call.add_creative_ids(value.unwrap_or("")); + call = call.add_creative_ids( value.map(|v| arg_from_str(v, err, "creative-ids", "int64")).unwrap_or(-0)); }, "compatibility" => { call = call.compatibility(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", 
"int64")).unwrap_or(-0)); }, "audience-segment-ids" => { - call = call.add_audience_segment_ids(value.unwrap_or("")); + call = call.add_audience_segment_ids( value.map(|v| arg_from_str(v, err, "audience-segment-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1932,10 +1931,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2309,7 +2308,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -2324,19 +2323,19 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - 
call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2736,7 +2735,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "status" => { call = call.status(value.unwrap_or("")); @@ -2754,22 +2753,22 @@ where call = call.page_token(value.unwrap_or("")); }, "only-parent" => { - call = call.only_parent(arg_from_str(value.unwrap_or("false"), err, "only-parent", "boolean")); + call = call.only_parent( value.map(|v| arg_from_str(v, err, "only-parent", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-advertisers-without-groups-only" => { - call = call.include_advertisers_without_groups_only(arg_from_str(value.unwrap_or("false"), err, "include-advertisers-without-groups-only", "boolean")); + call = call.include_advertisers_without_groups_only( value.map(|v| arg_from_str(v, err, "include-advertisers-without-groups-only", "boolean")).unwrap_or(false)); }, "ids" => { - call = 
call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-ids" => { - call = call.add_floodlight_configuration_ids(value.unwrap_or("")); + call = call.add_floodlight_configuration_ids( value.map(|v| arg_from_str(v, err, "floodlight-configuration-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3193,7 +3192,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3427,7 +3426,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -3442,28 +3441,28 @@ where call = call.page_token(value.unwrap_or("")); }, "overridden-event-tag-id" => { - call = call.overridden_event_tag_id(value.unwrap_or("")); + call = call.overridden_event_tag_id( value.map(|v| arg_from_str(v, err, "overridden-event-tag-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "excluded-ids" => { - call = 
call.add_excluded_ids(value.unwrap_or("")); + call = call.add_excluded_ids( value.map(|v| arg_from_str(v, err, "excluded-ids", "int64")).unwrap_or(-0)); }, "at-least-one-optimization-activity" => { - call = call.at_least_one_optimization_activity(arg_from_str(value.unwrap_or("false"), err, "at-least-one-optimization-activity", "boolean")); + call = call.at_least_one_optimization_activity( value.map(|v| arg_from_str(v, err, "at-least-one-optimization-activity", "boolean")).unwrap_or(false)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, "advertiser-group-ids" => { - call = call.add_advertiser_group_ids(value.unwrap_or("")); + call = call.add_advertiser_group_ids( value.map(|v| arg_from_str(v, err, "advertiser-group-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3823,7 +3822,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "user-profile-ids" => { - call = call.add_user_profile_ids(value.unwrap_or("")); + call = call.add_user_profile_ids( value.map(|v| arg_from_str(v, err, "user-profile-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -3835,19 +3834,19 @@ where call = call.object_type(value.unwrap_or("")); }, "object-ids" => { - call = call.add_object_ids(value.unwrap_or("")); + call = call.add_object_ids( value.map(|v| arg_from_str(v, err, "object-ids", "int64")).unwrap_or(-0)); }, "min-change-time" => { call = call.min_change_time(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( 
value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-change-time" => { call = call.max_change_time(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "action" => { call = call.action(value.unwrap_or("")); @@ -3906,16 +3905,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "region-dart-ids" => { - call = call.add_region_dart_ids(value.unwrap_or("")); + call = call.add_region_dart_ids( value.map(|v| arg_from_str(v, err, "region-dart-ids", "int64")).unwrap_or(-0)); }, "name-prefix" => { call = call.name_prefix(value.unwrap_or("")); }, "dart-ids" => { - call = call.add_dart_ids(value.unwrap_or("")); + call = call.add_dart_ids( value.map(|v| arg_from_str(v, err, "dart-ids", "int64")).unwrap_or(-0)); }, "country-dart-ids" => { - call = call.add_country_dart_ids(value.unwrap_or("")); + call = call.add_country_dart_ids( value.map(|v| arg_from_str(v, err, "country-dart-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4271,10 +4270,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5083,10 +5082,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| 
arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5520,13 +5519,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5935,16 +5934,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-number" => { - call = call.group_number(arg_from_str(value.unwrap_or("-0"), err, "group-number", "integer")); + call = call.group_number( value.map(|v| arg_from_str(v, err, "group-number", "int32")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6425,7 +6424,7 @@ where call = call.add_types(value.unwrap_or("")); }, "studio-creative-id" => { - call = call.studio_creative_id(value.unwrap_or("")); + call = call.studio_creative_id( value.map(|v| arg_from_str(v, err, "studio-creative-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ 
-6434,40 +6433,40 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); }, "rendering-ids" => { - call = call.add_rendering_ids(value.unwrap_or("")); + call = call.add_rendering_ids( value.map(|v| arg_from_str(v, err, "rendering-ids", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "creative-field-ids" => { - call = call.add_creative_field_ids(value.unwrap_or("")); + call = call.add_creative_field_ids( value.map(|v| arg_from_str(v, err, "creative-field-ids", "int64")).unwrap_or(-0)); }, "companion-creative-ids" => { - call = call.add_companion_creative_ids(value.unwrap_or("")); + call = call.add_companion_creative_ids( value.map(|v| arg_from_str(v, err, "companion-creative-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "active" => { - call = 
call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6914,7 +6913,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7138,25 +7137,25 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "dfp-network-code" => { call = call.dfp_network_code(value.unwrap_or("")); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = 
call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7347,13 +7346,13 @@ where call = call.object_type(value.unwrap_or("")); }, "object-id" => { - call = call.object_id(value.unwrap_or("")); + call = call.object_id( value.map(|v| arg_from_str(v, err, "object-id", "int64")).unwrap_or(-0)); }, "names" => { call = call.add_names(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7626,25 +7625,25 @@ where call = call.search_string(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "event-tag-types" => { call = call.add_event_tag_types(value.unwrap_or("")); }, "enabled" => { - call = call.enabled(arg_from_str(value.unwrap_or("false"), err, "enabled", "boolean")); + call = call.enabled( value.map(|v| arg_from_str(v, err, "enabled", "boolean")).unwrap_or(false)); }, "definitions-only" => { - call = call.definitions_only(arg_from_str(value.unwrap_or("false"), err, "definitions-only", "boolean")); + call = call.definitions_only( value.map(|v| arg_from_str(v, err, "definitions-only", "boolean")).unwrap_or(false)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, "ad-id" => { - call = 
call.ad_id(value.unwrap_or("")); + call = call.ad_id( value.map(|v| arg_from_str(v, err, "ad-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7998,7 +7997,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8098,7 +8097,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8347,13 +8346,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "floodlight-activity-group-type" => { call = call.floodlight_activity_group_type(value.unwrap_or("")); @@ -8365,10 +8364,10 @@ where call = call.floodlight_activity_group_name(value.unwrap_or("")); }, "floodlight-activity-group-ids" => { - call = call.add_floodlight_activity_group_ids(value.unwrap_or("")); + call = call.add_floodlight_activity_group_ids( value.map(|v| arg_from_str(v, err, "floodlight-activity-group-ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = 
call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8854,16 +8853,16 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "floodlight-configuration-id" => { - call = call.floodlight_configuration_id(value.unwrap_or("")); + call = call.floodlight_configuration_id( value.map(|v| arg_from_str(v, err, "floodlight-configuration-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9193,7 +9192,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9546,22 +9545,22 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, 
err, "max-results", "int32")).unwrap_or(-0)); }, "in-plan" => { - call = call.in_plan(arg_from_str(value.unwrap_or("false"), err, "in-plan", "boolean")); + call = call.in_plan( value.map(|v| arg_from_str(v, err, "in-plan", "boolean")).unwrap_or(false)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9779,7 +9778,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { call = call.add_ids(value.unwrap_or("")); @@ -10211,7 +10210,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10220,16 +10219,16 @@ where call = call.page_token(value.unwrap_or("")); }, "order-id" => { - call = call.add_order_id(value.unwrap_or("")); + call = call.add_order_id( value.map(|v| arg_from_str(v, err, "order-id", "int64")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -10343,7 +10342,7 @@ where call = 
call.sort_field(value.unwrap_or("")); }, "site-id" => { - call = call.add_site_id(value.unwrap_or("")); + call = call.add_site_id( value.map(|v| arg_from_str(v, err, "site-id", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10352,10 +10351,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10615,7 +10614,7 @@ where call = call.sort_field(value.unwrap_or("")); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -10624,7 +10623,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "placement-group-type" => { call = call.placement_group_type(value.unwrap_or("")); @@ -10642,28 +10641,28 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, 
"directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11207,10 +11206,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11445,10 +11444,10 @@ where call = call.add_tag_formats(value.unwrap_or("")); }, "placement-ids" => { - call = call.add_placement_ids(value.unwrap_or("")); + call = call.add_placement_ids( value.map(|v| arg_from_str(v, err, "placement-ids", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = 
false; @@ -11761,10 +11760,10 @@ where call = call.sort_field(value.unwrap_or("")); }, "size-ids" => { - call = call.add_size_ids(value.unwrap_or("")); + call = call.add_size_ids( value.map(|v| arg_from_str(v, err, "size-ids", "int64")).unwrap_or(-0)); }, "site-ids" => { - call = call.add_site_ids(value.unwrap_or("")); + call = call.add_site_ids( value.map(|v| arg_from_str(v, err, "site-ids", "int64")).unwrap_or(-0)); }, "search-string" => { call = call.search_string(value.unwrap_or("")); @@ -11773,7 +11772,7 @@ where call = call.add_pricing_types(value.unwrap_or("")); }, "placement-strategy-ids" => { - call = call.add_placement_strategy_ids(value.unwrap_or("")); + call = call.add_placement_strategy_ids( value.map(|v| arg_from_str(v, err, "placement-strategy-ids", "int64")).unwrap_or(-0)); }, "payment-source" => { call = call.payment_source(value.unwrap_or("")); @@ -11791,34 +11790,34 @@ where call = call.max_start_date(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "max-end-date" => { call = call.max_end_date(value.unwrap_or("")); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "group-ids" => { - call = call.add_group_ids(value.unwrap_or("")); + call = call.add_group_ids( value.map(|v| arg_from_str(v, err, "group-ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "content-category-ids" => { - call = call.add_content_category_ids(value.unwrap_or("")); + call = call.add_content_category_ids( value.map(|v| arg_from_str(v, err, "content-category-ids", "int64")).unwrap_or(-0)); }, 
"compatibilities" => { call = call.add_compatibilities(value.unwrap_or("")); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "archived" => { - call = call.archived(arg_from_str(value.unwrap_or("false"), err, "archived", "boolean")); + call = call.archived( value.map(|v| arg_from_str(v, err, "archived", "boolean")).unwrap_or(false)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12544,13 +12543,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-ids" => { - call = call.add_advertiser_ids(value.unwrap_or("")); + call = call.add_advertiser_ids( value.map(|v| arg_from_str(v, err, "advertiser-ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13053,13 +13052,13 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "floodlight-activity-id" => { - call = call.floodlight_activity_id(value.unwrap_or("")); + call = call.floodlight_activity_id( value.map(|v| arg_from_str(v, err, "floodlight-activity-id", "int64")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = 
call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -13629,7 +13628,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13942,7 +13941,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14191,7 +14190,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "synchronous" => { - call = call.synchronous(arg_from_str(value.unwrap_or("false"), err, "synchronous", "boolean")); + call = call.synchronous( value.map(|v| arg_from_str(v, err, "synchronous", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -14631,10 +14630,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unmapped-site" => { - call = call.unmapped_site(arg_from_str(value.unwrap_or("false"), err, "unmapped-site", "boolean")); + call = call.unmapped_site( value.map(|v| arg_from_str(v, err, "unmapped-site", "boolean")).unwrap_or(false)); }, "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -14649,31 +14648,31 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", 
"int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "directory-site-ids" => { - call = call.add_directory_site_ids(value.unwrap_or("")); + call = call.add_directory_site_ids( value.map(|v| arg_from_str(v, err, "directory-site-ids", "int64")).unwrap_or(-0)); }, "campaign-ids" => { - call = call.add_campaign_ids(value.unwrap_or("")); + call = call.add_campaign_ids( value.map(|v| arg_from_str(v, err, "campaign-ids", "int64")).unwrap_or(-0)); }, "approved" => { - call = call.approved(arg_from_str(value.unwrap_or("false"), err, "approved", "boolean")); + call = call.approved( value.map(|v| arg_from_str(v, err, "approved", "boolean")).unwrap_or(false)); }, "ad-words-site" => { - call = call.ad_words_site(arg_from_str(value.unwrap_or("false"), err, "ad-words-site", "boolean")); + call = call.ad_words_site( value.map(|v| arg_from_str(v, err, "ad-words-site", "boolean")).unwrap_or(false)); }, "accepts-publisher-paid-placements" => { - call = call.accepts_publisher_paid_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-publisher-paid-placements", "boolean")); + call = call.accepts_publisher_paid_placements( value.map(|v| arg_from_str(v, err, "accepts-publisher-paid-placements", "boolean")).unwrap_or(false)); }, "accepts-interstitial-placements" => { - call = call.accepts_interstitial_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-interstitial-placements", "boolean")); + call = call.accepts_interstitial_placements( value.map(|v| arg_from_str(v, err, "accepts-interstitial-placements", "boolean")).unwrap_or(false)); }, "accepts-in-stream-video-placements" => { - call = call.accepts_in_stream_video_placements(arg_from_str(value.unwrap_or("false"), err, "accepts-in-stream-video-placements", "boolean")); + call = call.accepts_in_stream_video_placements( value.map(|v| arg_from_str(v, err, "accepts-in-stream-video-placements", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15148,16 +15147,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "width" => { - call = call.width(arg_from_str(value.unwrap_or("-0"), err, "width", "integer")); + call = call.width( value.map(|v| arg_from_str(v, err, "width", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "iab-standard" => { - call = call.iab_standard(arg_from_str(value.unwrap_or("false"), err, "iab-standard", "boolean")); + call = call.iab_standard( value.map(|v| arg_from_str(v, err, "iab-standard", "boolean")).unwrap_or(false)); }, "height" => { - call = call.height(arg_from_str(value.unwrap_or("-0"), err, "height", "integer")); + call = call.height( value.map(|v| arg_from_str(v, err, "height", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15366,10 +15365,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15667,10 +15666,10 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "active" => { - call = call.active(arg_from_str(value.unwrap_or("false"), err, "active", "boolean")); + call = call.active( value.map(|v| arg_from_str(v, err, "active", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15892,13 +15891,13 @@ where 
call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16418,7 +16417,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -16661,7 +16660,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "subaccount-id" => { - call = call.subaccount_id(value.unwrap_or("")); + call = call.subaccount_id( value.map(|v| arg_from_str(v, err, "subaccount-id", "int64")).unwrap_or(-0)); }, "sort-order" => { call = call.sort_order(value.unwrap_or("")); @@ -16676,13 +16675,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "ids" => { - call = call.add_ids(value.unwrap_or("")); + call = call.add_ids( value.map(|v| arg_from_str(v, err, "ids", "int64")).unwrap_or(-0)); }, "account-user-role-only" => { - call = call.account_user_role_only(arg_from_str(value.unwrap_or("false"), err, "account-user-role-only", "boolean")); + call = call.account_user_role_only( value.map(|v| arg_from_str(v, err, "account-user-role-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = 
false; @@ -23959,7 +23958,7 @@ async fn main() { let mut app = App::new("dfareporting3d5") .author("Sebastian Thiel ") - .version("4.0.1+20220104") + .version("5.0.2+20230118") .about("Build applications to efficiently manage large or complex trafficking, reporting, and attribution workflows for Campaign Manager 360.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dfareporting3d5_cli") .arg(Arg::with_name("url") diff --git a/gen/dfareporting3d5/Cargo.toml b/gen/dfareporting3d5/Cargo.toml index f5b7facf7e..097aa98894 100644 --- a/gen/dfareporting3d5/Cargo.toml +++ b/gen/dfareporting3d5/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dfareporting3d5" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dfareporting (protocol v3.5)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dfareporting3d5" homepage = "https://developers.google.com/doubleclick-advertisers/" -documentation = "https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-dfareporting3d5/5.0.2+20230118" license = "MIT" keywords = ["dfareporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dfareporting3d5/README.md b/gen/dfareporting3d5/README.md index 9807e9b815..3ce436110a 100644 --- a/gen/dfareporting3d5/README.md +++ b/gen/dfareporting3d5/README.md @@ -5,144 +5,144 @@ DO NOT EDIT ! --> The `google-dfareporting3d5` library allows access to all features of the *Google Dfareporting* service. -This documentation was generated from *Dfareporting* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *dfareporting:v3.5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Dfareporting* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *dfareporting:v3.5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dfareporting* *v3d5* API can be found at the [official documentation site](https://developers.google.com/doubleclick-advertisers/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/Dfareporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/Dfareporting) ... -* [account active ad summaries](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountActiveAdSummary) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountActiveAdSummaryGetCall) -* [account permission groups](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountPermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountPermissionGroupListCall) -* [account permissions](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountPermission) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountPermissionListCall) -* [account user profiles](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountUserProfile) - * 
[*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountUserProfileUpdateCall) -* [accounts](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Account) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AccountUpdateCall) -* [ads](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Ad) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdUpdateCall) -* [advertiser 
groups](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserGroup) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserGroupUpdateCall) +* [account active ad summaries](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountActiveAdSummary) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountActiveAdSummaryGetCall) +* [account permission groups](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountPermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountPermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountPermissionGroupListCall) +* [account permissions](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountPermission) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountPermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountPermissionListCall) +* [account user 
profiles](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountUserProfile) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountUserProfileGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountUserProfileInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountUserProfileListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountUserProfilePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountUserProfileUpdateCall) +* [accounts](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Account) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountGetCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AccountUpdateCall) +* [ads](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Ad) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdUpdateCall) +* [advertiser 
groups](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserGroup) + * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserGroupDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserGroupUpdateCall) * advertiser landing pages - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserLandingPageUpdateCall) -* [advertisers](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Advertiser) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserListCall), 
[*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::AdvertiserUpdateCall) -* [browsers](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Browser) - * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::BrowserListCall) -* [campaign creative associations](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CampaignCreativeAssociation) - * [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CampaignCreativeAssociationListCall) -* [campaigns](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Campaign) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CampaignUpdateCall) -* [change logs](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ChangeLog) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ChangeLogListCall) -* 
[cities](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::City) - * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CityListCall) -* [connection types](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ConnectionType) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ConnectionTypeListCall) -* [content categories](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ContentCategory) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ContentCategoryUpdateCall) -* [conversions](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Conversion) - * [*batchinsert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ConversionBatchupdateCall) -* [countries](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Country) - * 
[*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CountryListCall) -* [creative assets](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeAsset) - * [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeAssetInsertCall) -* [creative field values](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldValue) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldValueUpdateCall) -* [creative fields](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeField) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeFieldUpdateCall) -* [creative groups](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeGroup) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeGroupUpdateCall) -* [creatives](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Creative) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeUpdateCall) -* [dimension values](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DimensionValue) - * 
[*query*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DimensionValueQueryCall) -* [directory sites](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DirectorySite) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DirectorySiteListCall) -* [dynamic targeting keys](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DynamicTargetingKey) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DynamicTargetingKeyDeleteCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::DynamicTargetingKeyListCall) -* [event tags](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::EventTag) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::EventTagUpdateCall) -* 
[files](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::File) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FileListCall) -* [floodlight activities](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivity) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityUpdateCall) -* [floodlight activity groups](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityGroup) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityGroupListCall), 
[*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightActivityGroupUpdateCall) -* [floodlight configurations](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightConfiguration) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FloodlightConfigurationUpdateCall) -* [inventory items](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::InventoryItem) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::InventoryItemListCall) -* [languages](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Language) - * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::LanguageListCall) -* [metros](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Metro) - * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::MetroListCall) -* [mobile apps](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::MobileApp) - * 
[*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::MobileAppGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::MobileAppListCall) -* [mobile carriers](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::MobileCarrier) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::MobileCarrierListCall) -* [operating system versions](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OperatingSystemVersion) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OperatingSystemVersionListCall) -* [operating systems](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OperatingSystem) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OperatingSystemGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OperatingSystemListCall) -* [order documents](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OrderDocument) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OrderDocumentListCall) -* [orders](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Order) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OrderGetCall) and 
[*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::OrderListCall) -* [placement groups](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementGroup) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementGroupUpdateCall) -* [placement strategies](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementStrategy) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementStrategyInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementStrategyUpdateCall) -* [placements](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Placement) - * 
[*generatetags*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlacementUpdateCall) -* [platform types](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlatformType) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PlatformTypeListCall) -* [postal codes](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PostalCode) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::PostalCodeListCall) -* [projects](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Project) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ProjectListCall) -* [regions](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Region) - * 
[*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RegionListCall) -* [remarketing list shares](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListShare) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListShareUpdateCall) -* [remarketing lists](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingList) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::RemarketingListUpdateCall) -* [reports](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Report) - * [*compatible fields query*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportCompatibleFieldQueryCall), [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportFileGetCall), [*files 
list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportUpdateCall) -* [sites](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Site) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SiteUpdateCall) -* [sizes](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Size) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SizeInsertCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SizeListCall) -* 
[subaccounts](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::Subaccount) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::SubaccountUpdateCall) -* [targetable remarketing lists](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetableRemarketingList) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetableRemarketingListListCall) -* [targeting templates](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetingTemplate) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::TargetingTemplateUpdateCall) -* [user 
profiles](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserProfile) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserProfileListCall) -* [user role permission groups](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRolePermissionGroup) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRolePermissionGroupListCall) -* [user role permissions](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRolePermission) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRolePermissionListCall) -* [user roles](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRole) - * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::UserRoleUpdateCall) 
-* [video formats](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::VideoFormat) - * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::VideoFormatListCall) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserLandingPageGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserLandingPageInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserLandingPageListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserLandingPagePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserLandingPageUpdateCall) +* [advertisers](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Advertiser) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::AdvertiserUpdateCall) +* [browsers](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Browser) + * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::BrowserListCall) +* [campaign creative 
associations](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CampaignCreativeAssociation) + * [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CampaignCreativeAssociationInsertCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CampaignCreativeAssociationListCall) +* [campaigns](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Campaign) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CampaignGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CampaignInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CampaignListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CampaignPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CampaignUpdateCall) +* [change logs](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ChangeLog) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ChangeLogGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ChangeLogListCall) +* [cities](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::City) + * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CityListCall) +* [connection types](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ConnectionType) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ConnectionTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ConnectionTypeListCall) +* [content 
categories](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ContentCategory) + * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ContentCategoryDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ContentCategoryGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ContentCategoryInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ContentCategoryListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ContentCategoryPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ContentCategoryUpdateCall) +* [conversions](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Conversion) + * [*batchinsert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ConversionBatchinsertCall) and [*batchupdate*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ConversionBatchupdateCall) +* [countries](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Country) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CountryGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CountryListCall) +* [creative assets](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeAsset) + * [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeAssetInsertCall) +* [creative field values](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldValue) + * 
[*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldValueDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldValueGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldValueInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldValueListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldValuePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldValueUpdateCall) +* [creative fields](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeField) + * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeFieldUpdateCall) +* [creative groups](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeGroup) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeGroupInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeGroupUpdateCall) +* [creatives](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Creative) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeUpdateCall) +* [dimension values](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DimensionValue) + * [*query*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DimensionValueQueryCall) +* [directory sites](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DirectorySite) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DirectorySiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DirectorySiteInsertCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DirectorySiteListCall) +* [dynamic targeting keys](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DynamicTargetingKey) + * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DynamicTargetingKeyDeleteCall), 
[*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DynamicTargetingKeyInsertCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::DynamicTargetingKeyListCall) +* [event tags](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::EventTag) + * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::EventTagDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::EventTagGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::EventTagInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::EventTagListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::EventTagPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::EventTagUpdateCall) +* [files](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::File) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FileGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FileListCall) +* [floodlight activities](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivity) + * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityDeleteCall), [*generatetag*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityUpdateCall) +* [floodlight activity groups](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityGroup) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightActivityGroupUpdateCall) +* [floodlight configurations](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightConfiguration) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightConfigurationGetCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightConfigurationListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightConfigurationPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FloodlightConfigurationUpdateCall) +* [inventory items](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::InventoryItem) + * 
[*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::InventoryItemGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::InventoryItemListCall) +* [languages](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Language) + * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::LanguageListCall) +* [metros](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Metro) + * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::MetroListCall) +* [mobile apps](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::MobileApp) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::MobileAppGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::MobileAppListCall) +* [mobile carriers](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::MobileCarrier) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::MobileCarrierGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::MobileCarrierListCall) +* [operating system versions](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OperatingSystemVersion) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OperatingSystemVersionGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OperatingSystemVersionListCall) +* [operating systems](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OperatingSystem) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OperatingSystemGetCall) and 
[*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OperatingSystemListCall) +* [order documents](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OrderDocument) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OrderDocumentGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OrderDocumentListCall) +* [orders](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Order) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OrderGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::OrderListCall) +* [placement groups](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementGroup) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementGroupGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementGroupInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementGroupListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementGroupPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementGroupUpdateCall) +* [placement strategies](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementStrategy) + * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementStrategyDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementStrategyGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementStrategyInsertCall), 
[*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementStrategyListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementStrategyPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementStrategyUpdateCall) +* [placements](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Placement) + * [*generatetags*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementGeneratetagCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlacementUpdateCall) +* [platform types](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlatformType) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlatformTypeGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PlatformTypeListCall) +* [postal codes](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PostalCode) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PostalCodeGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::PostalCodeListCall) +* [projects](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Project) + * 
[*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ProjectGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ProjectListCall) +* [regions](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Region) + * [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RegionListCall) +* [remarketing list shares](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListShare) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListShareGetCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListSharePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListShareUpdateCall) +* [remarketing lists](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingList) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::RemarketingListUpdateCall) +* [reports](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Report) + * [*compatible fields query*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportCompatibleFieldQueryCall), 
[*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportDeleteCall), [*files get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportFileGetCall), [*files list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportFileListCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportPatchCall), [*run*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportRunCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportUpdateCall) +* [sites](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Site) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SiteGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SiteInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SiteListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SitePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SiteUpdateCall) +* [sizes](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Size) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SizeGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SizeInsertCall) and 
[*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SizeListCall) +* [subaccounts](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::Subaccount) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SubaccountGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SubaccountInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SubaccountListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SubaccountPatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::SubaccountUpdateCall) +* [targetable remarketing lists](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetableRemarketingList) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetableRemarketingListGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetableRemarketingListListCall) +* [targeting templates](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetingTemplate) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetingTemplateGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetingTemplateInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetingTemplateListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetingTemplatePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::TargetingTemplateUpdateCall) +* [user 
profiles](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserProfile) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserProfileGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserProfileListCall) +* [user role permission groups](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRolePermissionGroup) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRolePermissionGroupGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRolePermissionGroupListCall) +* [user role permissions](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRolePermission) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRolePermissionGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRolePermissionListCall) +* [user roles](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRole) + * [*delete*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRoleDeleteCall), [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRoleGetCall), [*insert*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRoleInsertCall), [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRoleListCall), [*patch*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRolePatchCall) and [*update*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::UserRoleUpdateCall) +* [video 
formats](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::VideoFormat) + * [*get*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::VideoFormatGetCall) and [*list*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::VideoFormatListCall) Upload supported by ... -* [*insert creative assets*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::CreativeAssetInsertCall) +* [*insert creative assets*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::CreativeAssetInsertCall) Download supported by ... -* [*get files*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::FileGetCall) -* [*files get reports*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/api::ReportFileGetCall) +* [*get files*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::FileGetCall) +* [*files get reports*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/api::ReportFileGetCall) @@ -150,17 +150,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/Dfareporting)** +* **[Hub](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/Dfareporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::CallBuilder) -* **[Resources](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::CallBuilder) +* **[Resources](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::Part)** + * **[Parts](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -258,17 +258,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -278,29 +278,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::Delegate) to the -[Method Builder](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::Delegate) to the +[Method Builder](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::RequestValue) and -[decodable](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::RequestValue) and +[decodable](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dfareporting3d5/5.0.2-beta-1+20230118/google_dfareporting3d5/client::RequestValue) are moved +* [request values](https://docs.rs/google-dfareporting3d5/5.0.2+20230118/google_dfareporting3d5/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/dfareporting3d5/src/api.rs b/gen/dfareporting3d5/src/api.rs index e24d791030..8e5a10b828 100644 --- a/gen/dfareporting3d5/src/api.rs +++ b/gen/dfareporting3d5/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> Dfareporting { Dfareporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dfareporting.googleapis.com/dfareporting/v3.5/".to_string(), _root_url: "https://dfareporting.googleapis.com/".to_string(), } @@ -319,7 +319,7 @@ impl<'a, S> Dfareporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dfareporting3d5/src/client.rs b/gen/dfareporting3d5/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dfareporting3d5/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dfareporting3d5/src/lib.rs b/gen/dfareporting3d5/src/lib.rs index 959231137d..04ac1d2a17 100644 --- a/gen/dfareporting3d5/src/lib.rs +++ b/gen/dfareporting3d5/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dfareporting* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *dfareporting:v3.5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dfareporting* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *dfareporting:v3.5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dfareporting* *v3d5* API can be found at the //! [official documentation site](https://developers.google.com/doubleclick-advertisers/). diff --git a/gen/dialogflow2-cli/Cargo.toml b/gen/dialogflow2-cli/Cargo.toml index 3ce3b21d48..14f9539d02 100644 --- a/gen/dialogflow2-cli/Cargo.toml +++ b/gen/dialogflow2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dialogflow2-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dialogflow (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow2-cli" @@ -20,13 +20,13 @@ name = "dialogflow2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dialogflow2] path = "../dialogflow2" -version = "4.0.1+20220228" +version = "5.0.2+20230110" + diff --git a/gen/dialogflow2-cli/README.md b/gen/dialogflow2-cli/README.md index 4e15cad318..d16048fdbf 100644 --- a/gen/dialogflow2-cli/README.md +++ b/gen/dialogflow2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dialogflow* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Dialogflow* API at revision *20230110*. The CLI is at version *5.0.2*. ```bash dialogflow2 [options] @@ -135,6 +135,7 @@ dialogflow2 [options] conversations-participants-suggestions-suggest-articles (-r )... [-p ]... [-o ] conversations-participants-suggestions-suggest-faq-answers (-r )... [-p ]... [-o ] conversations-participants-suggestions-suggest-smart-replies (-r )... [-p ]... [-o ] + conversations-suggestions-suggest-conversation-summary (-r )... [-p ]... [-o ] delete-agent [-p ]... [-o ] get-agent [-p ]... [-o ] knowledge-bases-create (-r )... [-p ]... [-o ] @@ -247,6 +248,7 @@ dialogflow2 [options] locations-conversations-participants-suggestions-suggest-articles (-r )... [-p ]... [-o ] locations-conversations-participants-suggestions-suggest-faq-answers (-r )... [-p ]... [-o ] locations-conversations-participants-suggestions-suggest-smart-replies (-r )... [-p ]... [-o ] + locations-conversations-suggestions-suggest-conversation-summary (-r )... [-p ]... [-o ] locations-delete-agent [-p ]... [-o ] locations-get [-p ]... [-o ] locations-get-agent [-p ]... 
[-o ] diff --git a/gen/dialogflow2-cli/mkdocs.yml b/gen/dialogflow2-cli/mkdocs.yml index d91c38a51d..67eb9312e9 100644 --- a/gen/dialogflow2-cli/mkdocs.yml +++ b/gen/dialogflow2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dialogflow v4.0.1+20220228 +site_name: Dialogflow v5.0.2+20230110 site_url: http://byron.github.io/google-apis-rs/google-dialogflow2-cli site_description: A complete library to interact with Dialogflow (protocol v2) @@ -7,250 +7,253 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_agent-entity-types-batch-delete.md', 'Projects', 'Agent Entity Types Batch Delete'] -- ['projects_agent-entity-types-batch-update.md', 'Projects', 'Agent Entity Types Batch Update'] -- ['projects_agent-entity-types-create.md', 'Projects', 'Agent Entity Types Create'] -- ['projects_agent-entity-types-delete.md', 'Projects', 'Agent Entity Types Delete'] -- ['projects_agent-entity-types-entities-batch-create.md', 'Projects', 'Agent Entity Types Entities Batch Create'] -- ['projects_agent-entity-types-entities-batch-delete.md', 'Projects', 'Agent Entity Types Entities Batch Delete'] -- ['projects_agent-entity-types-entities-batch-update.md', 'Projects', 'Agent Entity Types Entities Batch Update'] -- ['projects_agent-entity-types-get.md', 'Projects', 'Agent Entity Types Get'] -- ['projects_agent-entity-types-list.md', 'Projects', 'Agent Entity Types List'] -- ['projects_agent-entity-types-patch.md', 'Projects', 'Agent Entity Types Patch'] -- ['projects_agent-environments-create.md', 'Projects', 'Agent Environments Create'] -- ['projects_agent-environments-delete.md', 'Projects', 'Agent Environments Delete'] -- ['projects_agent-environments-get.md', 'Projects', 'Agent Environments Get'] -- ['projects_agent-environments-get-history.md', 'Projects', 'Agent Environments Get History'] -- ['projects_agent-environments-intents-list.md', 'Projects', 'Agent Environments 
Intents List'] -- ['projects_agent-environments-list.md', 'Projects', 'Agent Environments List'] -- ['projects_agent-environments-patch.md', 'Projects', 'Agent Environments Patch'] -- ['projects_agent-environments-users-sessions-contexts-create.md', 'Projects', 'Agent Environments Users Sessions Contexts Create'] -- ['projects_agent-environments-users-sessions-contexts-delete.md', 'Projects', 'Agent Environments Users Sessions Contexts Delete'] -- ['projects_agent-environments-users-sessions-contexts-get.md', 'Projects', 'Agent Environments Users Sessions Contexts Get'] -- ['projects_agent-environments-users-sessions-contexts-list.md', 'Projects', 'Agent Environments Users Sessions Contexts List'] -- ['projects_agent-environments-users-sessions-contexts-patch.md', 'Projects', 'Agent Environments Users Sessions Contexts Patch'] -- ['projects_agent-environments-users-sessions-delete-contexts.md', 'Projects', 'Agent Environments Users Sessions Delete Contexts'] -- ['projects_agent-environments-users-sessions-detect-intent.md', 'Projects', 'Agent Environments Users Sessions Detect Intent'] -- ['projects_agent-environments-users-sessions-entity-types-create.md', 'Projects', 'Agent Environments Users Sessions Entity Types Create'] -- ['projects_agent-environments-users-sessions-entity-types-delete.md', 'Projects', 'Agent Environments Users Sessions Entity Types Delete'] -- ['projects_agent-environments-users-sessions-entity-types-get.md', 'Projects', 'Agent Environments Users Sessions Entity Types Get'] -- ['projects_agent-environments-users-sessions-entity-types-list.md', 'Projects', 'Agent Environments Users Sessions Entity Types List'] -- ['projects_agent-environments-users-sessions-entity-types-patch.md', 'Projects', 'Agent Environments Users Sessions Entity Types Patch'] -- ['projects_agent-export.md', 'Projects', 'Agent Export'] -- ['projects_agent-get-fulfillment.md', 'Projects', 'Agent Get Fulfillment'] -- ['projects_agent-get-validation-result.md', 'Projects', 
'Agent Get Validation Result'] -- ['projects_agent-import.md', 'Projects', 'Agent Import'] -- ['projects_agent-intents-batch-delete.md', 'Projects', 'Agent Intents Batch Delete'] -- ['projects_agent-intents-batch-update.md', 'Projects', 'Agent Intents Batch Update'] -- ['projects_agent-intents-create.md', 'Projects', 'Agent Intents Create'] -- ['projects_agent-intents-delete.md', 'Projects', 'Agent Intents Delete'] -- ['projects_agent-intents-get.md', 'Projects', 'Agent Intents Get'] -- ['projects_agent-intents-list.md', 'Projects', 'Agent Intents List'] -- ['projects_agent-intents-patch.md', 'Projects', 'Agent Intents Patch'] -- ['projects_agent-knowledge-bases-create.md', 'Projects', 'Agent Knowledge Bases Create'] -- ['projects_agent-knowledge-bases-delete.md', 'Projects', 'Agent Knowledge Bases Delete'] -- ['projects_agent-knowledge-bases-documents-create.md', 'Projects', 'Agent Knowledge Bases Documents Create'] -- ['projects_agent-knowledge-bases-documents-delete.md', 'Projects', 'Agent Knowledge Bases Documents Delete'] -- ['projects_agent-knowledge-bases-documents-get.md', 'Projects', 'Agent Knowledge Bases Documents Get'] -- ['projects_agent-knowledge-bases-documents-list.md', 'Projects', 'Agent Knowledge Bases Documents List'] -- ['projects_agent-knowledge-bases-documents-patch.md', 'Projects', 'Agent Knowledge Bases Documents Patch'] -- ['projects_agent-knowledge-bases-documents-reload.md', 'Projects', 'Agent Knowledge Bases Documents Reload'] -- ['projects_agent-knowledge-bases-get.md', 'Projects', 'Agent Knowledge Bases Get'] -- ['projects_agent-knowledge-bases-list.md', 'Projects', 'Agent Knowledge Bases List'] -- ['projects_agent-knowledge-bases-patch.md', 'Projects', 'Agent Knowledge Bases Patch'] -- ['projects_agent-restore.md', 'Projects', 'Agent Restore'] -- ['projects_agent-search.md', 'Projects', 'Agent Search'] -- ['projects_agent-sessions-contexts-create.md', 'Projects', 'Agent Sessions Contexts Create'] -- 
['projects_agent-sessions-contexts-delete.md', 'Projects', 'Agent Sessions Contexts Delete'] -- ['projects_agent-sessions-contexts-get.md', 'Projects', 'Agent Sessions Contexts Get'] -- ['projects_agent-sessions-contexts-list.md', 'Projects', 'Agent Sessions Contexts List'] -- ['projects_agent-sessions-contexts-patch.md', 'Projects', 'Agent Sessions Contexts Patch'] -- ['projects_agent-sessions-delete-contexts.md', 'Projects', 'Agent Sessions Delete Contexts'] -- ['projects_agent-sessions-detect-intent.md', 'Projects', 'Agent Sessions Detect Intent'] -- ['projects_agent-sessions-entity-types-create.md', 'Projects', 'Agent Sessions Entity Types Create'] -- ['projects_agent-sessions-entity-types-delete.md', 'Projects', 'Agent Sessions Entity Types Delete'] -- ['projects_agent-sessions-entity-types-get.md', 'Projects', 'Agent Sessions Entity Types Get'] -- ['projects_agent-sessions-entity-types-list.md', 'Projects', 'Agent Sessions Entity Types List'] -- ['projects_agent-sessions-entity-types-patch.md', 'Projects', 'Agent Sessions Entity Types Patch'] -- ['projects_agent-train.md', 'Projects', 'Agent Train'] -- ['projects_agent-update-fulfillment.md', 'Projects', 'Agent Update Fulfillment'] -- ['projects_agent-versions-create.md', 'Projects', 'Agent Versions Create'] -- ['projects_agent-versions-delete.md', 'Projects', 'Agent Versions Delete'] -- ['projects_agent-versions-get.md', 'Projects', 'Agent Versions Get'] -- ['projects_agent-versions-list.md', 'Projects', 'Agent Versions List'] -- ['projects_agent-versions-patch.md', 'Projects', 'Agent Versions Patch'] -- ['projects_answer-records-list.md', 'Projects', 'Answer Records List'] -- ['projects_answer-records-patch.md', 'Projects', 'Answer Records Patch'] -- ['projects_conversation-datasets-get.md', 'Projects', 'Conversation Datasets Get'] -- ['projects_conversation-datasets-import-conversation-data.md', 'Projects', 'Conversation Datasets Import Conversation Data'] -- ['projects_conversation-datasets-list.md', 
'Projects', 'Conversation Datasets List'] -- ['projects_conversation-models-create.md', 'Projects', 'Conversation Models Create'] -- ['projects_conversation-models-delete.md', 'Projects', 'Conversation Models Delete'] -- ['projects_conversation-models-deploy.md', 'Projects', 'Conversation Models Deploy'] -- ['projects_conversation-models-evaluations-get.md', 'Projects', 'Conversation Models Evaluations Get'] -- ['projects_conversation-models-evaluations-list.md', 'Projects', 'Conversation Models Evaluations List'] -- ['projects_conversation-models-get.md', 'Projects', 'Conversation Models Get'] -- ['projects_conversation-models-list.md', 'Projects', 'Conversation Models List'] -- ['projects_conversation-models-undeploy.md', 'Projects', 'Conversation Models Undeploy'] -- ['projects_conversation-profiles-clear-suggestion-feature-config.md', 'Projects', 'Conversation Profiles Clear Suggestion Feature Config'] -- ['projects_conversation-profiles-create.md', 'Projects', 'Conversation Profiles Create'] -- ['projects_conversation-profiles-delete.md', 'Projects', 'Conversation Profiles Delete'] -- ['projects_conversation-profiles-get.md', 'Projects', 'Conversation Profiles Get'] -- ['projects_conversation-profiles-list.md', 'Projects', 'Conversation Profiles List'] -- ['projects_conversation-profiles-patch.md', 'Projects', 'Conversation Profiles Patch'] -- ['projects_conversation-profiles-set-suggestion-feature-config.md', 'Projects', 'Conversation Profiles Set Suggestion Feature Config'] -- ['projects_conversations-complete.md', 'Projects', 'Conversations Complete'] -- ['projects_conversations-create.md', 'Projects', 'Conversations Create'] -- ['projects_conversations-get.md', 'Projects', 'Conversations Get'] -- ['projects_conversations-list.md', 'Projects', 'Conversations List'] -- ['projects_conversations-messages-list.md', 'Projects', 'Conversations Messages List'] -- ['projects_conversations-participants-analyze-content.md', 'Projects', 'Conversations Participants 
Analyze Content'] -- ['projects_conversations-participants-create.md', 'Projects', 'Conversations Participants Create'] -- ['projects_conversations-participants-get.md', 'Projects', 'Conversations Participants Get'] -- ['projects_conversations-participants-list.md', 'Projects', 'Conversations Participants List'] -- ['projects_conversations-participants-patch.md', 'Projects', 'Conversations Participants Patch'] -- ['projects_conversations-participants-suggestions-suggest-articles.md', 'Projects', 'Conversations Participants Suggestions Suggest Articles'] -- ['projects_conversations-participants-suggestions-suggest-faq-answers.md', 'Projects', 'Conversations Participants Suggestions Suggest Faq Answers'] -- ['projects_conversations-participants-suggestions-suggest-smart-replies.md', 'Projects', 'Conversations Participants Suggestions Suggest Smart Replies'] -- ['projects_delete-agent.md', 'Projects', 'Delete Agent'] -- ['projects_get-agent.md', 'Projects', 'Get Agent'] -- ['projects_knowledge-bases-create.md', 'Projects', 'Knowledge Bases Create'] -- ['projects_knowledge-bases-delete.md', 'Projects', 'Knowledge Bases Delete'] -- ['projects_knowledge-bases-documents-create.md', 'Projects', 'Knowledge Bases Documents Create'] -- ['projects_knowledge-bases-documents-delete.md', 'Projects', 'Knowledge Bases Documents Delete'] -- ['projects_knowledge-bases-documents-export.md', 'Projects', 'Knowledge Bases Documents Export'] -- ['projects_knowledge-bases-documents-get.md', 'Projects', 'Knowledge Bases Documents Get'] -- ['projects_knowledge-bases-documents-import.md', 'Projects', 'Knowledge Bases Documents Import'] -- ['projects_knowledge-bases-documents-list.md', 'Projects', 'Knowledge Bases Documents List'] -- ['projects_knowledge-bases-documents-patch.md', 'Projects', 'Knowledge Bases Documents Patch'] -- ['projects_knowledge-bases-documents-reload.md', 'Projects', 'Knowledge Bases Documents Reload'] -- ['projects_knowledge-bases-get.md', 'Projects', 'Knowledge Bases 
Get'] -- ['projects_knowledge-bases-list.md', 'Projects', 'Knowledge Bases List'] -- ['projects_knowledge-bases-patch.md', 'Projects', 'Knowledge Bases Patch'] -- ['projects_locations-agent-entity-types-batch-delete.md', 'Projects', 'Locations Agent Entity Types Batch Delete'] -- ['projects_locations-agent-entity-types-batch-update.md', 'Projects', 'Locations Agent Entity Types Batch Update'] -- ['projects_locations-agent-entity-types-create.md', 'Projects', 'Locations Agent Entity Types Create'] -- ['projects_locations-agent-entity-types-delete.md', 'Projects', 'Locations Agent Entity Types Delete'] -- ['projects_locations-agent-entity-types-entities-batch-create.md', 'Projects', 'Locations Agent Entity Types Entities Batch Create'] -- ['projects_locations-agent-entity-types-entities-batch-delete.md', 'Projects', 'Locations Agent Entity Types Entities Batch Delete'] -- ['projects_locations-agent-entity-types-entities-batch-update.md', 'Projects', 'Locations Agent Entity Types Entities Batch Update'] -- ['projects_locations-agent-entity-types-get.md', 'Projects', 'Locations Agent Entity Types Get'] -- ['projects_locations-agent-entity-types-list.md', 'Projects', 'Locations Agent Entity Types List'] -- ['projects_locations-agent-entity-types-patch.md', 'Projects', 'Locations Agent Entity Types Patch'] -- ['projects_locations-agent-environments-create.md', 'Projects', 'Locations Agent Environments Create'] -- ['projects_locations-agent-environments-delete.md', 'Projects', 'Locations Agent Environments Delete'] -- ['projects_locations-agent-environments-get.md', 'Projects', 'Locations Agent Environments Get'] -- ['projects_locations-agent-environments-get-history.md', 'Projects', 'Locations Agent Environments Get History'] -- ['projects_locations-agent-environments-intents-list.md', 'Projects', 'Locations Agent Environments Intents List'] -- ['projects_locations-agent-environments-list.md', 'Projects', 'Locations Agent Environments List'] -- 
['projects_locations-agent-environments-patch.md', 'Projects', 'Locations Agent Environments Patch'] -- ['projects_locations-agent-environments-users-sessions-contexts-create.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts Create'] -- ['projects_locations-agent-environments-users-sessions-contexts-delete.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts Delete'] -- ['projects_locations-agent-environments-users-sessions-contexts-get.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts Get'] -- ['projects_locations-agent-environments-users-sessions-contexts-list.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts List'] -- ['projects_locations-agent-environments-users-sessions-contexts-patch.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts Patch'] -- ['projects_locations-agent-environments-users-sessions-delete-contexts.md', 'Projects', 'Locations Agent Environments Users Sessions Delete Contexts'] -- ['projects_locations-agent-environments-users-sessions-detect-intent.md', 'Projects', 'Locations Agent Environments Users Sessions Detect Intent'] -- ['projects_locations-agent-environments-users-sessions-entity-types-create.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types Create'] -- ['projects_locations-agent-environments-users-sessions-entity-types-delete.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types Delete'] -- ['projects_locations-agent-environments-users-sessions-entity-types-get.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types Get'] -- ['projects_locations-agent-environments-users-sessions-entity-types-list.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types List'] -- ['projects_locations-agent-environments-users-sessions-entity-types-patch.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types Patch'] -- 
['projects_locations-agent-export.md', 'Projects', 'Locations Agent Export'] -- ['projects_locations-agent-get-fulfillment.md', 'Projects', 'Locations Agent Get Fulfillment'] -- ['projects_locations-agent-get-validation-result.md', 'Projects', 'Locations Agent Get Validation Result'] -- ['projects_locations-agent-import.md', 'Projects', 'Locations Agent Import'] -- ['projects_locations-agent-intents-batch-delete.md', 'Projects', 'Locations Agent Intents Batch Delete'] -- ['projects_locations-agent-intents-batch-update.md', 'Projects', 'Locations Agent Intents Batch Update'] -- ['projects_locations-agent-intents-create.md', 'Projects', 'Locations Agent Intents Create'] -- ['projects_locations-agent-intents-delete.md', 'Projects', 'Locations Agent Intents Delete'] -- ['projects_locations-agent-intents-get.md', 'Projects', 'Locations Agent Intents Get'] -- ['projects_locations-agent-intents-list.md', 'Projects', 'Locations Agent Intents List'] -- ['projects_locations-agent-intents-patch.md', 'Projects', 'Locations Agent Intents Patch'] -- ['projects_locations-agent-restore.md', 'Projects', 'Locations Agent Restore'] -- ['projects_locations-agent-search.md', 'Projects', 'Locations Agent Search'] -- ['projects_locations-agent-sessions-contexts-create.md', 'Projects', 'Locations Agent Sessions Contexts Create'] -- ['projects_locations-agent-sessions-contexts-delete.md', 'Projects', 'Locations Agent Sessions Contexts Delete'] -- ['projects_locations-agent-sessions-contexts-get.md', 'Projects', 'Locations Agent Sessions Contexts Get'] -- ['projects_locations-agent-sessions-contexts-list.md', 'Projects', 'Locations Agent Sessions Contexts List'] -- ['projects_locations-agent-sessions-contexts-patch.md', 'Projects', 'Locations Agent Sessions Contexts Patch'] -- ['projects_locations-agent-sessions-delete-contexts.md', 'Projects', 'Locations Agent Sessions Delete Contexts'] -- ['projects_locations-agent-sessions-detect-intent.md', 'Projects', 'Locations Agent Sessions Detect 
Intent'] -- ['projects_locations-agent-sessions-entity-types-create.md', 'Projects', 'Locations Agent Sessions Entity Types Create'] -- ['projects_locations-agent-sessions-entity-types-delete.md', 'Projects', 'Locations Agent Sessions Entity Types Delete'] -- ['projects_locations-agent-sessions-entity-types-get.md', 'Projects', 'Locations Agent Sessions Entity Types Get'] -- ['projects_locations-agent-sessions-entity-types-list.md', 'Projects', 'Locations Agent Sessions Entity Types List'] -- ['projects_locations-agent-sessions-entity-types-patch.md', 'Projects', 'Locations Agent Sessions Entity Types Patch'] -- ['projects_locations-agent-train.md', 'Projects', 'Locations Agent Train'] -- ['projects_locations-agent-update-fulfillment.md', 'Projects', 'Locations Agent Update Fulfillment'] -- ['projects_locations-agent-versions-create.md', 'Projects', 'Locations Agent Versions Create'] -- ['projects_locations-agent-versions-delete.md', 'Projects', 'Locations Agent Versions Delete'] -- ['projects_locations-agent-versions-get.md', 'Projects', 'Locations Agent Versions Get'] -- ['projects_locations-agent-versions-list.md', 'Projects', 'Locations Agent Versions List'] -- ['projects_locations-agent-versions-patch.md', 'Projects', 'Locations Agent Versions Patch'] -- ['projects_locations-answer-records-list.md', 'Projects', 'Locations Answer Records List'] -- ['projects_locations-answer-records-patch.md', 'Projects', 'Locations Answer Records Patch'] -- ['projects_locations-conversation-datasets-create.md', 'Projects', 'Locations Conversation Datasets Create'] -- ['projects_locations-conversation-datasets-delete.md', 'Projects', 'Locations Conversation Datasets Delete'] -- ['projects_locations-conversation-datasets-get.md', 'Projects', 'Locations Conversation Datasets Get'] -- ['projects_locations-conversation-datasets-import-conversation-data.md', 'Projects', 'Locations Conversation Datasets Import Conversation Data'] -- 
['projects_locations-conversation-datasets-list.md', 'Projects', 'Locations Conversation Datasets List'] -- ['projects_locations-conversation-models-create.md', 'Projects', 'Locations Conversation Models Create'] -- ['projects_locations-conversation-models-delete.md', 'Projects', 'Locations Conversation Models Delete'] -- ['projects_locations-conversation-models-deploy.md', 'Projects', 'Locations Conversation Models Deploy'] -- ['projects_locations-conversation-models-evaluations-create.md', 'Projects', 'Locations Conversation Models Evaluations Create'] -- ['projects_locations-conversation-models-evaluations-get.md', 'Projects', 'Locations Conversation Models Evaluations Get'] -- ['projects_locations-conversation-models-evaluations-list.md', 'Projects', 'Locations Conversation Models Evaluations List'] -- ['projects_locations-conversation-models-get.md', 'Projects', 'Locations Conversation Models Get'] -- ['projects_locations-conversation-models-list.md', 'Projects', 'Locations Conversation Models List'] -- ['projects_locations-conversation-models-undeploy.md', 'Projects', 'Locations Conversation Models Undeploy'] -- ['projects_locations-conversation-profiles-clear-suggestion-feature-config.md', 'Projects', 'Locations Conversation Profiles Clear Suggestion Feature Config'] -- ['projects_locations-conversation-profiles-create.md', 'Projects', 'Locations Conversation Profiles Create'] -- ['projects_locations-conversation-profiles-delete.md', 'Projects', 'Locations Conversation Profiles Delete'] -- ['projects_locations-conversation-profiles-get.md', 'Projects', 'Locations Conversation Profiles Get'] -- ['projects_locations-conversation-profiles-list.md', 'Projects', 'Locations Conversation Profiles List'] -- ['projects_locations-conversation-profiles-patch.md', 'Projects', 'Locations Conversation Profiles Patch'] -- ['projects_locations-conversation-profiles-set-suggestion-feature-config.md', 'Projects', 'Locations Conversation Profiles Set Suggestion Feature 
Config'] -- ['projects_locations-conversations-complete.md', 'Projects', 'Locations Conversations Complete'] -- ['projects_locations-conversations-create.md', 'Projects', 'Locations Conversations Create'] -- ['projects_locations-conversations-get.md', 'Projects', 'Locations Conversations Get'] -- ['projects_locations-conversations-list.md', 'Projects', 'Locations Conversations List'] -- ['projects_locations-conversations-messages-list.md', 'Projects', 'Locations Conversations Messages List'] -- ['projects_locations-conversations-participants-analyze-content.md', 'Projects', 'Locations Conversations Participants Analyze Content'] -- ['projects_locations-conversations-participants-create.md', 'Projects', 'Locations Conversations Participants Create'] -- ['projects_locations-conversations-participants-get.md', 'Projects', 'Locations Conversations Participants Get'] -- ['projects_locations-conversations-participants-list.md', 'Projects', 'Locations Conversations Participants List'] -- ['projects_locations-conversations-participants-patch.md', 'Projects', 'Locations Conversations Participants Patch'] -- ['projects_locations-conversations-participants-suggestions-suggest-articles.md', 'Projects', 'Locations Conversations Participants Suggestions Suggest Articles'] -- ['projects_locations-conversations-participants-suggestions-suggest-faq-answers.md', 'Projects', 'Locations Conversations Participants Suggestions Suggest Faq Answers'] -- ['projects_locations-conversations-participants-suggestions-suggest-smart-replies.md', 'Projects', 'Locations Conversations Participants Suggestions Suggest Smart Replies'] -- ['projects_locations-delete-agent.md', 'Projects', 'Locations Delete Agent'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-get-agent.md', 'Projects', 'Locations Get Agent'] -- ['projects_locations-knowledge-bases-create.md', 'Projects', 'Locations Knowledge Bases Create'] -- ['projects_locations-knowledge-bases-delete.md', 
'Projects', 'Locations Knowledge Bases Delete'] -- ['projects_locations-knowledge-bases-documents-create.md', 'Projects', 'Locations Knowledge Bases Documents Create'] -- ['projects_locations-knowledge-bases-documents-delete.md', 'Projects', 'Locations Knowledge Bases Documents Delete'] -- ['projects_locations-knowledge-bases-documents-export.md', 'Projects', 'Locations Knowledge Bases Documents Export'] -- ['projects_locations-knowledge-bases-documents-get.md', 'Projects', 'Locations Knowledge Bases Documents Get'] -- ['projects_locations-knowledge-bases-documents-import.md', 'Projects', 'Locations Knowledge Bases Documents Import'] -- ['projects_locations-knowledge-bases-documents-list.md', 'Projects', 'Locations Knowledge Bases Documents List'] -- ['projects_locations-knowledge-bases-documents-patch.md', 'Projects', 'Locations Knowledge Bases Documents Patch'] -- ['projects_locations-knowledge-bases-documents-reload.md', 'Projects', 'Locations Knowledge Bases Documents Reload'] -- ['projects_locations-knowledge-bases-get.md', 'Projects', 'Locations Knowledge Bases Get'] -- ['projects_locations-knowledge-bases-list.md', 'Projects', 'Locations Knowledge Bases List'] -- ['projects_locations-knowledge-bases-patch.md', 'Projects', 'Locations Knowledge Bases Patch'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-set-agent.md', 'Projects', 'Locations Set Agent'] -- ['projects_operations-cancel.md', 'Projects', 'Operations Cancel'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_operations-list.md', 'Projects', 'Operations List'] -- ['projects_set-agent.md', 'Projects', 'Set Agent'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Agent Entity 
Types Batch Delete': 'projects_agent-entity-types-batch-delete.md' + - 'Agent Entity Types Batch Update': 'projects_agent-entity-types-batch-update.md' + - 'Agent Entity Types Create': 'projects_agent-entity-types-create.md' + - 'Agent Entity Types Delete': 'projects_agent-entity-types-delete.md' + - 'Agent Entity Types Entities Batch Create': 'projects_agent-entity-types-entities-batch-create.md' + - 'Agent Entity Types Entities Batch Delete': 'projects_agent-entity-types-entities-batch-delete.md' + - 'Agent Entity Types Entities Batch Update': 'projects_agent-entity-types-entities-batch-update.md' + - 'Agent Entity Types Get': 'projects_agent-entity-types-get.md' + - 'Agent Entity Types List': 'projects_agent-entity-types-list.md' + - 'Agent Entity Types Patch': 'projects_agent-entity-types-patch.md' + - 'Agent Environments Create': 'projects_agent-environments-create.md' + - 'Agent Environments Delete': 'projects_agent-environments-delete.md' + - 'Agent Environments Get': 'projects_agent-environments-get.md' + - 'Agent Environments Get History': 'projects_agent-environments-get-history.md' + - 'Agent Environments Intents List': 'projects_agent-environments-intents-list.md' + - 'Agent Environments List': 'projects_agent-environments-list.md' + - 'Agent Environments Patch': 'projects_agent-environments-patch.md' + - 'Agent Environments Users Sessions Contexts Create': 'projects_agent-environments-users-sessions-contexts-create.md' + - 'Agent Environments Users Sessions Contexts Delete': 'projects_agent-environments-users-sessions-contexts-delete.md' + - 'Agent Environments Users Sessions Contexts Get': 'projects_agent-environments-users-sessions-contexts-get.md' + - 'Agent Environments Users Sessions Contexts List': 'projects_agent-environments-users-sessions-contexts-list.md' + - 'Agent Environments Users Sessions Contexts Patch': 'projects_agent-environments-users-sessions-contexts-patch.md' + - 'Agent Environments Users Sessions Delete Contexts': 
'projects_agent-environments-users-sessions-delete-contexts.md' + - 'Agent Environments Users Sessions Detect Intent': 'projects_agent-environments-users-sessions-detect-intent.md' + - 'Agent Environments Users Sessions Entity Types Create': 'projects_agent-environments-users-sessions-entity-types-create.md' + - 'Agent Environments Users Sessions Entity Types Delete': 'projects_agent-environments-users-sessions-entity-types-delete.md' + - 'Agent Environments Users Sessions Entity Types Get': 'projects_agent-environments-users-sessions-entity-types-get.md' + - 'Agent Environments Users Sessions Entity Types List': 'projects_agent-environments-users-sessions-entity-types-list.md' + - 'Agent Environments Users Sessions Entity Types Patch': 'projects_agent-environments-users-sessions-entity-types-patch.md' + - 'Agent Export': 'projects_agent-export.md' + - 'Agent Get Fulfillment': 'projects_agent-get-fulfillment.md' + - 'Agent Get Validation Result': 'projects_agent-get-validation-result.md' + - 'Agent Import': 'projects_agent-import.md' + - 'Agent Intents Batch Delete': 'projects_agent-intents-batch-delete.md' + - 'Agent Intents Batch Update': 'projects_agent-intents-batch-update.md' + - 'Agent Intents Create': 'projects_agent-intents-create.md' + - 'Agent Intents Delete': 'projects_agent-intents-delete.md' + - 'Agent Intents Get': 'projects_agent-intents-get.md' + - 'Agent Intents List': 'projects_agent-intents-list.md' + - 'Agent Intents Patch': 'projects_agent-intents-patch.md' + - 'Agent Knowledge Bases Create': 'projects_agent-knowledge-bases-create.md' + - 'Agent Knowledge Bases Delete': 'projects_agent-knowledge-bases-delete.md' + - 'Agent Knowledge Bases Documents Create': 'projects_agent-knowledge-bases-documents-create.md' + - 'Agent Knowledge Bases Documents Delete': 'projects_agent-knowledge-bases-documents-delete.md' + - 'Agent Knowledge Bases Documents Get': 'projects_agent-knowledge-bases-documents-get.md' + - 'Agent Knowledge Bases Documents List': 
'projects_agent-knowledge-bases-documents-list.md' + - 'Agent Knowledge Bases Documents Patch': 'projects_agent-knowledge-bases-documents-patch.md' + - 'Agent Knowledge Bases Documents Reload': 'projects_agent-knowledge-bases-documents-reload.md' + - 'Agent Knowledge Bases Get': 'projects_agent-knowledge-bases-get.md' + - 'Agent Knowledge Bases List': 'projects_agent-knowledge-bases-list.md' + - 'Agent Knowledge Bases Patch': 'projects_agent-knowledge-bases-patch.md' + - 'Agent Restore': 'projects_agent-restore.md' + - 'Agent Search': 'projects_agent-search.md' + - 'Agent Sessions Contexts Create': 'projects_agent-sessions-contexts-create.md' + - 'Agent Sessions Contexts Delete': 'projects_agent-sessions-contexts-delete.md' + - 'Agent Sessions Contexts Get': 'projects_agent-sessions-contexts-get.md' + - 'Agent Sessions Contexts List': 'projects_agent-sessions-contexts-list.md' + - 'Agent Sessions Contexts Patch': 'projects_agent-sessions-contexts-patch.md' + - 'Agent Sessions Delete Contexts': 'projects_agent-sessions-delete-contexts.md' + - 'Agent Sessions Detect Intent': 'projects_agent-sessions-detect-intent.md' + - 'Agent Sessions Entity Types Create': 'projects_agent-sessions-entity-types-create.md' + - 'Agent Sessions Entity Types Delete': 'projects_agent-sessions-entity-types-delete.md' + - 'Agent Sessions Entity Types Get': 'projects_agent-sessions-entity-types-get.md' + - 'Agent Sessions Entity Types List': 'projects_agent-sessions-entity-types-list.md' + - 'Agent Sessions Entity Types Patch': 'projects_agent-sessions-entity-types-patch.md' + - 'Agent Train': 'projects_agent-train.md' + - 'Agent Update Fulfillment': 'projects_agent-update-fulfillment.md' + - 'Agent Versions Create': 'projects_agent-versions-create.md' + - 'Agent Versions Delete': 'projects_agent-versions-delete.md' + - 'Agent Versions Get': 'projects_agent-versions-get.md' + - 'Agent Versions List': 'projects_agent-versions-list.md' + - 'Agent Versions Patch': 
'projects_agent-versions-patch.md' + - 'Answer Records List': 'projects_answer-records-list.md' + - 'Answer Records Patch': 'projects_answer-records-patch.md' + - 'Conversation Datasets Get': 'projects_conversation-datasets-get.md' + - 'Conversation Datasets Import Conversation Data': 'projects_conversation-datasets-import-conversation-data.md' + - 'Conversation Datasets List': 'projects_conversation-datasets-list.md' + - 'Conversation Models Create': 'projects_conversation-models-create.md' + - 'Conversation Models Delete': 'projects_conversation-models-delete.md' + - 'Conversation Models Deploy': 'projects_conversation-models-deploy.md' + - 'Conversation Models Evaluations Get': 'projects_conversation-models-evaluations-get.md' + - 'Conversation Models Evaluations List': 'projects_conversation-models-evaluations-list.md' + - 'Conversation Models Get': 'projects_conversation-models-get.md' + - 'Conversation Models List': 'projects_conversation-models-list.md' + - 'Conversation Models Undeploy': 'projects_conversation-models-undeploy.md' + - 'Conversation Profiles Clear Suggestion Feature Config': 'projects_conversation-profiles-clear-suggestion-feature-config.md' + - 'Conversation Profiles Create': 'projects_conversation-profiles-create.md' + - 'Conversation Profiles Delete': 'projects_conversation-profiles-delete.md' + - 'Conversation Profiles Get': 'projects_conversation-profiles-get.md' + - 'Conversation Profiles List': 'projects_conversation-profiles-list.md' + - 'Conversation Profiles Patch': 'projects_conversation-profiles-patch.md' + - 'Conversation Profiles Set Suggestion Feature Config': 'projects_conversation-profiles-set-suggestion-feature-config.md' + - 'Conversations Complete': 'projects_conversations-complete.md' + - 'Conversations Create': 'projects_conversations-create.md' + - 'Conversations Get': 'projects_conversations-get.md' + - 'Conversations List': 'projects_conversations-list.md' + - 'Conversations Messages List': 
'projects_conversations-messages-list.md' + - 'Conversations Participants Analyze Content': 'projects_conversations-participants-analyze-content.md' + - 'Conversations Participants Create': 'projects_conversations-participants-create.md' + - 'Conversations Participants Get': 'projects_conversations-participants-get.md' + - 'Conversations Participants List': 'projects_conversations-participants-list.md' + - 'Conversations Participants Patch': 'projects_conversations-participants-patch.md' + - 'Conversations Participants Suggestions Suggest Articles': 'projects_conversations-participants-suggestions-suggest-articles.md' + - 'Conversations Participants Suggestions Suggest Faq Answers': 'projects_conversations-participants-suggestions-suggest-faq-answers.md' + - 'Conversations Participants Suggestions Suggest Smart Replies': 'projects_conversations-participants-suggestions-suggest-smart-replies.md' + - 'Conversations Suggestions Suggest Conversation Summary': 'projects_conversations-suggestions-suggest-conversation-summary.md' + - 'Delete Agent': 'projects_delete-agent.md' + - 'Get Agent': 'projects_get-agent.md' + - 'Knowledge Bases Create': 'projects_knowledge-bases-create.md' + - 'Knowledge Bases Delete': 'projects_knowledge-bases-delete.md' + - 'Knowledge Bases Documents Create': 'projects_knowledge-bases-documents-create.md' + - 'Knowledge Bases Documents Delete': 'projects_knowledge-bases-documents-delete.md' + - 'Knowledge Bases Documents Export': 'projects_knowledge-bases-documents-export.md' + - 'Knowledge Bases Documents Get': 'projects_knowledge-bases-documents-get.md' + - 'Knowledge Bases Documents Import': 'projects_knowledge-bases-documents-import.md' + - 'Knowledge Bases Documents List': 'projects_knowledge-bases-documents-list.md' + - 'Knowledge Bases Documents Patch': 'projects_knowledge-bases-documents-patch.md' + - 'Knowledge Bases Documents Reload': 'projects_knowledge-bases-documents-reload.md' + - 'Knowledge Bases Get': 
'projects_knowledge-bases-get.md' + - 'Knowledge Bases List': 'projects_knowledge-bases-list.md' + - 'Knowledge Bases Patch': 'projects_knowledge-bases-patch.md' + - 'Locations Agent Entity Types Batch Delete': 'projects_locations-agent-entity-types-batch-delete.md' + - 'Locations Agent Entity Types Batch Update': 'projects_locations-agent-entity-types-batch-update.md' + - 'Locations Agent Entity Types Create': 'projects_locations-agent-entity-types-create.md' + - 'Locations Agent Entity Types Delete': 'projects_locations-agent-entity-types-delete.md' + - 'Locations Agent Entity Types Entities Batch Create': 'projects_locations-agent-entity-types-entities-batch-create.md' + - 'Locations Agent Entity Types Entities Batch Delete': 'projects_locations-agent-entity-types-entities-batch-delete.md' + - 'Locations Agent Entity Types Entities Batch Update': 'projects_locations-agent-entity-types-entities-batch-update.md' + - 'Locations Agent Entity Types Get': 'projects_locations-agent-entity-types-get.md' + - 'Locations Agent Entity Types List': 'projects_locations-agent-entity-types-list.md' + - 'Locations Agent Entity Types Patch': 'projects_locations-agent-entity-types-patch.md' + - 'Locations Agent Environments Create': 'projects_locations-agent-environments-create.md' + - 'Locations Agent Environments Delete': 'projects_locations-agent-environments-delete.md' + - 'Locations Agent Environments Get': 'projects_locations-agent-environments-get.md' + - 'Locations Agent Environments Get History': 'projects_locations-agent-environments-get-history.md' + - 'Locations Agent Environments Intents List': 'projects_locations-agent-environments-intents-list.md' + - 'Locations Agent Environments List': 'projects_locations-agent-environments-list.md' + - 'Locations Agent Environments Patch': 'projects_locations-agent-environments-patch.md' + - 'Locations Agent Environments Users Sessions Contexts Create': 'projects_locations-agent-environments-users-sessions-contexts-create.md' + - 
'Locations Agent Environments Users Sessions Contexts Delete': 'projects_locations-agent-environments-users-sessions-contexts-delete.md' + - 'Locations Agent Environments Users Sessions Contexts Get': 'projects_locations-agent-environments-users-sessions-contexts-get.md' + - 'Locations Agent Environments Users Sessions Contexts List': 'projects_locations-agent-environments-users-sessions-contexts-list.md' + - 'Locations Agent Environments Users Sessions Contexts Patch': 'projects_locations-agent-environments-users-sessions-contexts-patch.md' + - 'Locations Agent Environments Users Sessions Delete Contexts': 'projects_locations-agent-environments-users-sessions-delete-contexts.md' + - 'Locations Agent Environments Users Sessions Detect Intent': 'projects_locations-agent-environments-users-sessions-detect-intent.md' + - 'Locations Agent Environments Users Sessions Entity Types Create': 'projects_locations-agent-environments-users-sessions-entity-types-create.md' + - 'Locations Agent Environments Users Sessions Entity Types Delete': 'projects_locations-agent-environments-users-sessions-entity-types-delete.md' + - 'Locations Agent Environments Users Sessions Entity Types Get': 'projects_locations-agent-environments-users-sessions-entity-types-get.md' + - 'Locations Agent Environments Users Sessions Entity Types List': 'projects_locations-agent-environments-users-sessions-entity-types-list.md' + - 'Locations Agent Environments Users Sessions Entity Types Patch': 'projects_locations-agent-environments-users-sessions-entity-types-patch.md' + - 'Locations Agent Export': 'projects_locations-agent-export.md' + - 'Locations Agent Get Fulfillment': 'projects_locations-agent-get-fulfillment.md' + - 'Locations Agent Get Validation Result': 'projects_locations-agent-get-validation-result.md' + - 'Locations Agent Import': 'projects_locations-agent-import.md' + - 'Locations Agent Intents Batch Delete': 'projects_locations-agent-intents-batch-delete.md' + - 'Locations Agent Intents 
Batch Update': 'projects_locations-agent-intents-batch-update.md' + - 'Locations Agent Intents Create': 'projects_locations-agent-intents-create.md' + - 'Locations Agent Intents Delete': 'projects_locations-agent-intents-delete.md' + - 'Locations Agent Intents Get': 'projects_locations-agent-intents-get.md' + - 'Locations Agent Intents List': 'projects_locations-agent-intents-list.md' + - 'Locations Agent Intents Patch': 'projects_locations-agent-intents-patch.md' + - 'Locations Agent Restore': 'projects_locations-agent-restore.md' + - 'Locations Agent Search': 'projects_locations-agent-search.md' + - 'Locations Agent Sessions Contexts Create': 'projects_locations-agent-sessions-contexts-create.md' + - 'Locations Agent Sessions Contexts Delete': 'projects_locations-agent-sessions-contexts-delete.md' + - 'Locations Agent Sessions Contexts Get': 'projects_locations-agent-sessions-contexts-get.md' + - 'Locations Agent Sessions Contexts List': 'projects_locations-agent-sessions-contexts-list.md' + - 'Locations Agent Sessions Contexts Patch': 'projects_locations-agent-sessions-contexts-patch.md' + - 'Locations Agent Sessions Delete Contexts': 'projects_locations-agent-sessions-delete-contexts.md' + - 'Locations Agent Sessions Detect Intent': 'projects_locations-agent-sessions-detect-intent.md' + - 'Locations Agent Sessions Entity Types Create': 'projects_locations-agent-sessions-entity-types-create.md' + - 'Locations Agent Sessions Entity Types Delete': 'projects_locations-agent-sessions-entity-types-delete.md' + - 'Locations Agent Sessions Entity Types Get': 'projects_locations-agent-sessions-entity-types-get.md' + - 'Locations Agent Sessions Entity Types List': 'projects_locations-agent-sessions-entity-types-list.md' + - 'Locations Agent Sessions Entity Types Patch': 'projects_locations-agent-sessions-entity-types-patch.md' + - 'Locations Agent Train': 'projects_locations-agent-train.md' + - 'Locations Agent Update Fulfillment': 
'projects_locations-agent-update-fulfillment.md' + - 'Locations Agent Versions Create': 'projects_locations-agent-versions-create.md' + - 'Locations Agent Versions Delete': 'projects_locations-agent-versions-delete.md' + - 'Locations Agent Versions Get': 'projects_locations-agent-versions-get.md' + - 'Locations Agent Versions List': 'projects_locations-agent-versions-list.md' + - 'Locations Agent Versions Patch': 'projects_locations-agent-versions-patch.md' + - 'Locations Answer Records List': 'projects_locations-answer-records-list.md' + - 'Locations Answer Records Patch': 'projects_locations-answer-records-patch.md' + - 'Locations Conversation Datasets Create': 'projects_locations-conversation-datasets-create.md' + - 'Locations Conversation Datasets Delete': 'projects_locations-conversation-datasets-delete.md' + - 'Locations Conversation Datasets Get': 'projects_locations-conversation-datasets-get.md' + - 'Locations Conversation Datasets Import Conversation Data': 'projects_locations-conversation-datasets-import-conversation-data.md' + - 'Locations Conversation Datasets List': 'projects_locations-conversation-datasets-list.md' + - 'Locations Conversation Models Create': 'projects_locations-conversation-models-create.md' + - 'Locations Conversation Models Delete': 'projects_locations-conversation-models-delete.md' + - 'Locations Conversation Models Deploy': 'projects_locations-conversation-models-deploy.md' + - 'Locations Conversation Models Evaluations Create': 'projects_locations-conversation-models-evaluations-create.md' + - 'Locations Conversation Models Evaluations Get': 'projects_locations-conversation-models-evaluations-get.md' + - 'Locations Conversation Models Evaluations List': 'projects_locations-conversation-models-evaluations-list.md' + - 'Locations Conversation Models Get': 'projects_locations-conversation-models-get.md' + - 'Locations Conversation Models List': 'projects_locations-conversation-models-list.md' + - 'Locations Conversation Models 
Undeploy': 'projects_locations-conversation-models-undeploy.md' + - 'Locations Conversation Profiles Clear Suggestion Feature Config': 'projects_locations-conversation-profiles-clear-suggestion-feature-config.md' + - 'Locations Conversation Profiles Create': 'projects_locations-conversation-profiles-create.md' + - 'Locations Conversation Profiles Delete': 'projects_locations-conversation-profiles-delete.md' + - 'Locations Conversation Profiles Get': 'projects_locations-conversation-profiles-get.md' + - 'Locations Conversation Profiles List': 'projects_locations-conversation-profiles-list.md' + - 'Locations Conversation Profiles Patch': 'projects_locations-conversation-profiles-patch.md' + - 'Locations Conversation Profiles Set Suggestion Feature Config': 'projects_locations-conversation-profiles-set-suggestion-feature-config.md' + - 'Locations Conversations Complete': 'projects_locations-conversations-complete.md' + - 'Locations Conversations Create': 'projects_locations-conversations-create.md' + - 'Locations Conversations Get': 'projects_locations-conversations-get.md' + - 'Locations Conversations List': 'projects_locations-conversations-list.md' + - 'Locations Conversations Messages List': 'projects_locations-conversations-messages-list.md' + - 'Locations Conversations Participants Analyze Content': 'projects_locations-conversations-participants-analyze-content.md' + - 'Locations Conversations Participants Create': 'projects_locations-conversations-participants-create.md' + - 'Locations Conversations Participants Get': 'projects_locations-conversations-participants-get.md' + - 'Locations Conversations Participants List': 'projects_locations-conversations-participants-list.md' + - 'Locations Conversations Participants Patch': 'projects_locations-conversations-participants-patch.md' + - 'Locations Conversations Participants Suggestions Suggest Articles': 'projects_locations-conversations-participants-suggestions-suggest-articles.md' + - 'Locations Conversations 
Participants Suggestions Suggest Faq Answers': 'projects_locations-conversations-participants-suggestions-suggest-faq-answers.md' + - 'Locations Conversations Participants Suggestions Suggest Smart Replies': 'projects_locations-conversations-participants-suggestions-suggest-smart-replies.md' + - 'Locations Conversations Suggestions Suggest Conversation Summary': 'projects_locations-conversations-suggestions-suggest-conversation-summary.md' + - 'Locations Delete Agent': 'projects_locations-delete-agent.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Get Agent': 'projects_locations-get-agent.md' + - 'Locations Knowledge Bases Create': 'projects_locations-knowledge-bases-create.md' + - 'Locations Knowledge Bases Delete': 'projects_locations-knowledge-bases-delete.md' + - 'Locations Knowledge Bases Documents Create': 'projects_locations-knowledge-bases-documents-create.md' + - 'Locations Knowledge Bases Documents Delete': 'projects_locations-knowledge-bases-documents-delete.md' + - 'Locations Knowledge Bases Documents Export': 'projects_locations-knowledge-bases-documents-export.md' + - 'Locations Knowledge Bases Documents Get': 'projects_locations-knowledge-bases-documents-get.md' + - 'Locations Knowledge Bases Documents Import': 'projects_locations-knowledge-bases-documents-import.md' + - 'Locations Knowledge Bases Documents List': 'projects_locations-knowledge-bases-documents-list.md' + - 'Locations Knowledge Bases Documents Patch': 'projects_locations-knowledge-bases-documents-patch.md' + - 'Locations Knowledge Bases Documents Reload': 'projects_locations-knowledge-bases-documents-reload.md' + - 'Locations Knowledge Bases Get': 'projects_locations-knowledge-bases-get.md' + - 'Locations Knowledge Bases List': 'projects_locations-knowledge-bases-list.md' + - 'Locations Knowledge Bases Patch': 'projects_locations-knowledge-bases-patch.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 
'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Set Agent': 'projects_locations-set-agent.md' + - 'Operations Cancel': 'projects_operations-cancel.md' + - 'Operations Get': 'projects_operations-get.md' + - 'Operations List': 'projects_operations-list.md' + - 'Set Agent': 'projects_set-agent.md' theme: readthedocs diff --git a/gen/dialogflow2-cli/src/client.rs b/gen/dialogflow2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dialogflow2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dialogflow2-cli/src/main.rs b/gen/dialogflow2-cli/src/main.rs index 33c04cbab9..8fdcc0b987 100644 --- a/gen/dialogflow2-cli/src/main.rs +++ b/gen/dialogflow2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dialogflow2::{api, Error, oauth2}; +use google_dialogflow2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -691,7 +690,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -787,7 +786,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -1057,7 +1056,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1116,7 +1115,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -1181,7 +1180,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1285,10 +1284,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "allow-load-to-draft-and-discard-changes" => { - call = call.allow_load_to_draft_and_discard_changes(arg_from_str(value.unwrap_or("false"), err, "allow-load-to-draft-and-discard-changes", "boolean")); + call = call.allow_load_to_draft_and_discard_changes( value.map(|v| arg_from_str(v, err, "allow-load-to-draft-and-discard-changes", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1537,7 +1536,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1627,7 +1626,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2041,7 +2040,7 @@ where 
call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2131,7 +2130,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2858,7 +2857,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -2967,7 +2966,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -3116,7 +3115,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3375,7 +3374,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ 
-3478,7 +3477,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3676,7 +3675,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3770,7 +3769,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3915,7 +3914,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4164,7 +4163,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4254,7 +4253,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4668,7 +4667,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4758,7 +4757,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4938,7 +4937,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5190,7 +5189,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5283,7 +5282,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5342,7 +5341,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5432,6 +5431,9 @@ where 
"answer-feedback.agent-assistant-detail-feedback.answer-relevance" => Some(("answerFeedback.agentAssistantDetailFeedback.answerRelevance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "answer-feedback.agent-assistant-detail-feedback.document-correctness" => Some(("answerFeedback.agentAssistantDetailFeedback.documentCorrectness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "answer-feedback.agent-assistant-detail-feedback.document-efficiency" => Some(("answerFeedback.agentAssistantDetailFeedback.documentEfficiency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "answer-feedback.agent-assistant-detail-feedback.summarization-feedback.start-time" => Some(("answerFeedback.agentAssistantDetailFeedback.summarizationFeedback.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "answer-feedback.agent-assistant-detail-feedback.summarization-feedback.submit-time" => Some(("answerFeedback.agentAssistantDetailFeedback.summarizationFeedback.submitTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "answer-feedback.agent-assistant-detail-feedback.summarization-feedback.summary-text" => Some(("answerFeedback.agentAssistantDetailFeedback.summarizationFeedback.summaryText", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "answer-feedback.click-time" => Some(("answerFeedback.clickTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "answer-feedback.clicked" => Some(("answerFeedback.clicked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "answer-feedback.correctness-level" => Some(("answerFeedback.correctnessLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5439,7 +5441,7 @@ where "answer-feedback.displayed" => Some(("answerFeedback.displayed", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-assistant-detail-feedback", "agent-assistant-record", "answer", "answer-feedback", "answer-record", "answer-relevance", "article-suggestion-answer", "click-time", "clicked", "confidence", "correctness-level", "display-time", "displayed", "document-correctness", "document-efficiency", "faq-answer", "metadata", "name", "question", "snippets", "source", "title", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-assistant-detail-feedback", "agent-assistant-record", "answer", "answer-feedback", "answer-record", "answer-relevance", "article-suggestion-answer", "click-time", "clicked", "confidence", "correctness-level", "display-time", "displayed", "document-correctness", "document-efficiency", "faq-answer", "metadata", "name", "question", "snippets", "source", "start-time", "submit-time", "summarization-feedback", "summary-text", "title", "uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5454,7 +5456,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5650,7 +5652,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5988,7 +5990,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| 
arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6099,7 +6101,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6363,11 +6365,12 @@ where "notification-config.message-format" => Some(("notificationConfig.messageFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-config.topic" => Some(("notificationConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stt-config.model" => Some(("sttConfig.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "stt-config.speech-model-variant" => Some(("sttConfig.speechModelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", 
"speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "model", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6541,7 +6544,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6635,11 +6638,12 @@ where "notification-config.message-format" => Some(("notificationConfig.messageFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-config.topic" => Some(("notificationConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stt-config.model" => Some(("sttConfig.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "stt-config.speech-model-variant" => Some(("sttConfig.speechModelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "time-zone" => 
Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "model", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6654,7 +6658,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -7043,7 +7047,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7105,7 +7109,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7286,10 +7290,11 @@ where match &temp_cursor.to_string()[..] { "documents-metadata-filters" => Some(("documentsMetadataFilters", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "obfuscated-external-user-id" => Some(("obfuscatedExternalUserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "role" => Some(("role", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sip-recording-media-label" => Some(("sipRecordingMediaLabel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["documents-metadata-filters", "name", "role", "sip-recording-media-label"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["documents-metadata-filters", "name", "obfuscated-external-user-id", "role", "sip-recording-media-label"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -7411,7 +7416,7 @@ 
where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7485,10 +7490,11 @@ where match &temp_cursor.to_string()[..] { "documents-metadata-filters" => Some(("documentsMetadataFilters", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "obfuscated-external-user-id" => Some(("obfuscatedExternalUserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "role" => Some(("role", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sip-recording-media-label" => Some(("sipRecordingMediaLabel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["documents-metadata-filters", "name", "role", "sip-recording-media-label"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["documents-metadata-filters", "name", "obfuscated-external-user-id", "role", "sip-recording-media-label"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -7503,7 +7509,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -7814,6 +7820,92 @@ where } } + async fn _projects_conversations_suggestions_suggest_conversation_summary(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = 
json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "context-size" => Some(("contextSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "latest-message" => Some(("latestMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["context-size", "latest-message"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDialogflowV2SuggestConversationSummaryRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().conversations_suggestions_suggest_conversation_summary(request, opt.value_of("conversation").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_delete_agent(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().delete_agent(opt.value_of("parent").unwrap_or("")); @@ -8012,7 +8104,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -8447,7 +8539,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8550,7 +8642,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| 
arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8748,7 +8840,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8842,7 +8934,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -9531,7 +9623,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -9627,7 +9719,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -9897,7 +9989,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9956,7 +10048,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", 
"integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -10021,7 +10113,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10125,10 +10217,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "allow-load-to-draft-and-discard-changes" => { - call = call.allow_load_to_draft_and_discard_changes(arg_from_str(value.unwrap_or("false"), err, "allow-load-to-draft-and-discard-changes", "boolean")); + call = call.allow_load_to_draft_and_discard_changes( value.map(|v| arg_from_str(v, err, "allow-load-to-draft-and-discard-changes", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -10377,7 +10469,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10467,7 +10559,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -10881,7 +10973,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), 
err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10971,7 +11063,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -11698,7 +11790,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -11807,7 +11899,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -11958,7 +12050,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12207,7 +12299,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12297,7 +12389,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -12711,7 +12803,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12801,7 +12893,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -12981,7 +13073,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -13233,7 +13325,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13326,7 +13418,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -13385,7 +13477,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", 
"integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -13475,6 +13567,9 @@ where "answer-feedback.agent-assistant-detail-feedback.answer-relevance" => Some(("answerFeedback.agentAssistantDetailFeedback.answerRelevance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "answer-feedback.agent-assistant-detail-feedback.document-correctness" => Some(("answerFeedback.agentAssistantDetailFeedback.documentCorrectness", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "answer-feedback.agent-assistant-detail-feedback.document-efficiency" => Some(("answerFeedback.agentAssistantDetailFeedback.documentEfficiency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "answer-feedback.agent-assistant-detail-feedback.summarization-feedback.start-time" => Some(("answerFeedback.agentAssistantDetailFeedback.summarizationFeedback.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "answer-feedback.agent-assistant-detail-feedback.summarization-feedback.submit-time" => Some(("answerFeedback.agentAssistantDetailFeedback.summarizationFeedback.submitTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "answer-feedback.agent-assistant-detail-feedback.summarization-feedback.summary-text" => Some(("answerFeedback.agentAssistantDetailFeedback.summarizationFeedback.summaryText", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "answer-feedback.click-time" => Some(("answerFeedback.clickTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "answer-feedback.clicked" => Some(("answerFeedback.clicked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "answer-feedback.correctness-level" => Some(("answerFeedback.correctnessLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -13482,7 
+13577,7 @@ where "answer-feedback.displayed" => Some(("answerFeedback.displayed", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-assistant-detail-feedback", "agent-assistant-record", "answer", "answer-feedback", "answer-record", "answer-relevance", "article-suggestion-answer", "click-time", "clicked", "confidence", "correctness-level", "display-time", "displayed", "document-correctness", "document-efficiency", "faq-answer", "metadata", "name", "question", "snippets", "source", "title", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-assistant-detail-feedback", "agent-assistant-record", "answer", "answer-feedback", "answer-record", "answer-relevance", "article-suggestion-answer", "click-time", "clicked", "confidence", "correctness-level", "display-time", "displayed", "document-correctness", "document-efficiency", "faq-answer", "metadata", "name", "question", "snippets", "source", "start-time", "submit-time", "summarization-feedback", "summary-text", "title", "uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -13497,7 +13592,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -13836,7 +13931,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14142,10 +14237,11 @@ where 
"conversation-model-evaluation.evaluation-config.smart-reply-config.allowlist-document" => Some(("conversationModelEvaluation.evaluationConfig.smartReplyConfig.allowlistDocument", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "conversation-model-evaluation.evaluation-config.smart-reply-config.max-result-count" => Some(("conversationModelEvaluation.evaluationConfig.smartReplyConfig.maxResultCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "conversation-model-evaluation.name" => Some(("conversationModelEvaluation.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "conversation-model-evaluation.raw-human-eval-template-csv" => Some(("conversationModelEvaluation.rawHumanEvalTemplateCsv", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "conversation-model-evaluation.smart-reply-metrics.allowlist-coverage" => Some(("conversationModelEvaluation.smartReplyMetrics.allowlistCoverage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "conversation-model-evaluation.smart-reply-metrics.conversation-count" => Some(("conversationModelEvaluation.smartReplyMetrics.conversationCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allowlist-coverage", "allowlist-document", "conversation-count", "conversation-model-evaluation", "create-time", "display-name", "evaluation-config", "max-result-count", "name", "smart-compose-config", "smart-reply-config", "smart-reply-metrics"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowlist-coverage", "allowlist-document", "conversation-count", "conversation-model-evaluation", "create-time", "display-name", "evaluation-config", "max-result-count", "name", "raw-human-eval-template-csv", "smart-compose-config", "smart-reply-config", "smart-reply-metrics"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), 
suggestion, value.map(|v| v.to_string())))); None } @@ -14267,7 +14363,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14378,7 +14474,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14642,11 +14738,12 @@ where "notification-config.message-format" => Some(("notificationConfig.messageFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-config.topic" => Some(("notificationConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stt-config.model" => Some(("sttConfig.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "stt-config.speech-model-variant" => Some(("sttConfig.speechModelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", 
"human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "model", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -14820,7 +14917,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14914,11 +15011,12 @@ where "notification-config.message-format" => Some(("notificationConfig.messageFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-config.topic" => Some(("notificationConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "stt-config.model" => Some(("sttConfig.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "stt-config.speech-model-variant" => Some(("sttConfig.speechModelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "model", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -14933,7 +15031,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -15322,7 +15420,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -15384,7 +15482,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -15565,10 +15663,11 @@ where match &temp_cursor.to_string()[..] 
{ "documents-metadata-filters" => Some(("documentsMetadataFilters", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "obfuscated-external-user-id" => Some(("obfuscatedExternalUserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "role" => Some(("role", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sip-recording-media-label" => Some(("sipRecordingMediaLabel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["documents-metadata-filters", "name", "role", "sip-recording-media-label"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["documents-metadata-filters", "name", "obfuscated-external-user-id", "role", "sip-recording-media-label"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -15690,7 +15789,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -15764,10 +15863,11 @@ where match &temp_cursor.to_string()[..] 
{ "documents-metadata-filters" => Some(("documentsMetadataFilters", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "obfuscated-external-user-id" => Some(("obfuscatedExternalUserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "role" => Some(("role", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sip-recording-media-label" => Some(("sipRecordingMediaLabel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["documents-metadata-filters", "name", "role", "sip-recording-media-label"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["documents-metadata-filters", "name", "obfuscated-external-user-id", "role", "sip-recording-media-label"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -15782,7 +15882,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -16093,6 +16193,92 @@ where } } + async fn _projects_locations_conversations_suggestions_suggest_conversation_summary(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + 
err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "context-size" => Some(("contextSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "latest-message" => Some(("latestMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["context-size", "latest-message"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDialogflowV2SuggestConversationSummaryRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversations_suggestions_suggest_conversation_summary(request, opt.value_of("conversation").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = 
match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_delete_agent(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_delete_agent(opt.value_of("parent").unwrap_or("")); @@ -16343,7 +16529,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -16778,7 +16964,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -16881,7 +17067,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -17079,7 +17265,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, 
"page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -17173,7 +17359,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -17232,7 +17418,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -17398,7 +17584,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -17501,7 +17687,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -17664,7 +17850,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -17767,7 +17953,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -18138,6 +18324,9 @@ where ("conversations-participants-suggestions-suggest-smart-replies", Some(opt)) => { call_result = self._projects_conversations_participants_suggestions_suggest_smart_replies(opt, dry_run, &mut err).await; }, + ("conversations-suggestions-suggest-conversation-summary", Some(opt)) => { + call_result = self._projects_conversations_suggestions_suggest_conversation_summary(opt, dry_run, &mut err).await; + }, ("delete-agent", Some(opt)) => { call_result = self._projects_delete_agent(opt, dry_run, &mut err).await; }, @@ -18474,6 +18663,9 @@ where ("locations-conversations-participants-suggestions-suggest-smart-replies", Some(opt)) => { call_result = self._projects_locations_conversations_participants_suggestions_suggest_smart_replies(opt, dry_run, &mut err).await; }, + ("locations-conversations-suggestions-suggest-conversation-summary", Some(opt)) => { + call_result = self._projects_locations_conversations_suggestions_suggest_conversation_summary(opt, dry_run, &mut err).await; + }, ("locations-delete-agent", Some(opt)) => { call_result = self._projects_locations_delete_agent(opt, dry_run, &mut err).await; }, @@ -18628,7 +18820,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'agent-entity-types-batch-delete', 'agent-entity-types-batch-update', 'agent-entity-types-create', 'agent-entity-types-delete', 'agent-entity-types-entities-batch-create', 'agent-entity-types-entities-batch-delete', 'agent-entity-types-entities-batch-update', 'agent-entity-types-get', 'agent-entity-types-list', 'agent-entity-types-patch', 'agent-environments-create', 'agent-environments-delete', 'agent-environments-get', 'agent-environments-get-history', 'agent-environments-intents-list', 'agent-environments-list', 
'agent-environments-patch', 'agent-environments-users-sessions-contexts-create', 'agent-environments-users-sessions-contexts-delete', 'agent-environments-users-sessions-contexts-get', 'agent-environments-users-sessions-contexts-list', 'agent-environments-users-sessions-contexts-patch', 'agent-environments-users-sessions-delete-contexts', 'agent-environments-users-sessions-detect-intent', 'agent-environments-users-sessions-entity-types-create', 'agent-environments-users-sessions-entity-types-delete', 'agent-environments-users-sessions-entity-types-get', 'agent-environments-users-sessions-entity-types-list', 'agent-environments-users-sessions-entity-types-patch', 'agent-export', 'agent-get-fulfillment', 'agent-get-validation-result', 'agent-import', 'agent-intents-batch-delete', 'agent-intents-batch-update', 'agent-intents-create', 'agent-intents-delete', 'agent-intents-get', 'agent-intents-list', 'agent-intents-patch', 'agent-knowledge-bases-create', 'agent-knowledge-bases-delete', 'agent-knowledge-bases-documents-create', 'agent-knowledge-bases-documents-delete', 'agent-knowledge-bases-documents-get', 'agent-knowledge-bases-documents-list', 'agent-knowledge-bases-documents-patch', 'agent-knowledge-bases-documents-reload', 'agent-knowledge-bases-get', 'agent-knowledge-bases-list', 'agent-knowledge-bases-patch', 'agent-restore', 'agent-search', 'agent-sessions-contexts-create', 'agent-sessions-contexts-delete', 'agent-sessions-contexts-get', 'agent-sessions-contexts-list', 'agent-sessions-contexts-patch', 'agent-sessions-delete-contexts', 'agent-sessions-detect-intent', 'agent-sessions-entity-types-create', 'agent-sessions-entity-types-delete', 'agent-sessions-entity-types-get', 'agent-sessions-entity-types-list', 'agent-sessions-entity-types-patch', 'agent-train', 'agent-update-fulfillment', 'agent-versions-create', 'agent-versions-delete', 'agent-versions-get', 'agent-versions-list', 'agent-versions-patch', 'answer-records-list', 'answer-records-patch', 
'conversation-datasets-get', 'conversation-datasets-import-conversation-data', 'conversation-datasets-list', 'conversation-models-create', 'conversation-models-delete', 'conversation-models-deploy', 'conversation-models-evaluations-get', 'conversation-models-evaluations-list', 'conversation-models-get', 'conversation-models-list', 'conversation-models-undeploy', 'conversation-profiles-clear-suggestion-feature-config', 'conversation-profiles-create', 'conversation-profiles-delete', 'conversation-profiles-get', 'conversation-profiles-list', 'conversation-profiles-patch', 'conversation-profiles-set-suggestion-feature-config', 'conversations-complete', 'conversations-create', 'conversations-get', 'conversations-list', 'conversations-messages-list', 'conversations-participants-analyze-content', 'conversations-participants-create', 'conversations-participants-get', 'conversations-participants-list', 'conversations-participants-patch', 'conversations-participants-suggestions-suggest-articles', 'conversations-participants-suggestions-suggest-faq-answers', 'conversations-participants-suggestions-suggest-smart-replies', 'delete-agent', 'get-agent', 'knowledge-bases-create', 'knowledge-bases-delete', 'knowledge-bases-documents-create', 'knowledge-bases-documents-delete', 'knowledge-bases-documents-export', 'knowledge-bases-documents-get', 'knowledge-bases-documents-import', 'knowledge-bases-documents-list', 'knowledge-bases-documents-patch', 'knowledge-bases-documents-reload', 'knowledge-bases-get', 'knowledge-bases-list', 'knowledge-bases-patch', 'locations-agent-entity-types-batch-delete', 'locations-agent-entity-types-batch-update', 'locations-agent-entity-types-create', 'locations-agent-entity-types-delete', 'locations-agent-entity-types-entities-batch-create', 'locations-agent-entity-types-entities-batch-delete', 'locations-agent-entity-types-entities-batch-update', 'locations-agent-entity-types-get', 'locations-agent-entity-types-list', 
'locations-agent-entity-types-patch', 'locations-agent-environments-create', 'locations-agent-environments-delete', 'locations-agent-environments-get', 'locations-agent-environments-get-history', 'locations-agent-environments-intents-list', 'locations-agent-environments-list', 'locations-agent-environments-patch', 'locations-agent-environments-users-sessions-contexts-create', 'locations-agent-environments-users-sessions-contexts-delete', 'locations-agent-environments-users-sessions-contexts-get', 'locations-agent-environments-users-sessions-contexts-list', 'locations-agent-environments-users-sessions-contexts-patch', 'locations-agent-environments-users-sessions-delete-contexts', 'locations-agent-environments-users-sessions-detect-intent', 'locations-agent-environments-users-sessions-entity-types-create', 'locations-agent-environments-users-sessions-entity-types-delete', 'locations-agent-environments-users-sessions-entity-types-get', 'locations-agent-environments-users-sessions-entity-types-list', 'locations-agent-environments-users-sessions-entity-types-patch', 'locations-agent-export', 'locations-agent-get-fulfillment', 'locations-agent-get-validation-result', 'locations-agent-import', 'locations-agent-intents-batch-delete', 'locations-agent-intents-batch-update', 'locations-agent-intents-create', 'locations-agent-intents-delete', 'locations-agent-intents-get', 'locations-agent-intents-list', 'locations-agent-intents-patch', 'locations-agent-restore', 'locations-agent-search', 'locations-agent-sessions-contexts-create', 'locations-agent-sessions-contexts-delete', 'locations-agent-sessions-contexts-get', 'locations-agent-sessions-contexts-list', 'locations-agent-sessions-contexts-patch', 'locations-agent-sessions-delete-contexts', 'locations-agent-sessions-detect-intent', 'locations-agent-sessions-entity-types-create', 'locations-agent-sessions-entity-types-delete', 'locations-agent-sessions-entity-types-get', 'locations-agent-sessions-entity-types-list', 
'locations-agent-sessions-entity-types-patch', 'locations-agent-train', 'locations-agent-update-fulfillment', 'locations-agent-versions-create', 'locations-agent-versions-delete', 'locations-agent-versions-get', 'locations-agent-versions-list', 'locations-agent-versions-patch', 'locations-answer-records-list', 'locations-answer-records-patch', 'locations-conversation-datasets-create', 'locations-conversation-datasets-delete', 'locations-conversation-datasets-get', 'locations-conversation-datasets-import-conversation-data', 'locations-conversation-datasets-list', 'locations-conversation-models-create', 'locations-conversation-models-delete', 'locations-conversation-models-deploy', 'locations-conversation-models-evaluations-create', 'locations-conversation-models-evaluations-get', 'locations-conversation-models-evaluations-list', 'locations-conversation-models-get', 'locations-conversation-models-list', 'locations-conversation-models-undeploy', 'locations-conversation-profiles-clear-suggestion-feature-config', 'locations-conversation-profiles-create', 'locations-conversation-profiles-delete', 'locations-conversation-profiles-get', 'locations-conversation-profiles-list', 'locations-conversation-profiles-patch', 'locations-conversation-profiles-set-suggestion-feature-config', 'locations-conversations-complete', 'locations-conversations-create', 'locations-conversations-get', 'locations-conversations-list', 'locations-conversations-messages-list', 'locations-conversations-participants-analyze-content', 'locations-conversations-participants-create', 'locations-conversations-participants-get', 'locations-conversations-participants-list', 'locations-conversations-participants-patch', 'locations-conversations-participants-suggestions-suggest-articles', 'locations-conversations-participants-suggestions-suggest-faq-answers', 'locations-conversations-participants-suggestions-suggest-smart-replies', 'locations-delete-agent', 'locations-get', 'locations-get-agent', 
'locations-knowledge-bases-create', 'locations-knowledge-bases-delete', 'locations-knowledge-bases-documents-create', 'locations-knowledge-bases-documents-delete', 'locations-knowledge-bases-documents-export', 'locations-knowledge-bases-documents-get', 'locations-knowledge-bases-documents-import', 'locations-knowledge-bases-documents-list', 'locations-knowledge-bases-documents-patch', 'locations-knowledge-bases-documents-reload', 'locations-knowledge-bases-get', 'locations-knowledge-bases-list', 'locations-knowledge-bases-patch', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'locations-set-agent', 'operations-cancel', 'operations-get', 'operations-list' and 'set-agent'", vec![ + ("projects", "methods: 'agent-entity-types-batch-delete', 'agent-entity-types-batch-update', 'agent-entity-types-create', 'agent-entity-types-delete', 'agent-entity-types-entities-batch-create', 'agent-entity-types-entities-batch-delete', 'agent-entity-types-entities-batch-update', 'agent-entity-types-get', 'agent-entity-types-list', 'agent-entity-types-patch', 'agent-environments-create', 'agent-environments-delete', 'agent-environments-get', 'agent-environments-get-history', 'agent-environments-intents-list', 'agent-environments-list', 'agent-environments-patch', 'agent-environments-users-sessions-contexts-create', 'agent-environments-users-sessions-contexts-delete', 'agent-environments-users-sessions-contexts-get', 'agent-environments-users-sessions-contexts-list', 'agent-environments-users-sessions-contexts-patch', 'agent-environments-users-sessions-delete-contexts', 'agent-environments-users-sessions-detect-intent', 'agent-environments-users-sessions-entity-types-create', 'agent-environments-users-sessions-entity-types-delete', 'agent-environments-users-sessions-entity-types-get', 'agent-environments-users-sessions-entity-types-list', 'agent-environments-users-sessions-entity-types-patch', 'agent-export', 
'agent-get-fulfillment', 'agent-get-validation-result', 'agent-import', 'agent-intents-batch-delete', 'agent-intents-batch-update', 'agent-intents-create', 'agent-intents-delete', 'agent-intents-get', 'agent-intents-list', 'agent-intents-patch', 'agent-knowledge-bases-create', 'agent-knowledge-bases-delete', 'agent-knowledge-bases-documents-create', 'agent-knowledge-bases-documents-delete', 'agent-knowledge-bases-documents-get', 'agent-knowledge-bases-documents-list', 'agent-knowledge-bases-documents-patch', 'agent-knowledge-bases-documents-reload', 'agent-knowledge-bases-get', 'agent-knowledge-bases-list', 'agent-knowledge-bases-patch', 'agent-restore', 'agent-search', 'agent-sessions-contexts-create', 'agent-sessions-contexts-delete', 'agent-sessions-contexts-get', 'agent-sessions-contexts-list', 'agent-sessions-contexts-patch', 'agent-sessions-delete-contexts', 'agent-sessions-detect-intent', 'agent-sessions-entity-types-create', 'agent-sessions-entity-types-delete', 'agent-sessions-entity-types-get', 'agent-sessions-entity-types-list', 'agent-sessions-entity-types-patch', 'agent-train', 'agent-update-fulfillment', 'agent-versions-create', 'agent-versions-delete', 'agent-versions-get', 'agent-versions-list', 'agent-versions-patch', 'answer-records-list', 'answer-records-patch', 'conversation-datasets-get', 'conversation-datasets-import-conversation-data', 'conversation-datasets-list', 'conversation-models-create', 'conversation-models-delete', 'conversation-models-deploy', 'conversation-models-evaluations-get', 'conversation-models-evaluations-list', 'conversation-models-get', 'conversation-models-list', 'conversation-models-undeploy', 'conversation-profiles-clear-suggestion-feature-config', 'conversation-profiles-create', 'conversation-profiles-delete', 'conversation-profiles-get', 'conversation-profiles-list', 'conversation-profiles-patch', 'conversation-profiles-set-suggestion-feature-config', 'conversations-complete', 'conversations-create', 
'conversations-get', 'conversations-list', 'conversations-messages-list', 'conversations-participants-analyze-content', 'conversations-participants-create', 'conversations-participants-get', 'conversations-participants-list', 'conversations-participants-patch', 'conversations-participants-suggestions-suggest-articles', 'conversations-participants-suggestions-suggest-faq-answers', 'conversations-participants-suggestions-suggest-smart-replies', 'conversations-suggestions-suggest-conversation-summary', 'delete-agent', 'get-agent', 'knowledge-bases-create', 'knowledge-bases-delete', 'knowledge-bases-documents-create', 'knowledge-bases-documents-delete', 'knowledge-bases-documents-export', 'knowledge-bases-documents-get', 'knowledge-bases-documents-import', 'knowledge-bases-documents-list', 'knowledge-bases-documents-patch', 'knowledge-bases-documents-reload', 'knowledge-bases-get', 'knowledge-bases-list', 'knowledge-bases-patch', 'locations-agent-entity-types-batch-delete', 'locations-agent-entity-types-batch-update', 'locations-agent-entity-types-create', 'locations-agent-entity-types-delete', 'locations-agent-entity-types-entities-batch-create', 'locations-agent-entity-types-entities-batch-delete', 'locations-agent-entity-types-entities-batch-update', 'locations-agent-entity-types-get', 'locations-agent-entity-types-list', 'locations-agent-entity-types-patch', 'locations-agent-environments-create', 'locations-agent-environments-delete', 'locations-agent-environments-get', 'locations-agent-environments-get-history', 'locations-agent-environments-intents-list', 'locations-agent-environments-list', 'locations-agent-environments-patch', 'locations-agent-environments-users-sessions-contexts-create', 'locations-agent-environments-users-sessions-contexts-delete', 'locations-agent-environments-users-sessions-contexts-get', 'locations-agent-environments-users-sessions-contexts-list', 'locations-agent-environments-users-sessions-contexts-patch', 
'locations-agent-environments-users-sessions-delete-contexts', 'locations-agent-environments-users-sessions-detect-intent', 'locations-agent-environments-users-sessions-entity-types-create', 'locations-agent-environments-users-sessions-entity-types-delete', 'locations-agent-environments-users-sessions-entity-types-get', 'locations-agent-environments-users-sessions-entity-types-list', 'locations-agent-environments-users-sessions-entity-types-patch', 'locations-agent-export', 'locations-agent-get-fulfillment', 'locations-agent-get-validation-result', 'locations-agent-import', 'locations-agent-intents-batch-delete', 'locations-agent-intents-batch-update', 'locations-agent-intents-create', 'locations-agent-intents-delete', 'locations-agent-intents-get', 'locations-agent-intents-list', 'locations-agent-intents-patch', 'locations-agent-restore', 'locations-agent-search', 'locations-agent-sessions-contexts-create', 'locations-agent-sessions-contexts-delete', 'locations-agent-sessions-contexts-get', 'locations-agent-sessions-contexts-list', 'locations-agent-sessions-contexts-patch', 'locations-agent-sessions-delete-contexts', 'locations-agent-sessions-detect-intent', 'locations-agent-sessions-entity-types-create', 'locations-agent-sessions-entity-types-delete', 'locations-agent-sessions-entity-types-get', 'locations-agent-sessions-entity-types-list', 'locations-agent-sessions-entity-types-patch', 'locations-agent-train', 'locations-agent-update-fulfillment', 'locations-agent-versions-create', 'locations-agent-versions-delete', 'locations-agent-versions-get', 'locations-agent-versions-list', 'locations-agent-versions-patch', 'locations-answer-records-list', 'locations-answer-records-patch', 'locations-conversation-datasets-create', 'locations-conversation-datasets-delete', 'locations-conversation-datasets-get', 'locations-conversation-datasets-import-conversation-data', 'locations-conversation-datasets-list', 'locations-conversation-models-create', 
'locations-conversation-models-delete', 'locations-conversation-models-deploy', 'locations-conversation-models-evaluations-create', 'locations-conversation-models-evaluations-get', 'locations-conversation-models-evaluations-list', 'locations-conversation-models-get', 'locations-conversation-models-list', 'locations-conversation-models-undeploy', 'locations-conversation-profiles-clear-suggestion-feature-config', 'locations-conversation-profiles-create', 'locations-conversation-profiles-delete', 'locations-conversation-profiles-get', 'locations-conversation-profiles-list', 'locations-conversation-profiles-patch', 'locations-conversation-profiles-set-suggestion-feature-config', 'locations-conversations-complete', 'locations-conversations-create', 'locations-conversations-get', 'locations-conversations-list', 'locations-conversations-messages-list', 'locations-conversations-participants-analyze-content', 'locations-conversations-participants-create', 'locations-conversations-participants-get', 'locations-conversations-participants-list', 'locations-conversations-participants-patch', 'locations-conversations-participants-suggestions-suggest-articles', 'locations-conversations-participants-suggestions-suggest-faq-answers', 'locations-conversations-participants-suggestions-suggest-smart-replies', 'locations-conversations-suggestions-suggest-conversation-summary', 'locations-delete-agent', 'locations-get', 'locations-get-agent', 'locations-knowledge-bases-create', 'locations-knowledge-bases-delete', 'locations-knowledge-bases-documents-create', 'locations-knowledge-bases-documents-delete', 'locations-knowledge-bases-documents-export', 'locations-knowledge-bases-documents-get', 'locations-knowledge-bases-documents-import', 'locations-knowledge-bases-documents-list', 'locations-knowledge-bases-documents-patch', 'locations-knowledge-bases-documents-reload', 'locations-knowledge-bases-get', 'locations-knowledge-bases-list', 'locations-knowledge-bases-patch', 'locations-list', 
'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'locations-set-agent', 'operations-cancel', 'operations-get', 'operations-list' and 'set-agent'", vec![ ("agent-entity-types-batch-delete", Some(r##"Deletes entity types in the specified agent. This method is a [long-running operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). The returned `Operation` type has the following method-specific fields: - `metadata`: An empty [Struct message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) - `response`: An [Empty message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) Note: You should always train an agent prior to sending it queries. See the [training documentation](https://cloud.google.com/dialogflow/es/docs/training)."##), "Details at http://byron.github.io/google-apis-rs/google_dialogflow2_cli/projects_agent-entity-types-batch-delete", @@ -21245,6 +21437,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("conversations-suggestions-suggest-conversation-summary", + Some(r##"Suggests summary for a conversation based on specific historical messages. The range of the messages to be used for summary can be specified in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_dialogflow2_cli/projects_conversations-suggestions-suggest-conversation-summary", + vec![ + (Some(r##"conversation"##), + None, + Some(r##"Required. The conversation to fetch suggestion for. 
Format: `projects//locations//conversations/`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -24045,6 +24265,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversations-suggestions-suggest-conversation-summary", + Some(r##"Suggests summary for a conversation based on specific historical messages. The range of the messages to be used for summary can be specified in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_dialogflow2_cli/projects_locations-conversations-suggestions-suggest-conversation-summary", + vec![ + (Some(r##"conversation"##), + None, + Some(r##"Required. The conversation to fetch suggestion for. 
Format: `projects//locations//conversations/`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -24661,7 +24909,7 @@ async fn main() { let mut app = App::new("dialogflow2") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230110") .about("Builds conversational interfaces (for example, chatbots, and voice-powered apps and devices).") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dialogflow2_cli") .arg(Arg::with_name("url") diff --git a/gen/dialogflow2/Cargo.toml b/gen/dialogflow2/Cargo.toml index 67fe48ed2a..8861d8acc2 100644 --- a/gen/dialogflow2/Cargo.toml +++ b/gen/dialogflow2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dialogflow2" -version = "5.0.2-beta-1+20230110" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dialogflow (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow2" homepage = "https://cloud.google.com/dialogflow/" -documentation = "https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110" +documentation = "https://docs.rs/google-dialogflow2/5.0.2+20230110" license = "MIT" keywords = ["dialogflow", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dialogflow2/README.md b/gen/dialogflow2/README.md index 95dae8ea9b..e257af0fca 100644 --- a/gen/dialogflow2/README.md +++ b/gen/dialogflow2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-dialogflow2` library allows access to all features of the *Google Dialogflow* service. 
-This documentation was generated from *Dialogflow* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *dialogflow:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Dialogflow* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *dialogflow:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dialogflow* *v2* API can be found at the [official documentation site](https://cloud.google.com/dialogflow/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/Dialogflow) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/Dialogflow) ... * projects - * [*agent entity types batch delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeBatchDeleteCall), [*agent entity types batch update*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeBatchUpdateCall), [*agent entity types create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeCreateCall), [*agent entity types delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeDeleteCall), [*agent entity types entities batch create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeEntityBatchCreateCall), [*agent entity types entities batch delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeEntityBatchDeleteCall), [*agent entity types entities batch 
update*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeEntityBatchUpdateCall), [*agent entity types get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeGetCall), [*agent entity types list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypeListCall), [*agent entity types patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEntityTypePatchCall), [*agent environments create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentCreateCall), [*agent environments delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentDeleteCall), [*agent environments get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentGetCall), [*agent environments get history*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentGetHistoryCall), [*agent environments intents list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentIntentListCall), [*agent environments list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentListCall), [*agent environments patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentPatchCall), [*agent environments users sessions contexts create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextCreateCall), [*agent environments users sessions contexts delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextDeleteCall), [*agent environments 
users sessions contexts get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextGetCall), [*agent environments users sessions contexts list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextListCall), [*agent environments users sessions contexts patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextPatchCall), [*agent environments users sessions delete contexts*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionDeleteContextCall), [*agent environments users sessions detect intent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionDetectIntentCall), [*agent environments users sessions entity types create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypeCreateCall), [*agent environments users sessions entity types delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypeDeleteCall), [*agent environments users sessions entity types get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypeGetCall), [*agent environments users sessions entity types list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypeListCall), [*agent environments users sessions entity types patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypePatchCall), [*agent export*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentExportCall), 
[*agent get fulfillment*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentGetFulfillmentCall), [*agent get validation result*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentGetValidationResultCall), [*agent import*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentImportCall), [*agent intents batch delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentIntentBatchDeleteCall), [*agent intents batch update*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentIntentBatchUpdateCall), [*agent intents create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentIntentCreateCall), [*agent intents delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentIntentDeleteCall), [*agent intents get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentIntentGetCall), [*agent intents list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentIntentListCall), [*agent intents patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentIntentPatchCall), [*agent knowledge bases create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseCreateCall), [*agent knowledge bases delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDeleteCall), [*agent knowledge bases documents create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentCreateCall), [*agent knowledge bases documents 
delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentDeleteCall), [*agent knowledge bases documents get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentGetCall), [*agent knowledge bases documents list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentListCall), [*agent knowledge bases documents patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentPatchCall), [*agent knowledge bases documents reload*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentReloadCall), [*agent knowledge bases get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseGetCall), [*agent knowledge bases list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseListCall), [*agent knowledge bases patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBasePatchCall), [*agent restore*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentRestoreCall), [*agent search*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSearchCall), [*agent sessions contexts create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionContextCreateCall), [*agent sessions contexts delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionContextDeleteCall), [*agent sessions contexts get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionContextGetCall), [*agent sessions contexts 
list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionContextListCall), [*agent sessions contexts patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionContextPatchCall), [*agent sessions delete contexts*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionDeleteContextCall), [*agent sessions detect intent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionDetectIntentCall), [*agent sessions entity types create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypeCreateCall), [*agent sessions entity types delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypeDeleteCall), [*agent sessions entity types get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypeGetCall), [*agent sessions entity types list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypeListCall), [*agent sessions entity types patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypePatchCall), [*agent train*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentTrainCall), [*agent update fulfillment*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentUpdateFulfillmentCall), [*agent versions create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentVersionCreateCall), [*agent versions delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentVersionDeleteCall), [*agent versions 
get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentVersionGetCall), [*agent versions list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentVersionListCall), [*agent versions patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAgentVersionPatchCall), [*answer records list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAnswerRecordListCall), [*answer records patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectAnswerRecordPatchCall), [*conversation datasets get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationDatasetGetCall), [*conversation datasets import conversation data*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationDatasetImportConversationDataCall), [*conversation datasets list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationDatasetListCall), [*conversation models create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationModelCreateCall), [*conversation models delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationModelDeleteCall), [*conversation models deploy*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationModelDeployCall), [*conversation models evaluations get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationModelEvaluationGetCall), [*conversation models evaluations list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationModelEvaluationListCall), [*conversation models 
get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationModelGetCall), [*conversation models list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationModelListCall), [*conversation models undeploy*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationModelUndeployCall), [*conversation profiles clear suggestion feature config*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationProfileClearSuggestionFeatureConfigCall), [*conversation profiles create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationProfileCreateCall), [*conversation profiles delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationProfileDeleteCall), [*conversation profiles get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationProfileGetCall), [*conversation profiles list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationProfileListCall), [*conversation profiles patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationProfilePatchCall), [*conversation profiles set suggestion feature config*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationProfileSetSuggestionFeatureConfigCall), [*conversations complete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationCompleteCall), [*conversations create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationCreateCall), [*conversations get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationGetCall), [*conversations 
list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationListCall), [*conversations messages list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationMessageListCall), [*conversations participants analyze content*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationParticipantAnalyzeContentCall), [*conversations participants create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationParticipantCreateCall), [*conversations participants get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationParticipantGetCall), [*conversations participants list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationParticipantListCall), [*conversations participants patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationParticipantPatchCall), [*conversations participants suggestions suggest articles*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationParticipantSuggestionSuggestArticleCall), [*conversations participants suggestions suggest faq answers*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationParticipantSuggestionSuggestFaqAnswerCall), [*conversations participants suggestions suggest smart replies*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationParticipantSuggestionSuggestSmartReplyCall), [*conversations suggestions suggest conversation summary*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectConversationSuggestionSuggestConversationSummaryCall), [*delete 
agent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectDeleteAgentCall), [*get agent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectGetAgentCall), [*knowledge bases create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseCreateCall), [*knowledge bases delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDeleteCall), [*knowledge bases documents create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentCreateCall), [*knowledge bases documents delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentDeleteCall), [*knowledge bases documents export*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentExportCall), [*knowledge bases documents get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentGetCall), [*knowledge bases documents import*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentImportCall), [*knowledge bases documents list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentListCall), [*knowledge bases documents patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentPatchCall), [*knowledge bases documents reload*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentReloadCall), [*knowledge bases get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseGetCall), [*knowledge bases 
list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBaseListCall), [*knowledge bases patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectKnowledgeBasePatchCall), [*locations agent entity types batch delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeBatchDeleteCall), [*locations agent entity types batch update*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeBatchUpdateCall), [*locations agent entity types create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeCreateCall), [*locations agent entity types delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeDeleteCall), [*locations agent entity types entities batch create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeEntityBatchCreateCall), [*locations agent entity types entities batch delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeEntityBatchDeleteCall), [*locations agent entity types entities batch update*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeEntityBatchUpdateCall), [*locations agent entity types get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeGetCall), [*locations agent entity types list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeListCall), [*locations agent entity types patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypePatchCall), [*locations 
agent environments create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentCreateCall), [*locations agent environments delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentDeleteCall), [*locations agent environments get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentGetCall), [*locations agent environments get history*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentGetHistoryCall), [*locations agent environments intents list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentIntentListCall), [*locations agent environments list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentListCall), [*locations agent environments patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentPatchCall), [*locations agent environments users sessions contexts create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextCreateCall), [*locations agent environments users sessions contexts delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextDeleteCall), [*locations agent environments users sessions contexts get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextGetCall), [*locations agent environments users sessions contexts list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextListCall), [*locations agent environments 
users sessions contexts patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextPatchCall), [*locations agent environments users sessions delete contexts*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionDeleteContextCall), [*locations agent environments users sessions detect intent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionDetectIntentCall), [*locations agent environments users sessions entity types create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeCreateCall), [*locations agent environments users sessions entity types delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeDeleteCall), [*locations agent environments users sessions entity types get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeGetCall), [*locations agent environments users sessions entity types list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeListCall), [*locations agent environments users sessions entity types patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypePatchCall), [*locations agent export*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentExportCall), [*locations agent get fulfillment*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentGetFulfillmentCall), [*locations agent get validation 
result*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentGetValidationResultCall), [*locations agent import*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentImportCall), [*locations agent intents batch delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentIntentBatchDeleteCall), [*locations agent intents batch update*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentIntentBatchUpdateCall), [*locations agent intents create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentIntentCreateCall), [*locations agent intents delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentIntentDeleteCall), [*locations agent intents get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentIntentGetCall), [*locations agent intents list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentIntentListCall), [*locations agent intents patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentIntentPatchCall), [*locations agent restore*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentRestoreCall), [*locations agent search*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSearchCall), [*locations agent sessions contexts create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextCreateCall), [*locations agent sessions contexts 
delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextDeleteCall), [*locations agent sessions contexts get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextGetCall), [*locations agent sessions contexts list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextListCall), [*locations agent sessions contexts patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextPatchCall), [*locations agent sessions delete contexts*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionDeleteContextCall), [*locations agent sessions detect intent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionDetectIntentCall), [*locations agent sessions entity types create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypeCreateCall), [*locations agent sessions entity types delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypeDeleteCall), [*locations agent sessions entity types get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypeGetCall), [*locations agent sessions entity types list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypeListCall), [*locations agent sessions entity types patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypePatchCall), [*locations agent 
train*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentTrainCall), [*locations agent update fulfillment*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentUpdateFulfillmentCall), [*locations agent versions create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentVersionCreateCall), [*locations agent versions delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentVersionDeleteCall), [*locations agent versions get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentVersionGetCall), [*locations agent versions list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentVersionListCall), [*locations agent versions patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAgentVersionPatchCall), [*locations answer records list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAnswerRecordListCall), [*locations answer records patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationAnswerRecordPatchCall), [*locations conversation datasets create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetCreateCall), [*locations conversation datasets delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetDeleteCall), [*locations conversation datasets get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetGetCall), [*locations conversation datasets import conversation 
data*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetImportConversationDataCall), [*locations conversation datasets list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetListCall), [*locations conversation models create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelCreateCall), [*locations conversation models delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelDeleteCall), [*locations conversation models deploy*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelDeployCall), [*locations conversation models evaluations create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelEvaluationCreateCall), [*locations conversation models evaluations get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelEvaluationGetCall), [*locations conversation models evaluations list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelEvaluationListCall), [*locations conversation models get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelGetCall), [*locations conversation models list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelListCall), [*locations conversation models undeploy*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationModelUndeployCall), [*locations conversation profiles clear suggestion feature 
config*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationProfileClearSuggestionFeatureConfigCall), [*locations conversation profiles create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationProfileCreateCall), [*locations conversation profiles delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationProfileDeleteCall), [*locations conversation profiles get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationProfileGetCall), [*locations conversation profiles list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationProfileListCall), [*locations conversation profiles patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationProfilePatchCall), [*locations conversation profiles set suggestion feature config*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationProfileSetSuggestionFeatureConfigCall), [*locations conversations complete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationCompleteCall), [*locations conversations create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationCreateCall), [*locations conversations get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationGetCall), [*locations conversations list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationListCall), [*locations conversations messages 
list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationMessageListCall), [*locations conversations participants analyze content*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantAnalyzeContentCall), [*locations conversations participants create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantCreateCall), [*locations conversations participants get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantGetCall), [*locations conversations participants list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantListCall), [*locations conversations participants patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantPatchCall), [*locations conversations participants suggestions suggest articles*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantSuggestionSuggestArticleCall), [*locations conversations participants suggestions suggest faq answers*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantSuggestionSuggestFaqAnswerCall), [*locations conversations participants suggestions suggest smart replies*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantSuggestionSuggestSmartReplyCall), [*locations conversations suggestions suggest conversation summary*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationConversationSuggestionSuggestConversationSummaryCall), [*locations delete 
agent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationDeleteAgentCall), [*locations get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationGetCall), [*locations get agent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationGetAgentCall), [*locations knowledge bases create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseCreateCall), [*locations knowledge bases delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDeleteCall), [*locations knowledge bases documents create*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentCreateCall), [*locations knowledge bases documents delete*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentDeleteCall), [*locations knowledge bases documents export*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentExportCall), [*locations knowledge bases documents get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentGetCall), [*locations knowledge bases documents import*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentImportCall), [*locations knowledge bases documents list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentListCall), [*locations knowledge bases documents patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentPatchCall), [*locations knowledge bases documents 
reload*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentReloadCall), [*locations knowledge bases get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseGetCall), [*locations knowledge bases list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseListCall), [*locations knowledge bases patch*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBasePatchCall), [*locations list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationOperationListCall), [*locations set agent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectLocationSetAgentCall), [*operations cancel*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectOperationCancelCall), [*operations get*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectOperationListCall) and [*set agent*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/api::ProjectSetAgentCall) + * [*agent entity types batch delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeBatchDeleteCall), [*agent entity types batch 
update*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeBatchUpdateCall), [*agent entity types create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeCreateCall), [*agent entity types delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeDeleteCall), [*agent entity types entities batch create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeEntityBatchCreateCall), [*agent entity types entities batch delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeEntityBatchDeleteCall), [*agent entity types entities batch update*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeEntityBatchUpdateCall), [*agent entity types get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeGetCall), [*agent entity types list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypeListCall), [*agent entity types patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEntityTypePatchCall), [*agent environments create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentCreateCall), [*agent environments delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentDeleteCall), [*agent environments get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentGetCall), [*agent environments get history*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentGetHistoryCall), [*agent environments intents 
list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentIntentListCall), [*agent environments list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentListCall), [*agent environments patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentPatchCall), [*agent environments users sessions contexts create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextCreateCall), [*agent environments users sessions contexts delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextDeleteCall), [*agent environments users sessions contexts get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextGetCall), [*agent environments users sessions contexts list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextListCall), [*agent environments users sessions contexts patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionContextPatchCall), [*agent environments users sessions delete contexts*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionDeleteContextCall), [*agent environments users sessions detect intent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionDetectIntentCall), [*agent environments users sessions entity types create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypeCreateCall), [*agent environments users sessions entity types 
delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypeDeleteCall), [*agent environments users sessions entity types get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypeGetCall), [*agent environments users sessions entity types list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypeListCall), [*agent environments users sessions entity types patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentEnvironmentUserSessionEntityTypePatchCall), [*agent export*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentExportCall), [*agent get fulfillment*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentGetFulfillmentCall), [*agent get validation result*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentGetValidationResultCall), [*agent import*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentImportCall), [*agent intents batch delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentIntentBatchDeleteCall), [*agent intents batch update*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentIntentBatchUpdateCall), [*agent intents create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentIntentCreateCall), [*agent intents delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentIntentDeleteCall), [*agent intents get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentIntentGetCall), [*agent intents list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentIntentListCall), 
[*agent intents patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentIntentPatchCall), [*agent knowledge bases create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseCreateCall), [*agent knowledge bases delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDeleteCall), [*agent knowledge bases documents create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentCreateCall), [*agent knowledge bases documents delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentDeleteCall), [*agent knowledge bases documents get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentGetCall), [*agent knowledge bases documents list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentListCall), [*agent knowledge bases documents patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentPatchCall), [*agent knowledge bases documents reload*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseDocumentReloadCall), [*agent knowledge bases get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseGetCall), [*agent knowledge bases list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBaseListCall), [*agent knowledge bases patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentKnowledgeBasePatchCall), [*agent restore*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentRestoreCall), [*agent 
search*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSearchCall), [*agent sessions contexts create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionContextCreateCall), [*agent sessions contexts delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionContextDeleteCall), [*agent sessions contexts get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionContextGetCall), [*agent sessions contexts list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionContextListCall), [*agent sessions contexts patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionContextPatchCall), [*agent sessions delete contexts*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionDeleteContextCall), [*agent sessions detect intent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionDetectIntentCall), [*agent sessions entity types create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypeCreateCall), [*agent sessions entity types delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypeDeleteCall), [*agent sessions entity types get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypeGetCall), [*agent sessions entity types list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypeListCall), [*agent sessions entity types patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentSessionEntityTypePatchCall), [*agent 
train*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentTrainCall), [*agent update fulfillment*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentUpdateFulfillmentCall), [*agent versions create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentVersionCreateCall), [*agent versions delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentVersionDeleteCall), [*agent versions get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentVersionGetCall), [*agent versions list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentVersionListCall), [*agent versions patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAgentVersionPatchCall), [*answer records list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAnswerRecordListCall), [*answer records patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectAnswerRecordPatchCall), [*conversation datasets get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationDatasetGetCall), [*conversation datasets import conversation data*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationDatasetImportConversationDataCall), [*conversation datasets list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationDatasetListCall), [*conversation models create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationModelCreateCall), [*conversation models delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationModelDeleteCall), [*conversation models 
deploy*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationModelDeployCall), [*conversation models evaluations get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationModelEvaluationGetCall), [*conversation models evaluations list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationModelEvaluationListCall), [*conversation models get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationModelGetCall), [*conversation models list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationModelListCall), [*conversation models undeploy*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationModelUndeployCall), [*conversation profiles clear suggestion feature config*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationProfileClearSuggestionFeatureConfigCall), [*conversation profiles create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationProfileCreateCall), [*conversation profiles delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationProfileDeleteCall), [*conversation profiles get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationProfileGetCall), [*conversation profiles list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationProfileListCall), [*conversation profiles patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationProfilePatchCall), [*conversation profiles set suggestion feature config*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationProfileSetSuggestionFeatureConfigCall), [*conversations 
complete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationCompleteCall), [*conversations create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationCreateCall), [*conversations get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationGetCall), [*conversations list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationListCall), [*conversations messages list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationMessageListCall), [*conversations participants analyze content*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationParticipantAnalyzeContentCall), [*conversations participants create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationParticipantCreateCall), [*conversations participants get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationParticipantGetCall), [*conversations participants list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationParticipantListCall), [*conversations participants patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationParticipantPatchCall), [*conversations participants suggestions suggest articles*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationParticipantSuggestionSuggestArticleCall), [*conversations participants suggestions suggest faq answers*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationParticipantSuggestionSuggestFaqAnswerCall), [*conversations participants suggestions suggest smart 
replies*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationParticipantSuggestionSuggestSmartReplyCall), [*conversations suggestions suggest conversation summary*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectConversationSuggestionSuggestConversationSummaryCall), [*delete agent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectDeleteAgentCall), [*get agent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectGetAgentCall), [*knowledge bases create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseCreateCall), [*knowledge bases delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDeleteCall), [*knowledge bases documents create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentCreateCall), [*knowledge bases documents delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentDeleteCall), [*knowledge bases documents export*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentExportCall), [*knowledge bases documents get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentGetCall), [*knowledge bases documents import*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentImportCall), [*knowledge bases documents list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentListCall), [*knowledge bases documents patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentPatchCall), [*knowledge bases documents 
reload*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseDocumentReloadCall), [*knowledge bases get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseGetCall), [*knowledge bases list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBaseListCall), [*knowledge bases patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectKnowledgeBasePatchCall), [*locations agent entity types batch delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeBatchDeleteCall), [*locations agent entity types batch update*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeBatchUpdateCall), [*locations agent entity types create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeCreateCall), [*locations agent entity types delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeDeleteCall), [*locations agent entity types entities batch create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeEntityBatchCreateCall), [*locations agent entity types entities batch delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeEntityBatchDeleteCall), [*locations agent entity types entities batch update*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeEntityBatchUpdateCall), [*locations agent entity types get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeGetCall), [*locations agent entity types 
list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypeListCall), [*locations agent entity types patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEntityTypePatchCall), [*locations agent environments create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentCreateCall), [*locations agent environments delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentDeleteCall), [*locations agent environments get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentGetCall), [*locations agent environments get history*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentGetHistoryCall), [*locations agent environments intents list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentIntentListCall), [*locations agent environments list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentListCall), [*locations agent environments patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentPatchCall), [*locations agent environments users sessions contexts create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextCreateCall), [*locations agent environments users sessions contexts delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextDeleteCall), [*locations agent environments users sessions contexts get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextGetCall), [*locations 
agent environments users sessions contexts list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextListCall), [*locations agent environments users sessions contexts patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionContextPatchCall), [*locations agent environments users sessions delete contexts*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionDeleteContextCall), [*locations agent environments users sessions detect intent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionDetectIntentCall), [*locations agent environments users sessions entity types create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeCreateCall), [*locations agent environments users sessions entity types delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeDeleteCall), [*locations agent environments users sessions entity types get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeGetCall), [*locations agent environments users sessions entity types list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeListCall), [*locations agent environments users sessions entity types patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentEnvironmentUserSessionEntityTypePatchCall), [*locations agent export*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentExportCall), [*locations agent get 
fulfillment*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentGetFulfillmentCall), [*locations agent get validation result*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentGetValidationResultCall), [*locations agent import*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentImportCall), [*locations agent intents batch delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentIntentBatchDeleteCall), [*locations agent intents batch update*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentIntentBatchUpdateCall), [*locations agent intents create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentIntentCreateCall), [*locations agent intents delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentIntentDeleteCall), [*locations agent intents get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentIntentGetCall), [*locations agent intents list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentIntentListCall), [*locations agent intents patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentIntentPatchCall), [*locations agent restore*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentRestoreCall), [*locations agent search*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSearchCall), [*locations agent sessions contexts create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextCreateCall), [*locations agent sessions contexts 
delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextDeleteCall), [*locations agent sessions contexts get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextGetCall), [*locations agent sessions contexts list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextListCall), [*locations agent sessions contexts patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionContextPatchCall), [*locations agent sessions delete contexts*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionDeleteContextCall), [*locations agent sessions detect intent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionDetectIntentCall), [*locations agent sessions entity types create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypeCreateCall), [*locations agent sessions entity types delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypeDeleteCall), [*locations agent sessions entity types get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypeGetCall), [*locations agent sessions entity types list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypeListCall), [*locations agent sessions entity types patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentSessionEntityTypePatchCall), [*locations agent train*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentTrainCall), [*locations agent update 
fulfillment*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentUpdateFulfillmentCall), [*locations agent versions create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentVersionCreateCall), [*locations agent versions delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentVersionDeleteCall), [*locations agent versions get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentVersionGetCall), [*locations agent versions list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentVersionListCall), [*locations agent versions patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAgentVersionPatchCall), [*locations answer records list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAnswerRecordListCall), [*locations answer records patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationAnswerRecordPatchCall), [*locations conversation datasets create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetCreateCall), [*locations conversation datasets delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetDeleteCall), [*locations conversation datasets get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetGetCall), [*locations conversation datasets import conversation data*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetImportConversationDataCall), [*locations conversation datasets 
list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationDatasetListCall), [*locations conversation models create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelCreateCall), [*locations conversation models delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelDeleteCall), [*locations conversation models deploy*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelDeployCall), [*locations conversation models evaluations create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelEvaluationCreateCall), [*locations conversation models evaluations get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelEvaluationGetCall), [*locations conversation models evaluations list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelEvaluationListCall), [*locations conversation models get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelGetCall), [*locations conversation models list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelListCall), [*locations conversation models undeploy*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationModelUndeployCall), [*locations conversation profiles clear suggestion feature config*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationProfileClearSuggestionFeatureConfigCall), [*locations conversation profiles create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationProfileCreateCall), 
[*locations conversation profiles delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationProfileDeleteCall), [*locations conversation profiles get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationProfileGetCall), [*locations conversation profiles list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationProfileListCall), [*locations conversation profiles patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationProfilePatchCall), [*locations conversation profiles set suggestion feature config*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationProfileSetSuggestionFeatureConfigCall), [*locations conversations complete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationCompleteCall), [*locations conversations create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationCreateCall), [*locations conversations get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationGetCall), [*locations conversations list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationListCall), [*locations conversations messages list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationMessageListCall), [*locations conversations participants analyze content*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantAnalyzeContentCall), [*locations conversations participants create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantCreateCall), [*locations conversations 
participants get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantGetCall), [*locations conversations participants list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantListCall), [*locations conversations participants patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantPatchCall), [*locations conversations participants suggestions suggest articles*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantSuggestionSuggestArticleCall), [*locations conversations participants suggestions suggest faq answers*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantSuggestionSuggestFaqAnswerCall), [*locations conversations participants suggestions suggest smart replies*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationParticipantSuggestionSuggestSmartReplyCall), [*locations conversations suggestions suggest conversation summary*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationConversationSuggestionSuggestConversationSummaryCall), [*locations delete agent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationDeleteAgentCall), [*locations get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationGetCall), [*locations get agent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationGetAgentCall), [*locations knowledge bases create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseCreateCall), [*locations knowledge bases 
delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDeleteCall), [*locations knowledge bases documents create*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentCreateCall), [*locations knowledge bases documents delete*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentDeleteCall), [*locations knowledge bases documents export*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentExportCall), [*locations knowledge bases documents get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentGetCall), [*locations knowledge bases documents import*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentImportCall), [*locations knowledge bases documents list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentListCall), [*locations knowledge bases documents patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentPatchCall), [*locations knowledge bases documents reload*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseDocumentReloadCall), [*locations knowledge bases get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseGetCall), [*locations knowledge bases list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBaseListCall), [*locations knowledge bases patch*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationKnowledgeBasePatchCall), [*locations 
list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationOperationListCall), [*locations set agent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectLocationSetAgentCall), [*operations cancel*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectOperationCancelCall), [*operations get*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectOperationListCall) and [*set agent*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/api::ProjectSetAgentCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/Dialogflow)** +* **[Hub](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/Dialogflow)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::CallBuilder) -* **[Resources](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::CallBuilder) +* **[Resources](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::Part)** + * **[Parts](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -181,17 +181,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -201,29 +201,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::Delegate) to the -[Method Builder](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::Delegate) to the +[Method Builder](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::RequestValue) and -[decodable](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::RequestValue) and +[decodable](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dialogflow2/5.0.2-beta-1+20230110/google_dialogflow2/client::RequestValue) are moved +* [request values](https://docs.rs/google-dialogflow2/5.0.2+20230110/google_dialogflow2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/dialogflow2/src/api.rs b/gen/dialogflow2/src/api.rs index 142caba606..e0b88c735a 100644 --- a/gen/dialogflow2/src/api.rs +++ b/gen/dialogflow2/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> Dialogflow { Dialogflow { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dialogflow.googleapis.com/".to_string(), _root_url: "https://dialogflow.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> Dialogflow { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dialogflow2/src/client.rs b/gen/dialogflow2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dialogflow2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dialogflow2/src/lib.rs b/gen/dialogflow2/src/lib.rs index e9f61f6777..7b4de0d266 100644 --- a/gen/dialogflow2/src/lib.rs +++ b/gen/dialogflow2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dialogflow* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *dialogflow:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dialogflow* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *dialogflow:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dialogflow* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/dialogflow/). diff --git a/gen/dialogflow2_beta1-cli/Cargo.toml b/gen/dialogflow2_beta1-cli/Cargo.toml index 86478859a2..6520be36f3 100644 --- a/gen/dialogflow2_beta1-cli/Cargo.toml +++ b/gen/dialogflow2_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dialogflow2_beta1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dialogflow (protocol v2beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow2_beta1-cli" @@ -20,13 +20,13 @@ name = "dialogflow2-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dialogflow2_beta1] path = "../dialogflow2_beta1" -version = "4.0.1+20220228" +version = "5.0.2+20230110" + diff --git a/gen/dialogflow2_beta1-cli/README.md b/gen/dialogflow2_beta1-cli/README.md index 06890f3c8a..0fc3cb11c5 100644 --- a/gen/dialogflow2_beta1-cli/README.md +++ b/gen/dialogflow2_beta1-cli/README.md @@ -25,7 +25,7 @@ 
Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dialogflow* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Dialogflow* API at revision *20230110*. The CLI is at version *5.0.2*. ```bash dialogflow2-beta1 [options] @@ -128,6 +128,7 @@ dialogflow2-beta1 [options] conversations-participants-suggestions-suggest-articles (-r )... [-p ]... [-o ] conversations-participants-suggestions-suggest-faq-answers (-r )... [-p ]... [-o ] conversations-participants-suggestions-suggest-smart-replies (-r )... [-p ]... [-o ] + conversations-suggestions-suggest-conversation-summary (-r )... [-p ]... [-o ] delete-agent [-p ]... [-o ] get-agent [-p ]... [-o ] knowledge-bases-create (-r )... [-p ]... [-o ] @@ -227,6 +228,7 @@ dialogflow2-beta1 [options] locations-conversations-participants-suggestions-suggest-articles (-r )... [-p ]... [-o ] locations-conversations-participants-suggestions-suggest-faq-answers (-r )... [-p ]... [-o ] locations-conversations-participants-suggestions-suggest-smart-replies (-r )... [-p ]... [-o ] + locations-conversations-suggestions-suggest-conversation-summary (-r )... [-p ]... [-o ] locations-delete-agent [-p ]... [-o ] locations-get [-p ]... [-o ] locations-get-agent [-p ]... 
[-o ] diff --git a/gen/dialogflow2_beta1-cli/mkdocs.yml b/gen/dialogflow2_beta1-cli/mkdocs.yml index d876dddbb8..4d7fece0ab 100644 --- a/gen/dialogflow2_beta1-cli/mkdocs.yml +++ b/gen/dialogflow2_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dialogflow v4.0.1+20220228 +site_name: Dialogflow v5.0.2+20230110 site_url: http://byron.github.io/google-apis-rs/google-dialogflow2_beta1-cli site_description: A complete library to interact with Dialogflow (protocol v2beta1) @@ -7,229 +7,232 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow2_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_agent-entity-types-batch-delete.md', 'Projects', 'Agent Entity Types Batch Delete'] -- ['projects_agent-entity-types-batch-update.md', 'Projects', 'Agent Entity Types Batch Update'] -- ['projects_agent-entity-types-create.md', 'Projects', 'Agent Entity Types Create'] -- ['projects_agent-entity-types-delete.md', 'Projects', 'Agent Entity Types Delete'] -- ['projects_agent-entity-types-entities-batch-create.md', 'Projects', 'Agent Entity Types Entities Batch Create'] -- ['projects_agent-entity-types-entities-batch-delete.md', 'Projects', 'Agent Entity Types Entities Batch Delete'] -- ['projects_agent-entity-types-entities-batch-update.md', 'Projects', 'Agent Entity Types Entities Batch Update'] -- ['projects_agent-entity-types-get.md', 'Projects', 'Agent Entity Types Get'] -- ['projects_agent-entity-types-list.md', 'Projects', 'Agent Entity Types List'] -- ['projects_agent-entity-types-patch.md', 'Projects', 'Agent Entity Types Patch'] -- ['projects_agent-environments-create.md', 'Projects', 'Agent Environments Create'] -- ['projects_agent-environments-delete.md', 'Projects', 'Agent Environments Delete'] -- ['projects_agent-environments-get.md', 'Projects', 'Agent Environments Get'] -- ['projects_agent-environments-get-history.md', 'Projects', 'Agent Environments Get History'] -- 
['projects_agent-environments-intents-list.md', 'Projects', 'Agent Environments Intents List'] -- ['projects_agent-environments-list.md', 'Projects', 'Agent Environments List'] -- ['projects_agent-environments-patch.md', 'Projects', 'Agent Environments Patch'] -- ['projects_agent-environments-users-sessions-contexts-create.md', 'Projects', 'Agent Environments Users Sessions Contexts Create'] -- ['projects_agent-environments-users-sessions-contexts-delete.md', 'Projects', 'Agent Environments Users Sessions Contexts Delete'] -- ['projects_agent-environments-users-sessions-contexts-get.md', 'Projects', 'Agent Environments Users Sessions Contexts Get'] -- ['projects_agent-environments-users-sessions-contexts-list.md', 'Projects', 'Agent Environments Users Sessions Contexts List'] -- ['projects_agent-environments-users-sessions-contexts-patch.md', 'Projects', 'Agent Environments Users Sessions Contexts Patch'] -- ['projects_agent-environments-users-sessions-delete-contexts.md', 'Projects', 'Agent Environments Users Sessions Delete Contexts'] -- ['projects_agent-environments-users-sessions-detect-intent.md', 'Projects', 'Agent Environments Users Sessions Detect Intent'] -- ['projects_agent-environments-users-sessions-entity-types-create.md', 'Projects', 'Agent Environments Users Sessions Entity Types Create'] -- ['projects_agent-environments-users-sessions-entity-types-delete.md', 'Projects', 'Agent Environments Users Sessions Entity Types Delete'] -- ['projects_agent-environments-users-sessions-entity-types-get.md', 'Projects', 'Agent Environments Users Sessions Entity Types Get'] -- ['projects_agent-environments-users-sessions-entity-types-list.md', 'Projects', 'Agent Environments Users Sessions Entity Types List'] -- ['projects_agent-environments-users-sessions-entity-types-patch.md', 'Projects', 'Agent Environments Users Sessions Entity Types Patch'] -- ['projects_agent-export.md', 'Projects', 'Agent Export'] -- ['projects_agent-get-fulfillment.md', 'Projects', 
'Agent Get Fulfillment'] -- ['projects_agent-get-validation-result.md', 'Projects', 'Agent Get Validation Result'] -- ['projects_agent-import.md', 'Projects', 'Agent Import'] -- ['projects_agent-intents-batch-delete.md', 'Projects', 'Agent Intents Batch Delete'] -- ['projects_agent-intents-batch-update.md', 'Projects', 'Agent Intents Batch Update'] -- ['projects_agent-intents-create.md', 'Projects', 'Agent Intents Create'] -- ['projects_agent-intents-delete.md', 'Projects', 'Agent Intents Delete'] -- ['projects_agent-intents-get.md', 'Projects', 'Agent Intents Get'] -- ['projects_agent-intents-list.md', 'Projects', 'Agent Intents List'] -- ['projects_agent-intents-patch.md', 'Projects', 'Agent Intents Patch'] -- ['projects_agent-knowledge-bases-create.md', 'Projects', 'Agent Knowledge Bases Create'] -- ['projects_agent-knowledge-bases-delete.md', 'Projects', 'Agent Knowledge Bases Delete'] -- ['projects_agent-knowledge-bases-documents-create.md', 'Projects', 'Agent Knowledge Bases Documents Create'] -- ['projects_agent-knowledge-bases-documents-delete.md', 'Projects', 'Agent Knowledge Bases Documents Delete'] -- ['projects_agent-knowledge-bases-documents-get.md', 'Projects', 'Agent Knowledge Bases Documents Get'] -- ['projects_agent-knowledge-bases-documents-list.md', 'Projects', 'Agent Knowledge Bases Documents List'] -- ['projects_agent-knowledge-bases-documents-patch.md', 'Projects', 'Agent Knowledge Bases Documents Patch'] -- ['projects_agent-knowledge-bases-documents-reload.md', 'Projects', 'Agent Knowledge Bases Documents Reload'] -- ['projects_agent-knowledge-bases-get.md', 'Projects', 'Agent Knowledge Bases Get'] -- ['projects_agent-knowledge-bases-list.md', 'Projects', 'Agent Knowledge Bases List'] -- ['projects_agent-knowledge-bases-patch.md', 'Projects', 'Agent Knowledge Bases Patch'] -- ['projects_agent-restore.md', 'Projects', 'Agent Restore'] -- ['projects_agent-search.md', 'Projects', 'Agent Search'] -- ['projects_agent-sessions-contexts-create.md', 
'Projects', 'Agent Sessions Contexts Create'] -- ['projects_agent-sessions-contexts-delete.md', 'Projects', 'Agent Sessions Contexts Delete'] -- ['projects_agent-sessions-contexts-get.md', 'Projects', 'Agent Sessions Contexts Get'] -- ['projects_agent-sessions-contexts-list.md', 'Projects', 'Agent Sessions Contexts List'] -- ['projects_agent-sessions-contexts-patch.md', 'Projects', 'Agent Sessions Contexts Patch'] -- ['projects_agent-sessions-delete-contexts.md', 'Projects', 'Agent Sessions Delete Contexts'] -- ['projects_agent-sessions-detect-intent.md', 'Projects', 'Agent Sessions Detect Intent'] -- ['projects_agent-sessions-entity-types-create.md', 'Projects', 'Agent Sessions Entity Types Create'] -- ['projects_agent-sessions-entity-types-delete.md', 'Projects', 'Agent Sessions Entity Types Delete'] -- ['projects_agent-sessions-entity-types-get.md', 'Projects', 'Agent Sessions Entity Types Get'] -- ['projects_agent-sessions-entity-types-list.md', 'Projects', 'Agent Sessions Entity Types List'] -- ['projects_agent-sessions-entity-types-patch.md', 'Projects', 'Agent Sessions Entity Types Patch'] -- ['projects_agent-train.md', 'Projects', 'Agent Train'] -- ['projects_agent-update-fulfillment.md', 'Projects', 'Agent Update Fulfillment'] -- ['projects_agent-versions-create.md', 'Projects', 'Agent Versions Create'] -- ['projects_agent-versions-delete.md', 'Projects', 'Agent Versions Delete'] -- ['projects_agent-versions-get.md', 'Projects', 'Agent Versions Get'] -- ['projects_agent-versions-list.md', 'Projects', 'Agent Versions List'] -- ['projects_agent-versions-patch.md', 'Projects', 'Agent Versions Patch'] -- ['projects_answer-records-get.md', 'Projects', 'Answer Records Get'] -- ['projects_answer-records-list.md', 'Projects', 'Answer Records List'] -- ['projects_answer-records-patch.md', 'Projects', 'Answer Records Patch'] -- ['projects_conversation-profiles-clear-suggestion-feature-config.md', 'Projects', 'Conversation Profiles Clear Suggestion Feature Config'] 
-- ['projects_conversation-profiles-create.md', 'Projects', 'Conversation Profiles Create'] -- ['projects_conversation-profiles-delete.md', 'Projects', 'Conversation Profiles Delete'] -- ['projects_conversation-profiles-get.md', 'Projects', 'Conversation Profiles Get'] -- ['projects_conversation-profiles-list.md', 'Projects', 'Conversation Profiles List'] -- ['projects_conversation-profiles-patch.md', 'Projects', 'Conversation Profiles Patch'] -- ['projects_conversation-profiles-set-suggestion-feature-config.md', 'Projects', 'Conversation Profiles Set Suggestion Feature Config'] -- ['projects_conversations-complete.md', 'Projects', 'Conversations Complete'] -- ['projects_conversations-create.md', 'Projects', 'Conversations Create'] -- ['projects_conversations-get.md', 'Projects', 'Conversations Get'] -- ['projects_conversations-list.md', 'Projects', 'Conversations List'] -- ['projects_conversations-messages-batch-create.md', 'Projects', 'Conversations Messages Batch Create'] -- ['projects_conversations-messages-list.md', 'Projects', 'Conversations Messages List'] -- ['projects_conversations-participants-analyze-content.md', 'Projects', 'Conversations Participants Analyze Content'] -- ['projects_conversations-participants-create.md', 'Projects', 'Conversations Participants Create'] -- ['projects_conversations-participants-get.md', 'Projects', 'Conversations Participants Get'] -- ['projects_conversations-participants-list.md', 'Projects', 'Conversations Participants List'] -- ['projects_conversations-participants-patch.md', 'Projects', 'Conversations Participants Patch'] -- ['projects_conversations-participants-suggestions-compile.md', 'Projects', 'Conversations Participants Suggestions Compile'] -- ['projects_conversations-participants-suggestions-list.md', 'Projects', 'Conversations Participants Suggestions List'] -- ['projects_conversations-participants-suggestions-suggest-articles.md', 'Projects', 'Conversations Participants Suggestions Suggest Articles'] -- 
['projects_conversations-participants-suggestions-suggest-faq-answers.md', 'Projects', 'Conversations Participants Suggestions Suggest Faq Answers'] -- ['projects_conversations-participants-suggestions-suggest-smart-replies.md', 'Projects', 'Conversations Participants Suggestions Suggest Smart Replies'] -- ['projects_delete-agent.md', 'Projects', 'Delete Agent'] -- ['projects_get-agent.md', 'Projects', 'Get Agent'] -- ['projects_knowledge-bases-create.md', 'Projects', 'Knowledge Bases Create'] -- ['projects_knowledge-bases-delete.md', 'Projects', 'Knowledge Bases Delete'] -- ['projects_knowledge-bases-documents-create.md', 'Projects', 'Knowledge Bases Documents Create'] -- ['projects_knowledge-bases-documents-delete.md', 'Projects', 'Knowledge Bases Documents Delete'] -- ['projects_knowledge-bases-documents-get.md', 'Projects', 'Knowledge Bases Documents Get'] -- ['projects_knowledge-bases-documents-import.md', 'Projects', 'Knowledge Bases Documents Import'] -- ['projects_knowledge-bases-documents-list.md', 'Projects', 'Knowledge Bases Documents List'] -- ['projects_knowledge-bases-documents-patch.md', 'Projects', 'Knowledge Bases Documents Patch'] -- ['projects_knowledge-bases-documents-reload.md', 'Projects', 'Knowledge Bases Documents Reload'] -- ['projects_knowledge-bases-get.md', 'Projects', 'Knowledge Bases Get'] -- ['projects_knowledge-bases-list.md', 'Projects', 'Knowledge Bases List'] -- ['projects_knowledge-bases-patch.md', 'Projects', 'Knowledge Bases Patch'] -- ['projects_locations-agent-entity-types-batch-delete.md', 'Projects', 'Locations Agent Entity Types Batch Delete'] -- ['projects_locations-agent-entity-types-batch-update.md', 'Projects', 'Locations Agent Entity Types Batch Update'] -- ['projects_locations-agent-entity-types-create.md', 'Projects', 'Locations Agent Entity Types Create'] -- ['projects_locations-agent-entity-types-delete.md', 'Projects', 'Locations Agent Entity Types Delete'] -- 
['projects_locations-agent-entity-types-entities-batch-create.md', 'Projects', 'Locations Agent Entity Types Entities Batch Create'] -- ['projects_locations-agent-entity-types-entities-batch-delete.md', 'Projects', 'Locations Agent Entity Types Entities Batch Delete'] -- ['projects_locations-agent-entity-types-entities-batch-update.md', 'Projects', 'Locations Agent Entity Types Entities Batch Update'] -- ['projects_locations-agent-entity-types-get.md', 'Projects', 'Locations Agent Entity Types Get'] -- ['projects_locations-agent-entity-types-list.md', 'Projects', 'Locations Agent Entity Types List'] -- ['projects_locations-agent-entity-types-patch.md', 'Projects', 'Locations Agent Entity Types Patch'] -- ['projects_locations-agent-environments-create.md', 'Projects', 'Locations Agent Environments Create'] -- ['projects_locations-agent-environments-delete.md', 'Projects', 'Locations Agent Environments Delete'] -- ['projects_locations-agent-environments-get.md', 'Projects', 'Locations Agent Environments Get'] -- ['projects_locations-agent-environments-get-history.md', 'Projects', 'Locations Agent Environments Get History'] -- ['projects_locations-agent-environments-intents-list.md', 'Projects', 'Locations Agent Environments Intents List'] -- ['projects_locations-agent-environments-list.md', 'Projects', 'Locations Agent Environments List'] -- ['projects_locations-agent-environments-patch.md', 'Projects', 'Locations Agent Environments Patch'] -- ['projects_locations-agent-environments-users-sessions-contexts-create.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts Create'] -- ['projects_locations-agent-environments-users-sessions-contexts-delete.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts Delete'] -- ['projects_locations-agent-environments-users-sessions-contexts-get.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts Get'] -- ['projects_locations-agent-environments-users-sessions-contexts-list.md', 
'Projects', 'Locations Agent Environments Users Sessions Contexts List'] -- ['projects_locations-agent-environments-users-sessions-contexts-patch.md', 'Projects', 'Locations Agent Environments Users Sessions Contexts Patch'] -- ['projects_locations-agent-environments-users-sessions-delete-contexts.md', 'Projects', 'Locations Agent Environments Users Sessions Delete Contexts'] -- ['projects_locations-agent-environments-users-sessions-detect-intent.md', 'Projects', 'Locations Agent Environments Users Sessions Detect Intent'] -- ['projects_locations-agent-environments-users-sessions-entity-types-create.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types Create'] -- ['projects_locations-agent-environments-users-sessions-entity-types-delete.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types Delete'] -- ['projects_locations-agent-environments-users-sessions-entity-types-get.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types Get'] -- ['projects_locations-agent-environments-users-sessions-entity-types-list.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types List'] -- ['projects_locations-agent-environments-users-sessions-entity-types-patch.md', 'Projects', 'Locations Agent Environments Users Sessions Entity Types Patch'] -- ['projects_locations-agent-export.md', 'Projects', 'Locations Agent Export'] -- ['projects_locations-agent-get-fulfillment.md', 'Projects', 'Locations Agent Get Fulfillment'] -- ['projects_locations-agent-get-validation-result.md', 'Projects', 'Locations Agent Get Validation Result'] -- ['projects_locations-agent-import.md', 'Projects', 'Locations Agent Import'] -- ['projects_locations-agent-intents-batch-delete.md', 'Projects', 'Locations Agent Intents Batch Delete'] -- ['projects_locations-agent-intents-batch-update.md', 'Projects', 'Locations Agent Intents Batch Update'] -- ['projects_locations-agent-intents-create.md', 'Projects', 'Locations Agent Intents 
Create'] -- ['projects_locations-agent-intents-delete.md', 'Projects', 'Locations Agent Intents Delete'] -- ['projects_locations-agent-intents-get.md', 'Projects', 'Locations Agent Intents Get'] -- ['projects_locations-agent-intents-list.md', 'Projects', 'Locations Agent Intents List'] -- ['projects_locations-agent-intents-patch.md', 'Projects', 'Locations Agent Intents Patch'] -- ['projects_locations-agent-restore.md', 'Projects', 'Locations Agent Restore'] -- ['projects_locations-agent-search.md', 'Projects', 'Locations Agent Search'] -- ['projects_locations-agent-sessions-contexts-create.md', 'Projects', 'Locations Agent Sessions Contexts Create'] -- ['projects_locations-agent-sessions-contexts-delete.md', 'Projects', 'Locations Agent Sessions Contexts Delete'] -- ['projects_locations-agent-sessions-contexts-get.md', 'Projects', 'Locations Agent Sessions Contexts Get'] -- ['projects_locations-agent-sessions-contexts-list.md', 'Projects', 'Locations Agent Sessions Contexts List'] -- ['projects_locations-agent-sessions-contexts-patch.md', 'Projects', 'Locations Agent Sessions Contexts Patch'] -- ['projects_locations-agent-sessions-delete-contexts.md', 'Projects', 'Locations Agent Sessions Delete Contexts'] -- ['projects_locations-agent-sessions-detect-intent.md', 'Projects', 'Locations Agent Sessions Detect Intent'] -- ['projects_locations-agent-sessions-entity-types-create.md', 'Projects', 'Locations Agent Sessions Entity Types Create'] -- ['projects_locations-agent-sessions-entity-types-delete.md', 'Projects', 'Locations Agent Sessions Entity Types Delete'] -- ['projects_locations-agent-sessions-entity-types-get.md', 'Projects', 'Locations Agent Sessions Entity Types Get'] -- ['projects_locations-agent-sessions-entity-types-list.md', 'Projects', 'Locations Agent Sessions Entity Types List'] -- ['projects_locations-agent-sessions-entity-types-patch.md', 'Projects', 'Locations Agent Sessions Entity Types Patch'] -- ['projects_locations-agent-train.md', 'Projects', 
'Locations Agent Train'] -- ['projects_locations-agent-update-fulfillment.md', 'Projects', 'Locations Agent Update Fulfillment'] -- ['projects_locations-agent-versions-create.md', 'Projects', 'Locations Agent Versions Create'] -- ['projects_locations-agent-versions-delete.md', 'Projects', 'Locations Agent Versions Delete'] -- ['projects_locations-agent-versions-get.md', 'Projects', 'Locations Agent Versions Get'] -- ['projects_locations-agent-versions-list.md', 'Projects', 'Locations Agent Versions List'] -- ['projects_locations-agent-versions-patch.md', 'Projects', 'Locations Agent Versions Patch'] -- ['projects_locations-answer-records-get.md', 'Projects', 'Locations Answer Records Get'] -- ['projects_locations-answer-records-list.md', 'Projects', 'Locations Answer Records List'] -- ['projects_locations-answer-records-patch.md', 'Projects', 'Locations Answer Records Patch'] -- ['projects_locations-conversation-profiles-clear-suggestion-feature-config.md', 'Projects', 'Locations Conversation Profiles Clear Suggestion Feature Config'] -- ['projects_locations-conversation-profiles-create.md', 'Projects', 'Locations Conversation Profiles Create'] -- ['projects_locations-conversation-profiles-delete.md', 'Projects', 'Locations Conversation Profiles Delete'] -- ['projects_locations-conversation-profiles-get.md', 'Projects', 'Locations Conversation Profiles Get'] -- ['projects_locations-conversation-profiles-list.md', 'Projects', 'Locations Conversation Profiles List'] -- ['projects_locations-conversation-profiles-patch.md', 'Projects', 'Locations Conversation Profiles Patch'] -- ['projects_locations-conversation-profiles-set-suggestion-feature-config.md', 'Projects', 'Locations Conversation Profiles Set Suggestion Feature Config'] -- ['projects_locations-conversations-complete.md', 'Projects', 'Locations Conversations Complete'] -- ['projects_locations-conversations-create.md', 'Projects', 'Locations Conversations Create'] -- ['projects_locations-conversations-get.md', 
'Projects', 'Locations Conversations Get'] -- ['projects_locations-conversations-list.md', 'Projects', 'Locations Conversations List'] -- ['projects_locations-conversations-messages-batch-create.md', 'Projects', 'Locations Conversations Messages Batch Create'] -- ['projects_locations-conversations-messages-list.md', 'Projects', 'Locations Conversations Messages List'] -- ['projects_locations-conversations-participants-analyze-content.md', 'Projects', 'Locations Conversations Participants Analyze Content'] -- ['projects_locations-conversations-participants-create.md', 'Projects', 'Locations Conversations Participants Create'] -- ['projects_locations-conversations-participants-get.md', 'Projects', 'Locations Conversations Participants Get'] -- ['projects_locations-conversations-participants-list.md', 'Projects', 'Locations Conversations Participants List'] -- ['projects_locations-conversations-participants-patch.md', 'Projects', 'Locations Conversations Participants Patch'] -- ['projects_locations-conversations-participants-suggestions-suggest-articles.md', 'Projects', 'Locations Conversations Participants Suggestions Suggest Articles'] -- ['projects_locations-conversations-participants-suggestions-suggest-faq-answers.md', 'Projects', 'Locations Conversations Participants Suggestions Suggest Faq Answers'] -- ['projects_locations-conversations-participants-suggestions-suggest-smart-replies.md', 'Projects', 'Locations Conversations Participants Suggestions Suggest Smart Replies'] -- ['projects_locations-delete-agent.md', 'Projects', 'Locations Delete Agent'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-get-agent.md', 'Projects', 'Locations Get Agent'] -- ['projects_locations-knowledge-bases-create.md', 'Projects', 'Locations Knowledge Bases Create'] -- ['projects_locations-knowledge-bases-delete.md', 'Projects', 'Locations Knowledge Bases Delete'] -- ['projects_locations-knowledge-bases-documents-create.md', 'Projects', 
'Locations Knowledge Bases Documents Create'] -- ['projects_locations-knowledge-bases-documents-delete.md', 'Projects', 'Locations Knowledge Bases Documents Delete'] -- ['projects_locations-knowledge-bases-documents-get.md', 'Projects', 'Locations Knowledge Bases Documents Get'] -- ['projects_locations-knowledge-bases-documents-import.md', 'Projects', 'Locations Knowledge Bases Documents Import'] -- ['projects_locations-knowledge-bases-documents-list.md', 'Projects', 'Locations Knowledge Bases Documents List'] -- ['projects_locations-knowledge-bases-documents-patch.md', 'Projects', 'Locations Knowledge Bases Documents Patch'] -- ['projects_locations-knowledge-bases-documents-reload.md', 'Projects', 'Locations Knowledge Bases Documents Reload'] -- ['projects_locations-knowledge-bases-get.md', 'Projects', 'Locations Knowledge Bases Get'] -- ['projects_locations-knowledge-bases-list.md', 'Projects', 'Locations Knowledge Bases List'] -- ['projects_locations-knowledge-bases-patch.md', 'Projects', 'Locations Knowledge Bases Patch'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-set-agent.md', 'Projects', 'Locations Set Agent'] -- ['projects_operations-cancel.md', 'Projects', 'Operations Cancel'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_operations-list.md', 'Projects', 'Operations List'] -- ['projects_set-agent.md', 'Projects', 'Set Agent'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Agent Entity Types Batch Delete': 'projects_agent-entity-types-batch-delete.md' + - 'Agent Entity Types Batch Update': 'projects_agent-entity-types-batch-update.md' + - 'Agent Entity Types Create': 'projects_agent-entity-types-create.md' + - 'Agent Entity 
Types Delete': 'projects_agent-entity-types-delete.md' + - 'Agent Entity Types Entities Batch Create': 'projects_agent-entity-types-entities-batch-create.md' + - 'Agent Entity Types Entities Batch Delete': 'projects_agent-entity-types-entities-batch-delete.md' + - 'Agent Entity Types Entities Batch Update': 'projects_agent-entity-types-entities-batch-update.md' + - 'Agent Entity Types Get': 'projects_agent-entity-types-get.md' + - 'Agent Entity Types List': 'projects_agent-entity-types-list.md' + - 'Agent Entity Types Patch': 'projects_agent-entity-types-patch.md' + - 'Agent Environments Create': 'projects_agent-environments-create.md' + - 'Agent Environments Delete': 'projects_agent-environments-delete.md' + - 'Agent Environments Get': 'projects_agent-environments-get.md' + - 'Agent Environments Get History': 'projects_agent-environments-get-history.md' + - 'Agent Environments Intents List': 'projects_agent-environments-intents-list.md' + - 'Agent Environments List': 'projects_agent-environments-list.md' + - 'Agent Environments Patch': 'projects_agent-environments-patch.md' + - 'Agent Environments Users Sessions Contexts Create': 'projects_agent-environments-users-sessions-contexts-create.md' + - 'Agent Environments Users Sessions Contexts Delete': 'projects_agent-environments-users-sessions-contexts-delete.md' + - 'Agent Environments Users Sessions Contexts Get': 'projects_agent-environments-users-sessions-contexts-get.md' + - 'Agent Environments Users Sessions Contexts List': 'projects_agent-environments-users-sessions-contexts-list.md' + - 'Agent Environments Users Sessions Contexts Patch': 'projects_agent-environments-users-sessions-contexts-patch.md' + - 'Agent Environments Users Sessions Delete Contexts': 'projects_agent-environments-users-sessions-delete-contexts.md' + - 'Agent Environments Users Sessions Detect Intent': 'projects_agent-environments-users-sessions-detect-intent.md' + - 'Agent Environments Users Sessions Entity Types Create': 
'projects_agent-environments-users-sessions-entity-types-create.md' + - 'Agent Environments Users Sessions Entity Types Delete': 'projects_agent-environments-users-sessions-entity-types-delete.md' + - 'Agent Environments Users Sessions Entity Types Get': 'projects_agent-environments-users-sessions-entity-types-get.md' + - 'Agent Environments Users Sessions Entity Types List': 'projects_agent-environments-users-sessions-entity-types-list.md' + - 'Agent Environments Users Sessions Entity Types Patch': 'projects_agent-environments-users-sessions-entity-types-patch.md' + - 'Agent Export': 'projects_agent-export.md' + - 'Agent Get Fulfillment': 'projects_agent-get-fulfillment.md' + - 'Agent Get Validation Result': 'projects_agent-get-validation-result.md' + - 'Agent Import': 'projects_agent-import.md' + - 'Agent Intents Batch Delete': 'projects_agent-intents-batch-delete.md' + - 'Agent Intents Batch Update': 'projects_agent-intents-batch-update.md' + - 'Agent Intents Create': 'projects_agent-intents-create.md' + - 'Agent Intents Delete': 'projects_agent-intents-delete.md' + - 'Agent Intents Get': 'projects_agent-intents-get.md' + - 'Agent Intents List': 'projects_agent-intents-list.md' + - 'Agent Intents Patch': 'projects_agent-intents-patch.md' + - 'Agent Knowledge Bases Create': 'projects_agent-knowledge-bases-create.md' + - 'Agent Knowledge Bases Delete': 'projects_agent-knowledge-bases-delete.md' + - 'Agent Knowledge Bases Documents Create': 'projects_agent-knowledge-bases-documents-create.md' + - 'Agent Knowledge Bases Documents Delete': 'projects_agent-knowledge-bases-documents-delete.md' + - 'Agent Knowledge Bases Documents Get': 'projects_agent-knowledge-bases-documents-get.md' + - 'Agent Knowledge Bases Documents List': 'projects_agent-knowledge-bases-documents-list.md' + - 'Agent Knowledge Bases Documents Patch': 'projects_agent-knowledge-bases-documents-patch.md' + - 'Agent Knowledge Bases Documents Reload': 
'projects_agent-knowledge-bases-documents-reload.md' + - 'Agent Knowledge Bases Get': 'projects_agent-knowledge-bases-get.md' + - 'Agent Knowledge Bases List': 'projects_agent-knowledge-bases-list.md' + - 'Agent Knowledge Bases Patch': 'projects_agent-knowledge-bases-patch.md' + - 'Agent Restore': 'projects_agent-restore.md' + - 'Agent Search': 'projects_agent-search.md' + - 'Agent Sessions Contexts Create': 'projects_agent-sessions-contexts-create.md' + - 'Agent Sessions Contexts Delete': 'projects_agent-sessions-contexts-delete.md' + - 'Agent Sessions Contexts Get': 'projects_agent-sessions-contexts-get.md' + - 'Agent Sessions Contexts List': 'projects_agent-sessions-contexts-list.md' + - 'Agent Sessions Contexts Patch': 'projects_agent-sessions-contexts-patch.md' + - 'Agent Sessions Delete Contexts': 'projects_agent-sessions-delete-contexts.md' + - 'Agent Sessions Detect Intent': 'projects_agent-sessions-detect-intent.md' + - 'Agent Sessions Entity Types Create': 'projects_agent-sessions-entity-types-create.md' + - 'Agent Sessions Entity Types Delete': 'projects_agent-sessions-entity-types-delete.md' + - 'Agent Sessions Entity Types Get': 'projects_agent-sessions-entity-types-get.md' + - 'Agent Sessions Entity Types List': 'projects_agent-sessions-entity-types-list.md' + - 'Agent Sessions Entity Types Patch': 'projects_agent-sessions-entity-types-patch.md' + - 'Agent Train': 'projects_agent-train.md' + - 'Agent Update Fulfillment': 'projects_agent-update-fulfillment.md' + - 'Agent Versions Create': 'projects_agent-versions-create.md' + - 'Agent Versions Delete': 'projects_agent-versions-delete.md' + - 'Agent Versions Get': 'projects_agent-versions-get.md' + - 'Agent Versions List': 'projects_agent-versions-list.md' + - 'Agent Versions Patch': 'projects_agent-versions-patch.md' + - 'Answer Records Get': 'projects_answer-records-get.md' + - 'Answer Records List': 'projects_answer-records-list.md' + - 'Answer Records Patch': 'projects_answer-records-patch.md' + - 
'Conversation Profiles Clear Suggestion Feature Config': 'projects_conversation-profiles-clear-suggestion-feature-config.md' + - 'Conversation Profiles Create': 'projects_conversation-profiles-create.md' + - 'Conversation Profiles Delete': 'projects_conversation-profiles-delete.md' + - 'Conversation Profiles Get': 'projects_conversation-profiles-get.md' + - 'Conversation Profiles List': 'projects_conversation-profiles-list.md' + - 'Conversation Profiles Patch': 'projects_conversation-profiles-patch.md' + - 'Conversation Profiles Set Suggestion Feature Config': 'projects_conversation-profiles-set-suggestion-feature-config.md' + - 'Conversations Complete': 'projects_conversations-complete.md' + - 'Conversations Create': 'projects_conversations-create.md' + - 'Conversations Get': 'projects_conversations-get.md' + - 'Conversations List': 'projects_conversations-list.md' + - 'Conversations Messages Batch Create': 'projects_conversations-messages-batch-create.md' + - 'Conversations Messages List': 'projects_conversations-messages-list.md' + - 'Conversations Participants Analyze Content': 'projects_conversations-participants-analyze-content.md' + - 'Conversations Participants Create': 'projects_conversations-participants-create.md' + - 'Conversations Participants Get': 'projects_conversations-participants-get.md' + - 'Conversations Participants List': 'projects_conversations-participants-list.md' + - 'Conversations Participants Patch': 'projects_conversations-participants-patch.md' + - 'Conversations Participants Suggestions Compile': 'projects_conversations-participants-suggestions-compile.md' + - 'Conversations Participants Suggestions List': 'projects_conversations-participants-suggestions-list.md' + - 'Conversations Participants Suggestions Suggest Articles': 'projects_conversations-participants-suggestions-suggest-articles.md' + - 'Conversations Participants Suggestions Suggest Faq Answers': 'projects_conversations-participants-suggestions-suggest-faq-answers.md' + - 
'Conversations Participants Suggestions Suggest Smart Replies': 'projects_conversations-participants-suggestions-suggest-smart-replies.md' + - 'Conversations Suggestions Suggest Conversation Summary': 'projects_conversations-suggestions-suggest-conversation-summary.md' + - 'Delete Agent': 'projects_delete-agent.md' + - 'Get Agent': 'projects_get-agent.md' + - 'Knowledge Bases Create': 'projects_knowledge-bases-create.md' + - 'Knowledge Bases Delete': 'projects_knowledge-bases-delete.md' + - 'Knowledge Bases Documents Create': 'projects_knowledge-bases-documents-create.md' + - 'Knowledge Bases Documents Delete': 'projects_knowledge-bases-documents-delete.md' + - 'Knowledge Bases Documents Get': 'projects_knowledge-bases-documents-get.md' + - 'Knowledge Bases Documents Import': 'projects_knowledge-bases-documents-import.md' + - 'Knowledge Bases Documents List': 'projects_knowledge-bases-documents-list.md' + - 'Knowledge Bases Documents Patch': 'projects_knowledge-bases-documents-patch.md' + - 'Knowledge Bases Documents Reload': 'projects_knowledge-bases-documents-reload.md' + - 'Knowledge Bases Get': 'projects_knowledge-bases-get.md' + - 'Knowledge Bases List': 'projects_knowledge-bases-list.md' + - 'Knowledge Bases Patch': 'projects_knowledge-bases-patch.md' + - 'Locations Agent Entity Types Batch Delete': 'projects_locations-agent-entity-types-batch-delete.md' + - 'Locations Agent Entity Types Batch Update': 'projects_locations-agent-entity-types-batch-update.md' + - 'Locations Agent Entity Types Create': 'projects_locations-agent-entity-types-create.md' + - 'Locations Agent Entity Types Delete': 'projects_locations-agent-entity-types-delete.md' + - 'Locations Agent Entity Types Entities Batch Create': 'projects_locations-agent-entity-types-entities-batch-create.md' + - 'Locations Agent Entity Types Entities Batch Delete': 'projects_locations-agent-entity-types-entities-batch-delete.md' + - 'Locations Agent Entity Types Entities Batch Update': 
'projects_locations-agent-entity-types-entities-batch-update.md' + - 'Locations Agent Entity Types Get': 'projects_locations-agent-entity-types-get.md' + - 'Locations Agent Entity Types List': 'projects_locations-agent-entity-types-list.md' + - 'Locations Agent Entity Types Patch': 'projects_locations-agent-entity-types-patch.md' + - 'Locations Agent Environments Create': 'projects_locations-agent-environments-create.md' + - 'Locations Agent Environments Delete': 'projects_locations-agent-environments-delete.md' + - 'Locations Agent Environments Get': 'projects_locations-agent-environments-get.md' + - 'Locations Agent Environments Get History': 'projects_locations-agent-environments-get-history.md' + - 'Locations Agent Environments Intents List': 'projects_locations-agent-environments-intents-list.md' + - 'Locations Agent Environments List': 'projects_locations-agent-environments-list.md' + - 'Locations Agent Environments Patch': 'projects_locations-agent-environments-patch.md' + - 'Locations Agent Environments Users Sessions Contexts Create': 'projects_locations-agent-environments-users-sessions-contexts-create.md' + - 'Locations Agent Environments Users Sessions Contexts Delete': 'projects_locations-agent-environments-users-sessions-contexts-delete.md' + - 'Locations Agent Environments Users Sessions Contexts Get': 'projects_locations-agent-environments-users-sessions-contexts-get.md' + - 'Locations Agent Environments Users Sessions Contexts List': 'projects_locations-agent-environments-users-sessions-contexts-list.md' + - 'Locations Agent Environments Users Sessions Contexts Patch': 'projects_locations-agent-environments-users-sessions-contexts-patch.md' + - 'Locations Agent Environments Users Sessions Delete Contexts': 'projects_locations-agent-environments-users-sessions-delete-contexts.md' + - 'Locations Agent Environments Users Sessions Detect Intent': 'projects_locations-agent-environments-users-sessions-detect-intent.md' + - 'Locations Agent Environments 
Users Sessions Entity Types Create': 'projects_locations-agent-environments-users-sessions-entity-types-create.md' + - 'Locations Agent Environments Users Sessions Entity Types Delete': 'projects_locations-agent-environments-users-sessions-entity-types-delete.md' + - 'Locations Agent Environments Users Sessions Entity Types Get': 'projects_locations-agent-environments-users-sessions-entity-types-get.md' + - 'Locations Agent Environments Users Sessions Entity Types List': 'projects_locations-agent-environments-users-sessions-entity-types-list.md' + - 'Locations Agent Environments Users Sessions Entity Types Patch': 'projects_locations-agent-environments-users-sessions-entity-types-patch.md' + - 'Locations Agent Export': 'projects_locations-agent-export.md' + - 'Locations Agent Get Fulfillment': 'projects_locations-agent-get-fulfillment.md' + - 'Locations Agent Get Validation Result': 'projects_locations-agent-get-validation-result.md' + - 'Locations Agent Import': 'projects_locations-agent-import.md' + - 'Locations Agent Intents Batch Delete': 'projects_locations-agent-intents-batch-delete.md' + - 'Locations Agent Intents Batch Update': 'projects_locations-agent-intents-batch-update.md' + - 'Locations Agent Intents Create': 'projects_locations-agent-intents-create.md' + - 'Locations Agent Intents Delete': 'projects_locations-agent-intents-delete.md' + - 'Locations Agent Intents Get': 'projects_locations-agent-intents-get.md' + - 'Locations Agent Intents List': 'projects_locations-agent-intents-list.md' + - 'Locations Agent Intents Patch': 'projects_locations-agent-intents-patch.md' + - 'Locations Agent Restore': 'projects_locations-agent-restore.md' + - 'Locations Agent Search': 'projects_locations-agent-search.md' + - 'Locations Agent Sessions Contexts Create': 'projects_locations-agent-sessions-contexts-create.md' + - 'Locations Agent Sessions Contexts Delete': 'projects_locations-agent-sessions-contexts-delete.md' + - 'Locations Agent Sessions Contexts Get': 
'projects_locations-agent-sessions-contexts-get.md' + - 'Locations Agent Sessions Contexts List': 'projects_locations-agent-sessions-contexts-list.md' + - 'Locations Agent Sessions Contexts Patch': 'projects_locations-agent-sessions-contexts-patch.md' + - 'Locations Agent Sessions Delete Contexts': 'projects_locations-agent-sessions-delete-contexts.md' + - 'Locations Agent Sessions Detect Intent': 'projects_locations-agent-sessions-detect-intent.md' + - 'Locations Agent Sessions Entity Types Create': 'projects_locations-agent-sessions-entity-types-create.md' + - 'Locations Agent Sessions Entity Types Delete': 'projects_locations-agent-sessions-entity-types-delete.md' + - 'Locations Agent Sessions Entity Types Get': 'projects_locations-agent-sessions-entity-types-get.md' + - 'Locations Agent Sessions Entity Types List': 'projects_locations-agent-sessions-entity-types-list.md' + - 'Locations Agent Sessions Entity Types Patch': 'projects_locations-agent-sessions-entity-types-patch.md' + - 'Locations Agent Train': 'projects_locations-agent-train.md' + - 'Locations Agent Update Fulfillment': 'projects_locations-agent-update-fulfillment.md' + - 'Locations Agent Versions Create': 'projects_locations-agent-versions-create.md' + - 'Locations Agent Versions Delete': 'projects_locations-agent-versions-delete.md' + - 'Locations Agent Versions Get': 'projects_locations-agent-versions-get.md' + - 'Locations Agent Versions List': 'projects_locations-agent-versions-list.md' + - 'Locations Agent Versions Patch': 'projects_locations-agent-versions-patch.md' + - 'Locations Answer Records Get': 'projects_locations-answer-records-get.md' + - 'Locations Answer Records List': 'projects_locations-answer-records-list.md' + - 'Locations Answer Records Patch': 'projects_locations-answer-records-patch.md' + - 'Locations Conversation Profiles Clear Suggestion Feature Config': 'projects_locations-conversation-profiles-clear-suggestion-feature-config.md' + - 'Locations Conversation Profiles 
Create': 'projects_locations-conversation-profiles-create.md' + - 'Locations Conversation Profiles Delete': 'projects_locations-conversation-profiles-delete.md' + - 'Locations Conversation Profiles Get': 'projects_locations-conversation-profiles-get.md' + - 'Locations Conversation Profiles List': 'projects_locations-conversation-profiles-list.md' + - 'Locations Conversation Profiles Patch': 'projects_locations-conversation-profiles-patch.md' + - 'Locations Conversation Profiles Set Suggestion Feature Config': 'projects_locations-conversation-profiles-set-suggestion-feature-config.md' + - 'Locations Conversations Complete': 'projects_locations-conversations-complete.md' + - 'Locations Conversations Create': 'projects_locations-conversations-create.md' + - 'Locations Conversations Get': 'projects_locations-conversations-get.md' + - 'Locations Conversations List': 'projects_locations-conversations-list.md' + - 'Locations Conversations Messages Batch Create': 'projects_locations-conversations-messages-batch-create.md' + - 'Locations Conversations Messages List': 'projects_locations-conversations-messages-list.md' + - 'Locations Conversations Participants Analyze Content': 'projects_locations-conversations-participants-analyze-content.md' + - 'Locations Conversations Participants Create': 'projects_locations-conversations-participants-create.md' + - 'Locations Conversations Participants Get': 'projects_locations-conversations-participants-get.md' + - 'Locations Conversations Participants List': 'projects_locations-conversations-participants-list.md' + - 'Locations Conversations Participants Patch': 'projects_locations-conversations-participants-patch.md' + - 'Locations Conversations Participants Suggestions Suggest Articles': 'projects_locations-conversations-participants-suggestions-suggest-articles.md' + - 'Locations Conversations Participants Suggestions Suggest Faq Answers': 'projects_locations-conversations-participants-suggestions-suggest-faq-answers.md' + - 
'Locations Conversations Participants Suggestions Suggest Smart Replies': 'projects_locations-conversations-participants-suggestions-suggest-smart-replies.md' + - 'Locations Conversations Suggestions Suggest Conversation Summary': 'projects_locations-conversations-suggestions-suggest-conversation-summary.md' + - 'Locations Delete Agent': 'projects_locations-delete-agent.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Get Agent': 'projects_locations-get-agent.md' + - 'Locations Knowledge Bases Create': 'projects_locations-knowledge-bases-create.md' + - 'Locations Knowledge Bases Delete': 'projects_locations-knowledge-bases-delete.md' + - 'Locations Knowledge Bases Documents Create': 'projects_locations-knowledge-bases-documents-create.md' + - 'Locations Knowledge Bases Documents Delete': 'projects_locations-knowledge-bases-documents-delete.md' + - 'Locations Knowledge Bases Documents Get': 'projects_locations-knowledge-bases-documents-get.md' + - 'Locations Knowledge Bases Documents Import': 'projects_locations-knowledge-bases-documents-import.md' + - 'Locations Knowledge Bases Documents List': 'projects_locations-knowledge-bases-documents-list.md' + - 'Locations Knowledge Bases Documents Patch': 'projects_locations-knowledge-bases-documents-patch.md' + - 'Locations Knowledge Bases Documents Reload': 'projects_locations-knowledge-bases-documents-reload.md' + - 'Locations Knowledge Bases Get': 'projects_locations-knowledge-bases-get.md' + - 'Locations Knowledge Bases List': 'projects_locations-knowledge-bases-list.md' + - 'Locations Knowledge Bases Patch': 'projects_locations-knowledge-bases-patch.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Set Agent': 'projects_locations-set-agent.md' + - 'Operations 
Cancel': 'projects_operations-cancel.md' + - 'Operations Get': 'projects_operations-get.md' + - 'Operations List': 'projects_operations-list.md' + - 'Set Agent': 'projects_set-agent.md' theme: readthedocs diff --git a/gen/dialogflow2_beta1-cli/src/client.rs b/gen/dialogflow2_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dialogflow2_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dialogflow2_beta1-cli/src/main.rs b/gen/dialogflow2_beta1-cli/src/main.rs index 880722e0f7..a38657b6b5 100644 --- a/gen/dialogflow2_beta1-cli/src/main.rs +++ b/gen/dialogflow2_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dialogflow2_beta1::{api, Error, oauth2}; +use google_dialogflow2_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -691,7 +690,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -787,7 +786,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -1057,7 +1056,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1116,7 +1115,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -1181,7 +1180,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1285,10 +1284,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "allow-load-to-draft-and-discard-changes" => { - call = call.allow_load_to_draft_and_discard_changes(arg_from_str(value.unwrap_or("false"), err, "allow-load-to-draft-and-discard-changes", "boolean")); + call = call.allow_load_to_draft_and_discard_changes( value.map(|v| arg_from_str(v, err, "allow-load-to-draft-and-discard-changes", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1537,7 +1536,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1627,7 +1626,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2043,7 +2042,7 @@ where 
call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2133,7 +2132,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2861,7 +2860,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -2971,7 +2970,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -3120,7 +3119,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3221,7 +3220,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "import-gcs-custom-metadata" => { - call = call.import_gcs_custom_metadata(arg_from_str(value.unwrap_or("false"), err, "import-gcs-custom-metadata", "boolean")); + call = call.import_gcs_custom_metadata( value.map(|v| arg_from_str(v, err, 
"import-gcs-custom-metadata", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3384,7 +3383,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3488,7 +3487,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3685,7 +3684,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3779,7 +3778,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3924,7 +3923,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4173,7 +4172,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4263,7 +4262,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4679,7 +4678,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4769,7 +4768,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4949,7 +4948,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5201,7 +5200,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5294,7 +5293,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ 
-5405,7 +5404,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); }, _ => { let mut found = false; @@ -5420,7 +5422,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -5516,7 +5518,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5696,11 +5698,12 @@ where "notification-config.message-format" => Some(("notificationConfig.messageFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-config.topic" => Some(("notificationConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stt-config.model" => Some(("sttConfig.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "stt-config.speech-model-variant" => Some(("sttConfig.speechModelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", 
"automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "model", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5874,7 +5877,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5968,11 +5971,12 @@ where "notification-config.message-format" => Some(("notificationConfig.messageFormat", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-config.topic" => Some(("notificationConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stt-config.model" => Some(("sttConfig.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "stt-config.speech-model-variant" => Some(("sttConfig.speechModelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", 
"live-person-config", "logging-config", "message-analysis-config", "message-format", "model", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5987,7 +5991,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6376,7 +6380,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6522,7 +6526,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6598,6 +6602,17 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "assist-query-params.documents-metadata-filters" => Some(("assistQueryParams.documentsMetadataFilters", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "audio-input.audio" => Some(("audioInput.audio", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.audio-encoding" => Some(("audioInput.config.audioEncoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.disable-no-speech-recognized-event" => Some(("audioInput.config.disableNoSpeechRecognizedEvent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "audio-input.config.enable-word-info" => Some(("audioInput.config.enableWordInfo", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "audio-input.config.language-code" => Some(("audioInput.config.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.model" => Some(("audioInput.config.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.model-variant" => Some(("audioInput.config.modelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.phrase-hints" => Some(("audioInput.config.phraseHints", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "audio-input.config.sample-rate-hertz" => Some(("audioInput.config.sampleRateHertz", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "audio-input.config.single-utterance" => Some(("audioInput.config.singleUtterance", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cx-current-page" => Some(("cxCurrentPage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event-input.language-code" => Some(("eventInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event-input.name" => Some(("eventInput.name", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "message-send-time" => Some(("messageSendTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -6620,7 +6635,7 @@ where "text-input.language-code" => Some(("textInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "text-input.text" => Some(("textInput.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "assist-query-params", "audio-encoding", "documents-metadata-filters", "effects-profile-id", "event-input", "geo-location", "knowledge-base-names", "language-code", "latitude", "longitude", "message-send-time", "name", "pitch", "query-params", "reply-audio-config", "request-id", "reset-contexts", "sample-rate-hertz", "sentiment-analysis-request-config", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "text-input", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "assist-query-params", "audio", "audio-encoding", "audio-input", "config", "cx-current-page", "disable-no-speech-recognized-event", "documents-metadata-filters", "effects-profile-id", "enable-word-info", "event-input", "geo-location", "knowledge-base-names", "language-code", "latitude", "longitude", "message-send-time", "model", "model-variant", "name", "phrase-hints", "pitch", "query-params", "reply-audio-config", "request-id", "reset-contexts", "sample-rate-hertz", "sentiment-analysis-request-config", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "text-input", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6830,7 +6845,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6922,7 +6937,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -7067,7 +7082,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7381,6 +7396,92 @@ where } } + async fn _projects_conversations_suggestions_suggest_conversation_summary(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "context-size" => Some(("contextSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "latest-message" => Some(("latestMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["context-size", "latest-message"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDialogflowV2beta1SuggestConversationSummaryRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().conversations_suggestions_suggest_conversation_summary(request, opt.value_of("conversation").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + 
Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_delete_agent(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().delete_agent(opt.value_of("parent").unwrap_or("")); @@ -7579,7 +7680,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7680,7 +7781,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "import-gcs-custom-metadata" => { - call = call.import_gcs_custom_metadata(arg_from_str(value.unwrap_or("false"), err, "import-gcs-custom-metadata", "boolean")); + call = call.import_gcs_custom_metadata( value.map(|v| arg_from_str(v, err, "import-gcs-custom-metadata", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7932,7 +8033,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8036,7 +8137,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8233,7 +8334,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8327,7 +8428,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -9016,7 +9117,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -9112,7 +9213,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -9382,7 +9483,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9441,7 +9542,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = 
call.language_code(value.unwrap_or("")); @@ -9506,7 +9607,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9610,10 +9711,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "allow-load-to-draft-and-discard-changes" => { - call = call.allow_load_to_draft_and_discard_changes(arg_from_str(value.unwrap_or("false"), err, "allow-load-to-draft-and-discard-changes", "boolean")); + call = call.allow_load_to_draft_and_discard_changes( value.map(|v| arg_from_str(v, err, "allow-load-to-draft-and-discard-changes", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -9862,7 +9963,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9952,7 +10053,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -10368,7 +10469,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut 
found = false; @@ -10458,7 +10559,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -11186,7 +11287,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -11296,7 +11397,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -11447,7 +11548,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11696,7 +11797,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11786,7 +11887,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ 
=> { let mut found = false; @@ -12202,7 +12303,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12292,7 +12393,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -12472,7 +12573,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -12724,7 +12825,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12817,7 +12918,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -12928,7 +13029,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); }, _ 
=> { let mut found = false; @@ -12943,7 +13047,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -13039,7 +13143,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -13219,11 +13323,12 @@ where "notification-config.message-format" => Some(("notificationConfig.messageFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-config.topic" => Some(("notificationConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stt-config.model" => Some(("sttConfig.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "stt-config.speech-model-variant" => Some(("sttConfig.speechModelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", 
"language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "model", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -13397,7 +13502,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -13491,11 +13596,12 @@ where "notification-config.message-format" => Some(("notificationConfig.messageFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notification-config.topic" => Some(("notificationConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "stt-config.model" => 
Some(("sttConfig.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "stt-config.speech-model-variant" => Some(("sttConfig.speechModelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-number", "agent", "automated-agent-config", "button-id", "create-time", "deployment-id", "display-name", "enable-entity-extraction", "enable-sentiment-analysis", "enable-stackdriver-logging", "end-user-suggestion-config", "endpoint-domain", "group-suggestion-responses", "human-agent-assistant-config", "human-agent-handoff-config", "human-agent-suggestion-config", "language-code", "live-person-config", "logging-config", "message-analysis-config", "message-format", "model", "name", "new-message-event-notification-config", "notification-config", "organization-id", "salesforce-live-agent-config", "security-settings", "speech-model-variant", "stt-config", "time-zone", "topic", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -13510,7 +13616,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -13899,7 +14005,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -14045,7 +14151,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -14121,6 +14227,17 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "assist-query-params.documents-metadata-filters" => Some(("assistQueryParams.documentsMetadataFilters", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "audio-input.audio" => Some(("audioInput.audio", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.audio-encoding" => Some(("audioInput.config.audioEncoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.disable-no-speech-recognized-event" => Some(("audioInput.config.disableNoSpeechRecognizedEvent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "audio-input.config.enable-word-info" => Some(("audioInput.config.enableWordInfo", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "audio-input.config.language-code" => Some(("audioInput.config.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.model" => Some(("audioInput.config.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.model-variant" => Some(("audioInput.config.modelVariant", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-input.config.phrase-hints" => Some(("audioInput.config.phraseHints", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "audio-input.config.sample-rate-hertz" => Some(("audioInput.config.sampleRateHertz", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "audio-input.config.single-utterance" => Some(("audioInput.config.singleUtterance", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "cx-current-page" => Some(("cxCurrentPage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event-input.language-code" => Some(("eventInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event-input.name" => Some(("eventInput.name", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "message-send-time" => Some(("messageSendTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -14143,7 +14260,7 @@ where "text-input.language-code" => Some(("textInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "text-input.text" => Some(("textInput.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "assist-query-params", "audio-encoding", "documents-metadata-filters", "effects-profile-id", "event-input", "geo-location", "knowledge-base-names", "language-code", "latitude", "longitude", "message-send-time", "name", "pitch", "query-params", "reply-audio-config", "request-id", "reset-contexts", "sample-rate-hertz", "sentiment-analysis-request-config", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "text-input", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "assist-query-params", "audio", "audio-encoding", "audio-input", "config", "cx-current-page", "disable-no-speech-recognized-event", "documents-metadata-filters", "effects-profile-id", "enable-word-info", "event-input", "geo-location", "knowledge-base-names", "language-code", "latitude", "longitude", "message-send-time", "model", "model-variant", "name", "phrase-hints", "pitch", "query-params", "reply-audio-config", "request-id", "reset-contexts", "sample-rate-hertz", "sentiment-analysis-request-config", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "text-input", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -14353,7 +14470,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -14445,7 +14562,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -14756,6 +14873,92 @@ where } } + async fn _projects_locations_conversations_suggestions_suggest_conversation_summary(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "context-size" => Some(("contextSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "latest-message" => Some(("latestMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["context-size", "latest-message"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDialogflowV2beta1SuggestConversationSummaryRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_conversations_suggestions_suggest_conversation_summary(request, opt.value_of("conversation").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + 
Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_delete_agent(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_delete_agent(opt.value_of("parent").unwrap_or("")); @@ -15006,7 +15209,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15107,7 +15310,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "import-gcs-custom-metadata" => { - call = call.import_gcs_custom_metadata(arg_from_str(value.unwrap_or("false"), err, "import-gcs-custom-metadata", "boolean")); + call = call.import_gcs_custom_metadata( value.map(|v| arg_from_str(v, err, "import-gcs-custom-metadata", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -15359,7 +15562,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -15463,7 +15666,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -15660,7 
+15863,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -15754,7 +15957,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -15813,7 +16016,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -15979,7 +16182,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -16082,7 +16285,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -16245,7 +16448,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -16348,7 +16551,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -16698,6 +16901,9 @@ where ("conversations-participants-suggestions-suggest-smart-replies", Some(opt)) => { call_result = self._projects_conversations_participants_suggestions_suggest_smart_replies(opt, dry_run, &mut err).await; }, + ("conversations-suggestions-suggest-conversation-summary", Some(opt)) => { + call_result = self._projects_conversations_suggestions_suggest_conversation_summary(opt, dry_run, &mut err).await; + }, ("delete-agent", Some(opt)) => { call_result = self._projects_delete_agent(opt, dry_run, &mut err).await; }, @@ -16995,6 +17201,9 @@ where ("locations-conversations-participants-suggestions-suggest-smart-replies", Some(opt)) => { call_result = self._projects_locations_conversations_participants_suggestions_suggest_smart_replies(opt, dry_run, &mut err).await; }, + ("locations-conversations-suggestions-suggest-conversation-summary", Some(opt)) => { + call_result = self._projects_locations_conversations_suggestions_suggest_conversation_summary(opt, dry_run, &mut err).await; + }, ("locations-delete-agent", Some(opt)) => { call_result = self._projects_locations_delete_agent(opt, dry_run, &mut err).await; }, @@ -17146,7 +17355,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'agent-entity-types-batch-delete', 'agent-entity-types-batch-update', 'agent-entity-types-create', 'agent-entity-types-delete', 'agent-entity-types-entities-batch-create', 'agent-entity-types-entities-batch-delete', 'agent-entity-types-entities-batch-update', 'agent-entity-types-get', 'agent-entity-types-list', 'agent-entity-types-patch', 'agent-environments-create', 
'agent-environments-delete', 'agent-environments-get', 'agent-environments-get-history', 'agent-environments-intents-list', 'agent-environments-list', 'agent-environments-patch', 'agent-environments-users-sessions-contexts-create', 'agent-environments-users-sessions-contexts-delete', 'agent-environments-users-sessions-contexts-get', 'agent-environments-users-sessions-contexts-list', 'agent-environments-users-sessions-contexts-patch', 'agent-environments-users-sessions-delete-contexts', 'agent-environments-users-sessions-detect-intent', 'agent-environments-users-sessions-entity-types-create', 'agent-environments-users-sessions-entity-types-delete', 'agent-environments-users-sessions-entity-types-get', 'agent-environments-users-sessions-entity-types-list', 'agent-environments-users-sessions-entity-types-patch', 'agent-export', 'agent-get-fulfillment', 'agent-get-validation-result', 'agent-import', 'agent-intents-batch-delete', 'agent-intents-batch-update', 'agent-intents-create', 'agent-intents-delete', 'agent-intents-get', 'agent-intents-list', 'agent-intents-patch', 'agent-knowledge-bases-create', 'agent-knowledge-bases-delete', 'agent-knowledge-bases-documents-create', 'agent-knowledge-bases-documents-delete', 'agent-knowledge-bases-documents-get', 'agent-knowledge-bases-documents-list', 'agent-knowledge-bases-documents-patch', 'agent-knowledge-bases-documents-reload', 'agent-knowledge-bases-get', 'agent-knowledge-bases-list', 'agent-knowledge-bases-patch', 'agent-restore', 'agent-search', 'agent-sessions-contexts-create', 'agent-sessions-contexts-delete', 'agent-sessions-contexts-get', 'agent-sessions-contexts-list', 'agent-sessions-contexts-patch', 'agent-sessions-delete-contexts', 'agent-sessions-detect-intent', 'agent-sessions-entity-types-create', 'agent-sessions-entity-types-delete', 'agent-sessions-entity-types-get', 'agent-sessions-entity-types-list', 'agent-sessions-entity-types-patch', 'agent-train', 'agent-update-fulfillment', 'agent-versions-create', 
'agent-versions-delete', 'agent-versions-get', 'agent-versions-list', 'agent-versions-patch', 'answer-records-get', 'answer-records-list', 'answer-records-patch', 'conversation-profiles-clear-suggestion-feature-config', 'conversation-profiles-create', 'conversation-profiles-delete', 'conversation-profiles-get', 'conversation-profiles-list', 'conversation-profiles-patch', 'conversation-profiles-set-suggestion-feature-config', 'conversations-complete', 'conversations-create', 'conversations-get', 'conversations-list', 'conversations-messages-batch-create', 'conversations-messages-list', 'conversations-participants-analyze-content', 'conversations-participants-create', 'conversations-participants-get', 'conversations-participants-list', 'conversations-participants-patch', 'conversations-participants-suggestions-compile', 'conversations-participants-suggestions-list', 'conversations-participants-suggestions-suggest-articles', 'conversations-participants-suggestions-suggest-faq-answers', 'conversations-participants-suggestions-suggest-smart-replies', 'delete-agent', 'get-agent', 'knowledge-bases-create', 'knowledge-bases-delete', 'knowledge-bases-documents-create', 'knowledge-bases-documents-delete', 'knowledge-bases-documents-get', 'knowledge-bases-documents-import', 'knowledge-bases-documents-list', 'knowledge-bases-documents-patch', 'knowledge-bases-documents-reload', 'knowledge-bases-get', 'knowledge-bases-list', 'knowledge-bases-patch', 'locations-agent-entity-types-batch-delete', 'locations-agent-entity-types-batch-update', 'locations-agent-entity-types-create', 'locations-agent-entity-types-delete', 'locations-agent-entity-types-entities-batch-create', 'locations-agent-entity-types-entities-batch-delete', 'locations-agent-entity-types-entities-batch-update', 'locations-agent-entity-types-get', 'locations-agent-entity-types-list', 'locations-agent-entity-types-patch', 'locations-agent-environments-create', 'locations-agent-environments-delete', 
'locations-agent-environments-get', 'locations-agent-environments-get-history', 'locations-agent-environments-intents-list', 'locations-agent-environments-list', 'locations-agent-environments-patch', 'locations-agent-environments-users-sessions-contexts-create', 'locations-agent-environments-users-sessions-contexts-delete', 'locations-agent-environments-users-sessions-contexts-get', 'locations-agent-environments-users-sessions-contexts-list', 'locations-agent-environments-users-sessions-contexts-patch', 'locations-agent-environments-users-sessions-delete-contexts', 'locations-agent-environments-users-sessions-detect-intent', 'locations-agent-environments-users-sessions-entity-types-create', 'locations-agent-environments-users-sessions-entity-types-delete', 'locations-agent-environments-users-sessions-entity-types-get', 'locations-agent-environments-users-sessions-entity-types-list', 'locations-agent-environments-users-sessions-entity-types-patch', 'locations-agent-export', 'locations-agent-get-fulfillment', 'locations-agent-get-validation-result', 'locations-agent-import', 'locations-agent-intents-batch-delete', 'locations-agent-intents-batch-update', 'locations-agent-intents-create', 'locations-agent-intents-delete', 'locations-agent-intents-get', 'locations-agent-intents-list', 'locations-agent-intents-patch', 'locations-agent-restore', 'locations-agent-search', 'locations-agent-sessions-contexts-create', 'locations-agent-sessions-contexts-delete', 'locations-agent-sessions-contexts-get', 'locations-agent-sessions-contexts-list', 'locations-agent-sessions-contexts-patch', 'locations-agent-sessions-delete-contexts', 'locations-agent-sessions-detect-intent', 'locations-agent-sessions-entity-types-create', 'locations-agent-sessions-entity-types-delete', 'locations-agent-sessions-entity-types-get', 'locations-agent-sessions-entity-types-list', 'locations-agent-sessions-entity-types-patch', 'locations-agent-train', 'locations-agent-update-fulfillment', 
'locations-agent-versions-create', 'locations-agent-versions-delete', 'locations-agent-versions-get', 'locations-agent-versions-list', 'locations-agent-versions-patch', 'locations-answer-records-get', 'locations-answer-records-list', 'locations-answer-records-patch', 'locations-conversation-profiles-clear-suggestion-feature-config', 'locations-conversation-profiles-create', 'locations-conversation-profiles-delete', 'locations-conversation-profiles-get', 'locations-conversation-profiles-list', 'locations-conversation-profiles-patch', 'locations-conversation-profiles-set-suggestion-feature-config', 'locations-conversations-complete', 'locations-conversations-create', 'locations-conversations-get', 'locations-conversations-list', 'locations-conversations-messages-batch-create', 'locations-conversations-messages-list', 'locations-conversations-participants-analyze-content', 'locations-conversations-participants-create', 'locations-conversations-participants-get', 'locations-conversations-participants-list', 'locations-conversations-participants-patch', 'locations-conversations-participants-suggestions-suggest-articles', 'locations-conversations-participants-suggestions-suggest-faq-answers', 'locations-conversations-participants-suggestions-suggest-smart-replies', 'locations-delete-agent', 'locations-get', 'locations-get-agent', 'locations-knowledge-bases-create', 'locations-knowledge-bases-delete', 'locations-knowledge-bases-documents-create', 'locations-knowledge-bases-documents-delete', 'locations-knowledge-bases-documents-get', 'locations-knowledge-bases-documents-import', 'locations-knowledge-bases-documents-list', 'locations-knowledge-bases-documents-patch', 'locations-knowledge-bases-documents-reload', 'locations-knowledge-bases-get', 'locations-knowledge-bases-list', 'locations-knowledge-bases-patch', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'locations-set-agent', 'operations-cancel', 
'operations-get', 'operations-list' and 'set-agent'", vec![ + ("projects", "methods: 'agent-entity-types-batch-delete', 'agent-entity-types-batch-update', 'agent-entity-types-create', 'agent-entity-types-delete', 'agent-entity-types-entities-batch-create', 'agent-entity-types-entities-batch-delete', 'agent-entity-types-entities-batch-update', 'agent-entity-types-get', 'agent-entity-types-list', 'agent-entity-types-patch', 'agent-environments-create', 'agent-environments-delete', 'agent-environments-get', 'agent-environments-get-history', 'agent-environments-intents-list', 'agent-environments-list', 'agent-environments-patch', 'agent-environments-users-sessions-contexts-create', 'agent-environments-users-sessions-contexts-delete', 'agent-environments-users-sessions-contexts-get', 'agent-environments-users-sessions-contexts-list', 'agent-environments-users-sessions-contexts-patch', 'agent-environments-users-sessions-delete-contexts', 'agent-environments-users-sessions-detect-intent', 'agent-environments-users-sessions-entity-types-create', 'agent-environments-users-sessions-entity-types-delete', 'agent-environments-users-sessions-entity-types-get', 'agent-environments-users-sessions-entity-types-list', 'agent-environments-users-sessions-entity-types-patch', 'agent-export', 'agent-get-fulfillment', 'agent-get-validation-result', 'agent-import', 'agent-intents-batch-delete', 'agent-intents-batch-update', 'agent-intents-create', 'agent-intents-delete', 'agent-intents-get', 'agent-intents-list', 'agent-intents-patch', 'agent-knowledge-bases-create', 'agent-knowledge-bases-delete', 'agent-knowledge-bases-documents-create', 'agent-knowledge-bases-documents-delete', 'agent-knowledge-bases-documents-get', 'agent-knowledge-bases-documents-list', 'agent-knowledge-bases-documents-patch', 'agent-knowledge-bases-documents-reload', 'agent-knowledge-bases-get', 'agent-knowledge-bases-list', 'agent-knowledge-bases-patch', 'agent-restore', 'agent-search', 
'agent-sessions-contexts-create', 'agent-sessions-contexts-delete', 'agent-sessions-contexts-get', 'agent-sessions-contexts-list', 'agent-sessions-contexts-patch', 'agent-sessions-delete-contexts', 'agent-sessions-detect-intent', 'agent-sessions-entity-types-create', 'agent-sessions-entity-types-delete', 'agent-sessions-entity-types-get', 'agent-sessions-entity-types-list', 'agent-sessions-entity-types-patch', 'agent-train', 'agent-update-fulfillment', 'agent-versions-create', 'agent-versions-delete', 'agent-versions-get', 'agent-versions-list', 'agent-versions-patch', 'answer-records-get', 'answer-records-list', 'answer-records-patch', 'conversation-profiles-clear-suggestion-feature-config', 'conversation-profiles-create', 'conversation-profiles-delete', 'conversation-profiles-get', 'conversation-profiles-list', 'conversation-profiles-patch', 'conversation-profiles-set-suggestion-feature-config', 'conversations-complete', 'conversations-create', 'conversations-get', 'conversations-list', 'conversations-messages-batch-create', 'conversations-messages-list', 'conversations-participants-analyze-content', 'conversations-participants-create', 'conversations-participants-get', 'conversations-participants-list', 'conversations-participants-patch', 'conversations-participants-suggestions-compile', 'conversations-participants-suggestions-list', 'conversations-participants-suggestions-suggest-articles', 'conversations-participants-suggestions-suggest-faq-answers', 'conversations-participants-suggestions-suggest-smart-replies', 'conversations-suggestions-suggest-conversation-summary', 'delete-agent', 'get-agent', 'knowledge-bases-create', 'knowledge-bases-delete', 'knowledge-bases-documents-create', 'knowledge-bases-documents-delete', 'knowledge-bases-documents-get', 'knowledge-bases-documents-import', 'knowledge-bases-documents-list', 'knowledge-bases-documents-patch', 'knowledge-bases-documents-reload', 'knowledge-bases-get', 'knowledge-bases-list', 
'knowledge-bases-patch', 'locations-agent-entity-types-batch-delete', 'locations-agent-entity-types-batch-update', 'locations-agent-entity-types-create', 'locations-agent-entity-types-delete', 'locations-agent-entity-types-entities-batch-create', 'locations-agent-entity-types-entities-batch-delete', 'locations-agent-entity-types-entities-batch-update', 'locations-agent-entity-types-get', 'locations-agent-entity-types-list', 'locations-agent-entity-types-patch', 'locations-agent-environments-create', 'locations-agent-environments-delete', 'locations-agent-environments-get', 'locations-agent-environments-get-history', 'locations-agent-environments-intents-list', 'locations-agent-environments-list', 'locations-agent-environments-patch', 'locations-agent-environments-users-sessions-contexts-create', 'locations-agent-environments-users-sessions-contexts-delete', 'locations-agent-environments-users-sessions-contexts-get', 'locations-agent-environments-users-sessions-contexts-list', 'locations-agent-environments-users-sessions-contexts-patch', 'locations-agent-environments-users-sessions-delete-contexts', 'locations-agent-environments-users-sessions-detect-intent', 'locations-agent-environments-users-sessions-entity-types-create', 'locations-agent-environments-users-sessions-entity-types-delete', 'locations-agent-environments-users-sessions-entity-types-get', 'locations-agent-environments-users-sessions-entity-types-list', 'locations-agent-environments-users-sessions-entity-types-patch', 'locations-agent-export', 'locations-agent-get-fulfillment', 'locations-agent-get-validation-result', 'locations-agent-import', 'locations-agent-intents-batch-delete', 'locations-agent-intents-batch-update', 'locations-agent-intents-create', 'locations-agent-intents-delete', 'locations-agent-intents-get', 'locations-agent-intents-list', 'locations-agent-intents-patch', 'locations-agent-restore', 'locations-agent-search', 'locations-agent-sessions-contexts-create', 
'locations-agent-sessions-contexts-delete', 'locations-agent-sessions-contexts-get', 'locations-agent-sessions-contexts-list', 'locations-agent-sessions-contexts-patch', 'locations-agent-sessions-delete-contexts', 'locations-agent-sessions-detect-intent', 'locations-agent-sessions-entity-types-create', 'locations-agent-sessions-entity-types-delete', 'locations-agent-sessions-entity-types-get', 'locations-agent-sessions-entity-types-list', 'locations-agent-sessions-entity-types-patch', 'locations-agent-train', 'locations-agent-update-fulfillment', 'locations-agent-versions-create', 'locations-agent-versions-delete', 'locations-agent-versions-get', 'locations-agent-versions-list', 'locations-agent-versions-patch', 'locations-answer-records-get', 'locations-answer-records-list', 'locations-answer-records-patch', 'locations-conversation-profiles-clear-suggestion-feature-config', 'locations-conversation-profiles-create', 'locations-conversation-profiles-delete', 'locations-conversation-profiles-get', 'locations-conversation-profiles-list', 'locations-conversation-profiles-patch', 'locations-conversation-profiles-set-suggestion-feature-config', 'locations-conversations-complete', 'locations-conversations-create', 'locations-conversations-get', 'locations-conversations-list', 'locations-conversations-messages-batch-create', 'locations-conversations-messages-list', 'locations-conversations-participants-analyze-content', 'locations-conversations-participants-create', 'locations-conversations-participants-get', 'locations-conversations-participants-list', 'locations-conversations-participants-patch', 'locations-conversations-participants-suggestions-suggest-articles', 'locations-conversations-participants-suggestions-suggest-faq-answers', 'locations-conversations-participants-suggestions-suggest-smart-replies', 'locations-conversations-suggestions-suggest-conversation-summary', 'locations-delete-agent', 'locations-get', 'locations-get-agent', 
'locations-knowledge-bases-create', 'locations-knowledge-bases-delete', 'locations-knowledge-bases-documents-create', 'locations-knowledge-bases-documents-delete', 'locations-knowledge-bases-documents-get', 'locations-knowledge-bases-documents-import', 'locations-knowledge-bases-documents-list', 'locations-knowledge-bases-documents-patch', 'locations-knowledge-bases-documents-reload', 'locations-knowledge-bases-get', 'locations-knowledge-bases-list', 'locations-knowledge-bases-patch', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'locations-set-agent', 'operations-cancel', 'operations-get', 'operations-list' and 'set-agent'", vec![ ("agent-entity-types-batch-delete", Some(r##"Deletes entity types in the specified agent. This method is a [long-running operation](https://cloud.google.com/dialogflow/es/docs/how/long-running-operations). The returned `Operation` type has the following method-specific fields: - `metadata`: An empty [Struct message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#struct) - `response`: An [Empty message](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#empty) Note: You should always train an agent prior to sending it queries. See the [training documentation](https://cloud.google.com/dialogflow/es/docs/training)."##), "Details at http://byron.github.io/google-apis-rs/google_dialogflow2_beta1_cli/projects_agent-entity-types-batch-delete", @@ -19597,6 +19806,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("conversations-suggestions-suggest-conversation-summary", + Some(r##"Suggest summary for a conversation based on specific historical messages. 
The range of the messages to be used for summary can be specified in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_dialogflow2_beta1_cli/projects_conversations-suggestions-suggest-conversation-summary", + vec![ + (Some(r##"conversation"##), + None, + Some(r##"Required. The conversation to fetch suggestion for. Format: `projects//locations//conversations/`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -22075,6 +22312,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-conversations-suggestions-suggest-conversation-summary", + Some(r##"Suggest summary for a conversation based on specific historical messages. The range of the messages to be used for summary can be specified in the request."##), + "Details at http://byron.github.io/google-apis-rs/google_dialogflow2_beta1_cli/projects_locations-conversations-suggestions-suggest-conversation-summary", + vec![ + (Some(r##"conversation"##), + None, + Some(r##"Required. The conversation to fetch suggestion for. 
Format: `projects//locations//conversations/`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -22663,7 +22928,7 @@ async fn main() { let mut app = App::new("dialogflow2-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230110") .about("Builds conversational interfaces (for example, chatbots, and voice-powered apps and devices).") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dialogflow2_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/dialogflow2_beta1/Cargo.toml b/gen/dialogflow2_beta1/Cargo.toml index e6ca1b30ef..50982de828 100644 --- a/gen/dialogflow2_beta1/Cargo.toml +++ b/gen/dialogflow2_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dialogflow2_beta1" -version = "5.0.2-beta-1+20230110" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dialogflow (protocol v2beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow2_beta1" homepage = "https://cloud.google.com/dialogflow/" -documentation = "https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110" +documentation = "https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110" license = "MIT" keywords = ["dialogflow", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dialogflow2_beta1/README.md b/gen/dialogflow2_beta1/README.md index 4ea5498e22..71b315b7ac 100644 --- a/gen/dialogflow2_beta1/README.md +++ b/gen/dialogflow2_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-dialogflow2_beta1` library allows access to all features of the *Google Dialogflow* service. -This documentation was generated from *Dialogflow* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *dialogflow:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Dialogflow* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *dialogflow:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dialogflow* *v2_beta1* API can be found at the [official documentation site](https://cloud.google.com/dialogflow/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/Dialogflow) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/Dialogflow) ... 
* projects - * [*agent entity types batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeBatchDeleteCall), [*agent entity types batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeBatchUpdateCall), [*agent entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeCreateCall), [*agent entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeDeleteCall), [*agent entity types entities batch create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeEntityBatchCreateCall), [*agent entity types entities batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeEntityBatchDeleteCall), [*agent entity types entities batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeEntityBatchUpdateCall), [*agent entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeGetCall), [*agent entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeListCall), [*agent entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypePatchCall), [*agent environments create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentCreateCall), [*agent environments 
delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentDeleteCall), [*agent environments get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentGetCall), [*agent environments get history*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentGetHistoryCall), [*agent environments intents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentIntentListCall), [*agent environments list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentListCall), [*agent environments patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentPatchCall), [*agent environments users sessions contexts create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextCreateCall), [*agent environments users sessions contexts delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextDeleteCall), [*agent environments users sessions contexts get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextGetCall), [*agent environments users sessions contexts list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextListCall), [*agent environments users sessions contexts patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextPatchCall), [*agent environments users sessions delete 
contexts*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionDeleteContextCall), [*agent environments users sessions detect intent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionDetectIntentCall), [*agent environments users sessions entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypeCreateCall), [*agent environments users sessions entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypeDeleteCall), [*agent environments users sessions entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypeGetCall), [*agent environments users sessions entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypeListCall), [*agent environments users sessions entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypePatchCall), [*agent export*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentExportCall), [*agent get fulfillment*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentGetFulfillmentCall), [*agent get validation result*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentGetValidationResultCall), [*agent import*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentImportCall), [*agent intents batch 
delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentBatchDeleteCall), [*agent intents batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentBatchUpdateCall), [*agent intents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentCreateCall), [*agent intents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentDeleteCall), [*agent intents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentGetCall), [*agent intents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentListCall), [*agent intents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentPatchCall), [*agent knowledge bases create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseCreateCall), [*agent knowledge bases delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDeleteCall), [*agent knowledge bases documents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentCreateCall), [*agent knowledge bases documents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentDeleteCall), [*agent knowledge bases documents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentGetCall), [*agent knowledge bases documents 
list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentListCall), [*agent knowledge bases documents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentPatchCall), [*agent knowledge bases documents reload*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentReloadCall), [*agent knowledge bases get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseGetCall), [*agent knowledge bases list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseListCall), [*agent knowledge bases patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBasePatchCall), [*agent restore*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentRestoreCall), [*agent search*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSearchCall), [*agent sessions contexts create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextCreateCall), [*agent sessions contexts delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextDeleteCall), [*agent sessions contexts get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextGetCall), [*agent sessions contexts list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextListCall), [*agent sessions contexts 
patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextPatchCall), [*agent sessions delete contexts*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionDeleteContextCall), [*agent sessions detect intent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionDetectIntentCall), [*agent sessions entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypeCreateCall), [*agent sessions entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypeDeleteCall), [*agent sessions entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypeGetCall), [*agent sessions entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypeListCall), [*agent sessions entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypePatchCall), [*agent train*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentTrainCall), [*agent update fulfillment*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentUpdateFulfillmentCall), [*agent versions create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionCreateCall), [*agent versions delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionDeleteCall), [*agent versions 
get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionGetCall), [*agent versions list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionListCall), [*agent versions patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionPatchCall), [*answer records get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAnswerRecordGetCall), [*answer records list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAnswerRecordListCall), [*answer records patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectAnswerRecordPatchCall), [*conversation profiles clear suggestion feature config*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileClearSuggestionFeatureConfigCall), [*conversation profiles create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileCreateCall), [*conversation profiles delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileDeleteCall), [*conversation profiles get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileGetCall), [*conversation profiles list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileListCall), [*conversation profiles patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationProfilePatchCall), [*conversation profiles set suggestion feature 
config*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileSetSuggestionFeatureConfigCall), [*conversations complete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationCompleteCall), [*conversations create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationCreateCall), [*conversations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationGetCall), [*conversations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationListCall), [*conversations messages batch create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationMessageBatchCreateCall), [*conversations messages list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationMessageListCall), [*conversations participants analyze content*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantAnalyzeContentCall), [*conversations participants create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantCreateCall), [*conversations participants get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantGetCall), [*conversations participants list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantListCall), [*conversations participants patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantPatchCall), [*conversations participants 
suggestions compile*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionCompileCall), [*conversations participants suggestions list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionListCall), [*conversations participants suggestions suggest articles*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionSuggestArticleCall), [*conversations participants suggestions suggest faq answers*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionSuggestFaqAnswerCall), [*conversations participants suggestions suggest smart replies*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionSuggestSmartReplyCall), [*conversations suggestions suggest conversation summary*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectConversationSuggestionSuggestConversationSummaryCall), [*delete agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectDeleteAgentCall), [*get agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectGetAgentCall), [*knowledge bases create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseCreateCall), [*knowledge bases delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDeleteCall), [*knowledge bases documents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentCreateCall), [*knowledge bases documents 
delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentDeleteCall), [*knowledge bases documents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentGetCall), [*knowledge bases documents import*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentImportCall), [*knowledge bases documents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentListCall), [*knowledge bases documents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentPatchCall), [*knowledge bases documents reload*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentReloadCall), [*knowledge bases get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseGetCall), [*knowledge bases list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseListCall), [*knowledge bases patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBasePatchCall), [*locations agent entity types batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeBatchDeleteCall), [*locations agent entity types batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeBatchUpdateCall), [*locations agent entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeCreateCall), [*locations 
agent entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeDeleteCall), [*locations agent entity types entities batch create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeEntityBatchCreateCall), [*locations agent entity types entities batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeEntityBatchDeleteCall), [*locations agent entity types entities batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeEntityBatchUpdateCall), [*locations agent entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeGetCall), [*locations agent entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeListCall), [*locations agent entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypePatchCall), [*locations agent environments create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentCreateCall), [*locations agent environments delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentDeleteCall), [*locations agent environments get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentGetCall), [*locations agent environments get history*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentGetHistoryCall), 
[*locations agent environments intents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentIntentListCall), [*locations agent environments list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentListCall), [*locations agent environments patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentPatchCall), [*locations agent environments users sessions contexts create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextCreateCall), [*locations agent environments users sessions contexts delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextDeleteCall), [*locations agent environments users sessions contexts get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextGetCall), [*locations agent environments users sessions contexts list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextListCall), [*locations agent environments users sessions contexts patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextPatchCall), [*locations agent environments users sessions delete contexts*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionDeleteContextCall), [*locations agent environments users sessions detect 
intent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionDetectIntentCall), [*locations agent environments users sessions entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeCreateCall), [*locations agent environments users sessions entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeDeleteCall), [*locations agent environments users sessions entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeGetCall), [*locations agent environments users sessions entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeListCall), [*locations agent environments users sessions entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypePatchCall), [*locations agent export*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentExportCall), [*locations agent get fulfillment*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentGetFulfillmentCall), [*locations agent get validation result*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentGetValidationResultCall), [*locations agent import*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentImportCall), [*locations agent intents batch 
delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentBatchDeleteCall), [*locations agent intents batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentBatchUpdateCall), [*locations agent intents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentCreateCall), [*locations agent intents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentDeleteCall), [*locations agent intents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentGetCall), [*locations agent intents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentListCall), [*locations agent intents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentPatchCall), [*locations agent restore*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentRestoreCall), [*locations agent search*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSearchCall), [*locations agent sessions contexts create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextCreateCall), [*locations agent sessions contexts delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextDeleteCall), [*locations agent sessions contexts 
get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextGetCall), [*locations agent sessions contexts list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextListCall), [*locations agent sessions contexts patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextPatchCall), [*locations agent sessions delete contexts*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionDeleteContextCall), [*locations agent sessions detect intent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionDetectIntentCall), [*locations agent sessions entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypeCreateCall), [*locations agent sessions entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypeDeleteCall), [*locations agent sessions entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypeGetCall), [*locations agent sessions entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypeListCall), [*locations agent sessions entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypePatchCall), [*locations agent train*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentTrainCall), [*locations agent update 
fulfillment*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentUpdateFulfillmentCall), [*locations agent versions create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionCreateCall), [*locations agent versions delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionDeleteCall), [*locations agent versions get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionGetCall), [*locations agent versions list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionListCall), [*locations agent versions patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionPatchCall), [*locations answer records get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAnswerRecordGetCall), [*locations answer records list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAnswerRecordListCall), [*locations answer records patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationAnswerRecordPatchCall), [*locations conversation profiles clear suggestion feature config*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileClearSuggestionFeatureConfigCall), [*locations conversation profiles create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileCreateCall), [*locations conversation profiles 
delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileDeleteCall), [*locations conversation profiles get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileGetCall), [*locations conversation profiles list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileListCall), [*locations conversation profiles patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfilePatchCall), [*locations conversation profiles set suggestion feature config*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileSetSuggestionFeatureConfigCall), [*locations conversations complete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationCompleteCall), [*locations conversations create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationCreateCall), [*locations conversations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationGetCall), [*locations conversations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationListCall), [*locations conversations messages batch create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationMessageBatchCreateCall), [*locations conversations messages list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationMessageListCall), [*locations conversations participants analyze 
content*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantAnalyzeContentCall), [*locations conversations participants create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantCreateCall), [*locations conversations participants get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantGetCall), [*locations conversations participants list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantListCall), [*locations conversations participants patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantPatchCall), [*locations conversations participants suggestions suggest articles*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantSuggestionSuggestArticleCall), [*locations conversations participants suggestions suggest faq answers*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantSuggestionSuggestFaqAnswerCall), [*locations conversations participants suggestions suggest smart replies*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantSuggestionSuggestSmartReplyCall), [*locations conversations suggestions suggest conversation summary*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationSuggestionSuggestConversationSummaryCall), [*locations delete 
agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationDeleteAgentCall), [*locations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationGetCall), [*locations get agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationGetAgentCall), [*locations knowledge bases create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseCreateCall), [*locations knowledge bases delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDeleteCall), [*locations knowledge bases documents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentCreateCall), [*locations knowledge bases documents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentDeleteCall), [*locations knowledge bases documents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentGetCall), [*locations knowledge bases documents import*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentImportCall), [*locations knowledge bases documents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentListCall), [*locations knowledge bases documents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentPatchCall), [*locations knowledge bases documents 
reload*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentReloadCall), [*locations knowledge bases get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseGetCall), [*locations knowledge bases list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseListCall), [*locations knowledge bases patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBasePatchCall), [*locations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationOperationListCall), [*locations set agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectLocationSetAgentCall), [*operations cancel*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectOperationCancelCall), [*operations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectOperationListCall) and [*set agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/api::ProjectSetAgentCall)
 * [*agent entity types batch
delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeBatchDeleteCall), [*agent entity types batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeBatchUpdateCall), [*agent entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeCreateCall), [*agent entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeDeleteCall), [*agent entity types entities batch create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeEntityBatchCreateCall), [*agent entity types entities batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeEntityBatchDeleteCall), [*agent entity types entities batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeEntityBatchUpdateCall), [*agent entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeGetCall), [*agent entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypeListCall), [*agent entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEntityTypePatchCall), [*agent environments create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentCreateCall), [*agent environments delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentDeleteCall), [*agent environments 
get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentGetCall), [*agent environments get history*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentGetHistoryCall), [*agent environments intents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentIntentListCall), [*agent environments list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentListCall), [*agent environments patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentPatchCall), [*agent environments users sessions contexts create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextCreateCall), [*agent environments users sessions contexts delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextDeleteCall), [*agent environments users sessions contexts get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextGetCall), [*agent environments users sessions contexts list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextListCall), [*agent environments users sessions contexts patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionContextPatchCall), [*agent environments users sessions delete contexts*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionDeleteContextCall), [*agent environments users sessions detect 
intent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionDetectIntentCall), [*agent environments users sessions entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypeCreateCall), [*agent environments users sessions entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypeDeleteCall), [*agent environments users sessions entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypeGetCall), [*agent environments users sessions entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypeListCall), [*agent environments users sessions entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentEnvironmentUserSessionEntityTypePatchCall), [*agent export*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentExportCall), [*agent get fulfillment*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentGetFulfillmentCall), [*agent get validation result*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentGetValidationResultCall), [*agent import*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentImportCall), [*agent intents batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentBatchDeleteCall), [*agent intents batch 
update*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentBatchUpdateCall), [*agent intents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentCreateCall), [*agent intents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentDeleteCall), [*agent intents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentGetCall), [*agent intents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentListCall), [*agent intents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentIntentPatchCall), [*agent knowledge bases create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseCreateCall), [*agent knowledge bases delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDeleteCall), [*agent knowledge bases documents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentCreateCall), [*agent knowledge bases documents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentDeleteCall), [*agent knowledge bases documents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentGetCall), [*agent knowledge bases documents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentListCall), [*agent knowledge bases documents 
patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentPatchCall), [*agent knowledge bases documents reload*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseDocumentReloadCall), [*agent knowledge bases get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseGetCall), [*agent knowledge bases list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBaseListCall), [*agent knowledge bases patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentKnowledgeBasePatchCall), [*agent restore*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentRestoreCall), [*agent search*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSearchCall), [*agent sessions contexts create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextCreateCall), [*agent sessions contexts delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextDeleteCall), [*agent sessions contexts get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextGetCall), [*agent sessions contexts list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextListCall), [*agent sessions contexts patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionContextPatchCall), [*agent sessions delete contexts*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionDeleteContextCall), [*agent sessions 
detect intent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionDetectIntentCall), [*agent sessions entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypeCreateCall), [*agent sessions entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypeDeleteCall), [*agent sessions entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypeGetCall), [*agent sessions entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypeListCall), [*agent sessions entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentSessionEntityTypePatchCall), [*agent train*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentTrainCall), [*agent update fulfillment*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentUpdateFulfillmentCall), [*agent versions create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionCreateCall), [*agent versions delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionDeleteCall), [*agent versions get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionGetCall), [*agent versions list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionListCall), [*agent versions patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAgentVersionPatchCall), [*answer records 
get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAnswerRecordGetCall), [*answer records list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAnswerRecordListCall), [*answer records patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectAnswerRecordPatchCall), [*conversation profiles clear suggestion feature config*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileClearSuggestionFeatureConfigCall), [*conversation profiles create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileCreateCall), [*conversation profiles delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileDeleteCall), [*conversation profiles get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileGetCall), [*conversation profiles list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileListCall), [*conversation profiles patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationProfilePatchCall), [*conversation profiles set suggestion feature config*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationProfileSetSuggestionFeatureConfigCall), [*conversations complete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationCompleteCall), [*conversations create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationCreateCall), [*conversations 
get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationGetCall), [*conversations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationListCall), [*conversations messages batch create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationMessageBatchCreateCall), [*conversations messages list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationMessageListCall), [*conversations participants analyze content*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantAnalyzeContentCall), [*conversations participants create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantCreateCall), [*conversations participants get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantGetCall), [*conversations participants list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantListCall), [*conversations participants patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantPatchCall), [*conversations participants suggestions compile*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionCompileCall), [*conversations participants suggestions list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionListCall), [*conversations participants suggestions suggest 
articles*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionSuggestArticleCall), [*conversations participants suggestions suggest faq answers*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionSuggestFaqAnswerCall), [*conversations participants suggestions suggest smart replies*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationParticipantSuggestionSuggestSmartReplyCall), [*conversations suggestions suggest conversation summary*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectConversationSuggestionSuggestConversationSummaryCall), [*delete agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectDeleteAgentCall), [*get agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectGetAgentCall), [*knowledge bases create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseCreateCall), [*knowledge bases delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDeleteCall), [*knowledge bases documents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentCreateCall), [*knowledge bases documents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentDeleteCall), [*knowledge bases documents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentGetCall), [*knowledge bases documents import*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentImportCall), [*knowledge 
bases documents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentListCall), [*knowledge bases documents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentPatchCall), [*knowledge bases documents reload*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseDocumentReloadCall), [*knowledge bases get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseGetCall), [*knowledge bases list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBaseListCall), [*knowledge bases patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectKnowledgeBasePatchCall), [*locations agent entity types batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeBatchDeleteCall), [*locations agent entity types batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeBatchUpdateCall), [*locations agent entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeCreateCall), [*locations agent entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeDeleteCall), [*locations agent entity types entities batch create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeEntityBatchCreateCall), [*locations agent entity types entities batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeEntityBatchDeleteCall), [*locations 
agent entity types entities batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeEntityBatchUpdateCall), [*locations agent entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeGetCall), [*locations agent entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypeListCall), [*locations agent entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEntityTypePatchCall), [*locations agent environments create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentCreateCall), [*locations agent environments delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentDeleteCall), [*locations agent environments get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentGetCall), [*locations agent environments get history*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentGetHistoryCall), [*locations agent environments intents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentIntentListCall), [*locations agent environments list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentListCall), [*locations agent environments patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentPatchCall), [*locations agent environments users sessions contexts 
create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextCreateCall), [*locations agent environments users sessions contexts delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextDeleteCall), [*locations agent environments users sessions contexts get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextGetCall), [*locations agent environments users sessions contexts list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextListCall), [*locations agent environments users sessions contexts patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionContextPatchCall), [*locations agent environments users sessions delete contexts*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionDeleteContextCall), [*locations agent environments users sessions detect intent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionDetectIntentCall), [*locations agent environments users sessions entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeCreateCall), [*locations agent environments users sessions entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeDeleteCall), [*locations agent environments users sessions entity types 
get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeGetCall), [*locations agent environments users sessions entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypeListCall), [*locations agent environments users sessions entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentEnvironmentUserSessionEntityTypePatchCall), [*locations agent export*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentExportCall), [*locations agent get fulfillment*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentGetFulfillmentCall), [*locations agent get validation result*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentGetValidationResultCall), [*locations agent import*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentImportCall), [*locations agent intents batch delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentBatchDeleteCall), [*locations agent intents batch update*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentBatchUpdateCall), [*locations agent intents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentCreateCall), [*locations agent intents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentDeleteCall), [*locations agent intents 
get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentGetCall), [*locations agent intents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentListCall), [*locations agent intents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentIntentPatchCall), [*locations agent restore*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentRestoreCall), [*locations agent search*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSearchCall), [*locations agent sessions contexts create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextCreateCall), [*locations agent sessions contexts delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextDeleteCall), [*locations agent sessions contexts get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextGetCall), [*locations agent sessions contexts list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextListCall), [*locations agent sessions contexts patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionContextPatchCall), [*locations agent sessions delete contexts*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionDeleteContextCall), [*locations agent sessions detect intent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionDetectIntentCall), [*locations agent sessions 
entity types create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypeCreateCall), [*locations agent sessions entity types delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypeDeleteCall), [*locations agent sessions entity types get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypeGetCall), [*locations agent sessions entity types list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypeListCall), [*locations agent sessions entity types patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentSessionEntityTypePatchCall), [*locations agent train*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentTrainCall), [*locations agent update fulfillment*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentUpdateFulfillmentCall), [*locations agent versions create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionCreateCall), [*locations agent versions delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionDeleteCall), [*locations agent versions get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionGetCall), [*locations agent versions list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionListCall), [*locations agent versions patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAgentVersionPatchCall), 
[*locations answer records get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAnswerRecordGetCall), [*locations answer records list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAnswerRecordListCall), [*locations answer records patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationAnswerRecordPatchCall), [*locations conversation profiles clear suggestion feature config*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileClearSuggestionFeatureConfigCall), [*locations conversation profiles create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileCreateCall), [*locations conversation profiles delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileDeleteCall), [*locations conversation profiles get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileGetCall), [*locations conversation profiles list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileListCall), [*locations conversation profiles patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfilePatchCall), [*locations conversation profiles set suggestion feature config*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationProfileSetSuggestionFeatureConfigCall), [*locations conversations complete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationCompleteCall), [*locations conversations 
create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationCreateCall), [*locations conversations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationGetCall), [*locations conversations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationListCall), [*locations conversations messages batch create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationMessageBatchCreateCall), [*locations conversations messages list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationMessageListCall), [*locations conversations participants analyze content*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantAnalyzeContentCall), [*locations conversations participants create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantCreateCall), [*locations conversations participants get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantGetCall), [*locations conversations participants list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantListCall), [*locations conversations participants patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantPatchCall), [*locations conversations participants suggestions suggest articles*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantSuggestionSuggestArticleCall), [*locations conversations 
participants suggestions suggest faq answers*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantSuggestionSuggestFaqAnswerCall), [*locations conversations participants suggestions suggest smart replies*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationParticipantSuggestionSuggestSmartReplyCall), [*locations conversations suggestions suggest conversation summary*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationConversationSuggestionSuggestConversationSummaryCall), [*locations delete agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationDeleteAgentCall), [*locations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationGetCall), [*locations get agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationGetAgentCall), [*locations knowledge bases create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseCreateCall), [*locations knowledge bases delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDeleteCall), [*locations knowledge bases documents create*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentCreateCall), [*locations knowledge bases documents delete*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentDeleteCall), [*locations knowledge bases documents get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentGetCall), [*locations knowledge bases documents 
import*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentImportCall), [*locations knowledge bases documents list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentListCall), [*locations knowledge bases documents patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentPatchCall), [*locations knowledge bases documents reload*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseDocumentReloadCall), [*locations knowledge bases get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseGetCall), [*locations knowledge bases list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBaseListCall), [*locations knowledge bases patch*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationKnowledgeBasePatchCall), [*locations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationOperationListCall), [*locations set agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectLocationSetAgentCall), [*operations 
cancel*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectOperationCancelCall), [*operations get*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectOperationListCall) and [*set agent*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/api::ProjectSetAgentCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/Dialogflow)** +* **[Hub](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/Dialogflow)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::Part)** + * **[Parts](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -166,17 +166,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -186,29 +186,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dialogflow2_beta1/5.0.2-beta-1+20230110/google_dialogflow2_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-dialogflow2_beta1/5.0.2+20230110/google_dialogflow2_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/dialogflow2_beta1/src/api.rs b/gen/dialogflow2_beta1/src/api.rs index eac6baf49b..af5929e281 100644 --- a/gen/dialogflow2_beta1/src/api.rs +++ b/gen/dialogflow2_beta1/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> Dialogflow { Dialogflow { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dialogflow.googleapis.com/".to_string(), _root_url: "https://dialogflow.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> Dialogflow { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dialogflow2_beta1/src/client.rs b/gen/dialogflow2_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dialogflow2_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dialogflow2_beta1/src/lib.rs b/gen/dialogflow2_beta1/src/lib.rs index 2b165a19a4..0c05a47bda 100644 --- a/gen/dialogflow2_beta1/src/lib.rs +++ b/gen/dialogflow2_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dialogflow* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *dialogflow:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dialogflow* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *dialogflow:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dialogflow* *v2_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/dialogflow/). diff --git a/gen/dialogflow3-cli/Cargo.toml b/gen/dialogflow3-cli/Cargo.toml index 0f5ea6330d..21f9b2ba10 100644 --- a/gen/dialogflow3-cli/Cargo.toml +++ b/gen/dialogflow3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dialogflow3-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dialogflow (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow3-cli" @@ -20,13 +20,13 @@ name = "dialogflow3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dialogflow3] path = "../dialogflow3" -version = "4.0.1+20220228" +version = "5.0.2+20230110" + diff --git a/gen/dialogflow3-cli/README.md b/gen/dialogflow3-cli/README.md index 5e372d45b4..8fb3de2aa9 100644 --- a/gen/dialogflow3-cli/README.md +++ b/gen/dialogflow3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dialogflow* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Dialogflow* API at revision *20230110*. The CLI is at version *5.0.2*. ```bash dialogflow3 [options] diff --git a/gen/dialogflow3-cli/mkdocs.yml b/gen/dialogflow3-cli/mkdocs.yml index 6792c43039..e49f0a3939 100644 --- a/gen/dialogflow3-cli/mkdocs.yml +++ b/gen/dialogflow3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dialogflow v4.0.1+20220228 +site_name: Dialogflow v5.0.2+20230110 site_url: http://byron.github.io/google-apis-rs/google-dialogflow3-cli site_description: A complete library to interact with Dialogflow (protocol v3) @@ -7,120 +7,121 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-agents-changelogs-get.md', 'Projects', 'Locations Agents Changelogs Get'] -- ['projects_locations-agents-changelogs-list.md', 'Projects', 'Locations Agents Changelogs List'] -- ['projects_locations-agents-create.md', 'Projects', 'Locations Agents Create'] -- ['projects_locations-agents-delete.md', 'Projects', 'Locations Agents Delete'] -- ['projects_locations-agents-entity-types-create.md', 'Projects', 'Locations Agents Entity Types Create'] -- ['projects_locations-agents-entity-types-delete.md', 'Projects', 'Locations Agents Entity Types Delete'] -- ['projects_locations-agents-entity-types-get.md', 'Projects', 'Locations Agents Entity Types Get'] -- ['projects_locations-agents-entity-types-list.md', 'Projects', 'Locations Agents Entity Types List'] -- ['projects_locations-agents-entity-types-patch.md', 'Projects', 'Locations Agents Entity Types Patch'] -- ['projects_locations-agents-environments-continuous-test-results-list.md', 'Projects', 'Locations Agents Environments Continuous Test Results List'] -- 
['projects_locations-agents-environments-create.md', 'Projects', 'Locations Agents Environments Create'] -- ['projects_locations-agents-environments-delete.md', 'Projects', 'Locations Agents Environments Delete'] -- ['projects_locations-agents-environments-deploy-flow.md', 'Projects', 'Locations Agents Environments Deploy Flow'] -- ['projects_locations-agents-environments-deployments-get.md', 'Projects', 'Locations Agents Environments Deployments Get'] -- ['projects_locations-agents-environments-deployments-list.md', 'Projects', 'Locations Agents Environments Deployments List'] -- ['projects_locations-agents-environments-experiments-create.md', 'Projects', 'Locations Agents Environments Experiments Create'] -- ['projects_locations-agents-environments-experiments-delete.md', 'Projects', 'Locations Agents Environments Experiments Delete'] -- ['projects_locations-agents-environments-experiments-get.md', 'Projects', 'Locations Agents Environments Experiments Get'] -- ['projects_locations-agents-environments-experiments-list.md', 'Projects', 'Locations Agents Environments Experiments List'] -- ['projects_locations-agents-environments-experiments-patch.md', 'Projects', 'Locations Agents Environments Experiments Patch'] -- ['projects_locations-agents-environments-experiments-start.md', 'Projects', 'Locations Agents Environments Experiments Start'] -- ['projects_locations-agents-environments-experiments-stop.md', 'Projects', 'Locations Agents Environments Experiments Stop'] -- ['projects_locations-agents-environments-get.md', 'Projects', 'Locations Agents Environments Get'] -- ['projects_locations-agents-environments-list.md', 'Projects', 'Locations Agents Environments List'] -- ['projects_locations-agents-environments-lookup-environment-history.md', 'Projects', 'Locations Agents Environments Lookup Environment History'] -- ['projects_locations-agents-environments-patch.md', 'Projects', 'Locations Agents Environments Patch'] -- 
['projects_locations-agents-environments-run-continuous-test.md', 'Projects', 'Locations Agents Environments Run Continuous Test'] -- ['projects_locations-agents-environments-sessions-detect-intent.md', 'Projects', 'Locations Agents Environments Sessions Detect Intent'] -- ['projects_locations-agents-environments-sessions-entity-types-create.md', 'Projects', 'Locations Agents Environments Sessions Entity Types Create'] -- ['projects_locations-agents-environments-sessions-entity-types-delete.md', 'Projects', 'Locations Agents Environments Sessions Entity Types Delete'] -- ['projects_locations-agents-environments-sessions-entity-types-get.md', 'Projects', 'Locations Agents Environments Sessions Entity Types Get'] -- ['projects_locations-agents-environments-sessions-entity-types-list.md', 'Projects', 'Locations Agents Environments Sessions Entity Types List'] -- ['projects_locations-agents-environments-sessions-entity-types-patch.md', 'Projects', 'Locations Agents Environments Sessions Entity Types Patch'] -- ['projects_locations-agents-environments-sessions-fulfill-intent.md', 'Projects', 'Locations Agents Environments Sessions Fulfill Intent'] -- ['projects_locations-agents-environments-sessions-match-intent.md', 'Projects', 'Locations Agents Environments Sessions Match Intent'] -- ['projects_locations-agents-export.md', 'Projects', 'Locations Agents Export'] -- ['projects_locations-agents-flows-create.md', 'Projects', 'Locations Agents Flows Create'] -- ['projects_locations-agents-flows-delete.md', 'Projects', 'Locations Agents Flows Delete'] -- ['projects_locations-agents-flows-export.md', 'Projects', 'Locations Agents Flows Export'] -- ['projects_locations-agents-flows-get.md', 'Projects', 'Locations Agents Flows Get'] -- ['projects_locations-agents-flows-get-validation-result.md', 'Projects', 'Locations Agents Flows Get Validation Result'] -- ['projects_locations-agents-flows-import.md', 'Projects', 'Locations Agents Flows Import'] -- 
['projects_locations-agents-flows-list.md', 'Projects', 'Locations Agents Flows List'] -- ['projects_locations-agents-flows-pages-create.md', 'Projects', 'Locations Agents Flows Pages Create'] -- ['projects_locations-agents-flows-pages-delete.md', 'Projects', 'Locations Agents Flows Pages Delete'] -- ['projects_locations-agents-flows-pages-get.md', 'Projects', 'Locations Agents Flows Pages Get'] -- ['projects_locations-agents-flows-pages-list.md', 'Projects', 'Locations Agents Flows Pages List'] -- ['projects_locations-agents-flows-pages-patch.md', 'Projects', 'Locations Agents Flows Pages Patch'] -- ['projects_locations-agents-flows-patch.md', 'Projects', 'Locations Agents Flows Patch'] -- ['projects_locations-agents-flows-train.md', 'Projects', 'Locations Agents Flows Train'] -- ['projects_locations-agents-flows-transition-route-groups-create.md', 'Projects', 'Locations Agents Flows Transition Route Groups Create'] -- ['projects_locations-agents-flows-transition-route-groups-delete.md', 'Projects', 'Locations Agents Flows Transition Route Groups Delete'] -- ['projects_locations-agents-flows-transition-route-groups-get.md', 'Projects', 'Locations Agents Flows Transition Route Groups Get'] -- ['projects_locations-agents-flows-transition-route-groups-list.md', 'Projects', 'Locations Agents Flows Transition Route Groups List'] -- ['projects_locations-agents-flows-transition-route-groups-patch.md', 'Projects', 'Locations Agents Flows Transition Route Groups Patch'] -- ['projects_locations-agents-flows-validate.md', 'Projects', 'Locations Agents Flows Validate'] -- ['projects_locations-agents-flows-versions-compare-versions.md', 'Projects', 'Locations Agents Flows Versions Compare Versions'] -- ['projects_locations-agents-flows-versions-create.md', 'Projects', 'Locations Agents Flows Versions Create'] -- ['projects_locations-agents-flows-versions-delete.md', 'Projects', 'Locations Agents Flows Versions Delete'] -- ['projects_locations-agents-flows-versions-get.md', 
'Projects', 'Locations Agents Flows Versions Get'] -- ['projects_locations-agents-flows-versions-list.md', 'Projects', 'Locations Agents Flows Versions List'] -- ['projects_locations-agents-flows-versions-load.md', 'Projects', 'Locations Agents Flows Versions Load'] -- ['projects_locations-agents-flows-versions-patch.md', 'Projects', 'Locations Agents Flows Versions Patch'] -- ['projects_locations-agents-get.md', 'Projects', 'Locations Agents Get'] -- ['projects_locations-agents-get-validation-result.md', 'Projects', 'Locations Agents Get Validation Result'] -- ['projects_locations-agents-intents-create.md', 'Projects', 'Locations Agents Intents Create'] -- ['projects_locations-agents-intents-delete.md', 'Projects', 'Locations Agents Intents Delete'] -- ['projects_locations-agents-intents-get.md', 'Projects', 'Locations Agents Intents Get'] -- ['projects_locations-agents-intents-list.md', 'Projects', 'Locations Agents Intents List'] -- ['projects_locations-agents-intents-patch.md', 'Projects', 'Locations Agents Intents Patch'] -- ['projects_locations-agents-list.md', 'Projects', 'Locations Agents List'] -- ['projects_locations-agents-patch.md', 'Projects', 'Locations Agents Patch'] -- ['projects_locations-agents-restore.md', 'Projects', 'Locations Agents Restore'] -- ['projects_locations-agents-sessions-detect-intent.md', 'Projects', 'Locations Agents Sessions Detect Intent'] -- ['projects_locations-agents-sessions-entity-types-create.md', 'Projects', 'Locations Agents Sessions Entity Types Create'] -- ['projects_locations-agents-sessions-entity-types-delete.md', 'Projects', 'Locations Agents Sessions Entity Types Delete'] -- ['projects_locations-agents-sessions-entity-types-get.md', 'Projects', 'Locations Agents Sessions Entity Types Get'] -- ['projects_locations-agents-sessions-entity-types-list.md', 'Projects', 'Locations Agents Sessions Entity Types List'] -- ['projects_locations-agents-sessions-entity-types-patch.md', 'Projects', 'Locations Agents Sessions 
Entity Types Patch'] -- ['projects_locations-agents-sessions-fulfill-intent.md', 'Projects', 'Locations Agents Sessions Fulfill Intent'] -- ['projects_locations-agents-sessions-match-intent.md', 'Projects', 'Locations Agents Sessions Match Intent'] -- ['projects_locations-agents-test-cases-batch-delete.md', 'Projects', 'Locations Agents Test Cases Batch Delete'] -- ['projects_locations-agents-test-cases-batch-run.md', 'Projects', 'Locations Agents Test Cases Batch Run'] -- ['projects_locations-agents-test-cases-calculate-coverage.md', 'Projects', 'Locations Agents Test Cases Calculate Coverage'] -- ['projects_locations-agents-test-cases-create.md', 'Projects', 'Locations Agents Test Cases Create'] -- ['projects_locations-agents-test-cases-export.md', 'Projects', 'Locations Agents Test Cases Export'] -- ['projects_locations-agents-test-cases-get.md', 'Projects', 'Locations Agents Test Cases Get'] -- ['projects_locations-agents-test-cases-import.md', 'Projects', 'Locations Agents Test Cases Import'] -- ['projects_locations-agents-test-cases-list.md', 'Projects', 'Locations Agents Test Cases List'] -- ['projects_locations-agents-test-cases-patch.md', 'Projects', 'Locations Agents Test Cases Patch'] -- ['projects_locations-agents-test-cases-results-get.md', 'Projects', 'Locations Agents Test Cases Results Get'] -- ['projects_locations-agents-test-cases-results-list.md', 'Projects', 'Locations Agents Test Cases Results List'] -- ['projects_locations-agents-test-cases-run.md', 'Projects', 'Locations Agents Test Cases Run'] -- ['projects_locations-agents-validate.md', 'Projects', 'Locations Agents Validate'] -- ['projects_locations-agents-webhooks-create.md', 'Projects', 'Locations Agents Webhooks Create'] -- ['projects_locations-agents-webhooks-delete.md', 'Projects', 'Locations Agents Webhooks Delete'] -- ['projects_locations-agents-webhooks-get.md', 'Projects', 'Locations Agents Webhooks Get'] -- ['projects_locations-agents-webhooks-list.md', 'Projects', 'Locations 
Agents Webhooks List'] -- ['projects_locations-agents-webhooks-patch.md', 'Projects', 'Locations Agents Webhooks Patch'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-security-settings-create.md', 'Projects', 'Locations Security Settings Create'] -- ['projects_locations-security-settings-delete.md', 'Projects', 'Locations Security Settings Delete'] -- ['projects_locations-security-settings-get.md', 'Projects', 'Locations Security Settings Get'] -- ['projects_locations-security-settings-list.md', 'Projects', 'Locations Security Settings List'] -- ['projects_locations-security-settings-patch.md', 'Projects', 'Locations Security Settings Patch'] -- ['projects_operations-cancel.md', 'Projects', 'Operations Cancel'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_operations-list.md', 'Projects', 'Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Agents Changelogs Get': 'projects_locations-agents-changelogs-get.md' + - 'Locations Agents Changelogs List': 'projects_locations-agents-changelogs-list.md' + - 'Locations Agents Create': 'projects_locations-agents-create.md' + - 'Locations Agents Delete': 'projects_locations-agents-delete.md' + - 'Locations Agents Entity Types Create': 'projects_locations-agents-entity-types-create.md' + - 'Locations Agents Entity Types Delete': 'projects_locations-agents-entity-types-delete.md' + - 'Locations Agents Entity Types Get': 'projects_locations-agents-entity-types-get.md' + - 'Locations Agents Entity Types List': 'projects_locations-agents-entity-types-list.md' + - 'Locations Agents Entity Types Patch': 
'projects_locations-agents-entity-types-patch.md' + - 'Locations Agents Environments Continuous Test Results List': 'projects_locations-agents-environments-continuous-test-results-list.md' + - 'Locations Agents Environments Create': 'projects_locations-agents-environments-create.md' + - 'Locations Agents Environments Delete': 'projects_locations-agents-environments-delete.md' + - 'Locations Agents Environments Deploy Flow': 'projects_locations-agents-environments-deploy-flow.md' + - 'Locations Agents Environments Deployments Get': 'projects_locations-agents-environments-deployments-get.md' + - 'Locations Agents Environments Deployments List': 'projects_locations-agents-environments-deployments-list.md' + - 'Locations Agents Environments Experiments Create': 'projects_locations-agents-environments-experiments-create.md' + - 'Locations Agents Environments Experiments Delete': 'projects_locations-agents-environments-experiments-delete.md' + - 'Locations Agents Environments Experiments Get': 'projects_locations-agents-environments-experiments-get.md' + - 'Locations Agents Environments Experiments List': 'projects_locations-agents-environments-experiments-list.md' + - 'Locations Agents Environments Experiments Patch': 'projects_locations-agents-environments-experiments-patch.md' + - 'Locations Agents Environments Experiments Start': 'projects_locations-agents-environments-experiments-start.md' + - 'Locations Agents Environments Experiments Stop': 'projects_locations-agents-environments-experiments-stop.md' + - 'Locations Agents Environments Get': 'projects_locations-agents-environments-get.md' + - 'Locations Agents Environments List': 'projects_locations-agents-environments-list.md' + - 'Locations Agents Environments Lookup Environment History': 'projects_locations-agents-environments-lookup-environment-history.md' + - 'Locations Agents Environments Patch': 'projects_locations-agents-environments-patch.md' + - 'Locations Agents Environments Run Continuous Test': 
'projects_locations-agents-environments-run-continuous-test.md' + - 'Locations Agents Environments Sessions Detect Intent': 'projects_locations-agents-environments-sessions-detect-intent.md' + - 'Locations Agents Environments Sessions Entity Types Create': 'projects_locations-agents-environments-sessions-entity-types-create.md' + - 'Locations Agents Environments Sessions Entity Types Delete': 'projects_locations-agents-environments-sessions-entity-types-delete.md' + - 'Locations Agents Environments Sessions Entity Types Get': 'projects_locations-agents-environments-sessions-entity-types-get.md' + - 'Locations Agents Environments Sessions Entity Types List': 'projects_locations-agents-environments-sessions-entity-types-list.md' + - 'Locations Agents Environments Sessions Entity Types Patch': 'projects_locations-agents-environments-sessions-entity-types-patch.md' + - 'Locations Agents Environments Sessions Fulfill Intent': 'projects_locations-agents-environments-sessions-fulfill-intent.md' + - 'Locations Agents Environments Sessions Match Intent': 'projects_locations-agents-environments-sessions-match-intent.md' + - 'Locations Agents Export': 'projects_locations-agents-export.md' + - 'Locations Agents Flows Create': 'projects_locations-agents-flows-create.md' + - 'Locations Agents Flows Delete': 'projects_locations-agents-flows-delete.md' + - 'Locations Agents Flows Export': 'projects_locations-agents-flows-export.md' + - 'Locations Agents Flows Get': 'projects_locations-agents-flows-get.md' + - 'Locations Agents Flows Get Validation Result': 'projects_locations-agents-flows-get-validation-result.md' + - 'Locations Agents Flows Import': 'projects_locations-agents-flows-import.md' + - 'Locations Agents Flows List': 'projects_locations-agents-flows-list.md' + - 'Locations Agents Flows Pages Create': 'projects_locations-agents-flows-pages-create.md' + - 'Locations Agents Flows Pages Delete': 'projects_locations-agents-flows-pages-delete.md' + - 'Locations Agents Flows 
Pages Get': 'projects_locations-agents-flows-pages-get.md' + - 'Locations Agents Flows Pages List': 'projects_locations-agents-flows-pages-list.md' + - 'Locations Agents Flows Pages Patch': 'projects_locations-agents-flows-pages-patch.md' + - 'Locations Agents Flows Patch': 'projects_locations-agents-flows-patch.md' + - 'Locations Agents Flows Train': 'projects_locations-agents-flows-train.md' + - 'Locations Agents Flows Transition Route Groups Create': 'projects_locations-agents-flows-transition-route-groups-create.md' + - 'Locations Agents Flows Transition Route Groups Delete': 'projects_locations-agents-flows-transition-route-groups-delete.md' + - 'Locations Agents Flows Transition Route Groups Get': 'projects_locations-agents-flows-transition-route-groups-get.md' + - 'Locations Agents Flows Transition Route Groups List': 'projects_locations-agents-flows-transition-route-groups-list.md' + - 'Locations Agents Flows Transition Route Groups Patch': 'projects_locations-agents-flows-transition-route-groups-patch.md' + - 'Locations Agents Flows Validate': 'projects_locations-agents-flows-validate.md' + - 'Locations Agents Flows Versions Compare Versions': 'projects_locations-agents-flows-versions-compare-versions.md' + - 'Locations Agents Flows Versions Create': 'projects_locations-agents-flows-versions-create.md' + - 'Locations Agents Flows Versions Delete': 'projects_locations-agents-flows-versions-delete.md' + - 'Locations Agents Flows Versions Get': 'projects_locations-agents-flows-versions-get.md' + - 'Locations Agents Flows Versions List': 'projects_locations-agents-flows-versions-list.md' + - 'Locations Agents Flows Versions Load': 'projects_locations-agents-flows-versions-load.md' + - 'Locations Agents Flows Versions Patch': 'projects_locations-agents-flows-versions-patch.md' + - 'Locations Agents Get': 'projects_locations-agents-get.md' + - 'Locations Agents Get Validation Result': 'projects_locations-agents-get-validation-result.md' + - 'Locations Agents 
Intents Create': 'projects_locations-agents-intents-create.md' + - 'Locations Agents Intents Delete': 'projects_locations-agents-intents-delete.md' + - 'Locations Agents Intents Get': 'projects_locations-agents-intents-get.md' + - 'Locations Agents Intents List': 'projects_locations-agents-intents-list.md' + - 'Locations Agents Intents Patch': 'projects_locations-agents-intents-patch.md' + - 'Locations Agents List': 'projects_locations-agents-list.md' + - 'Locations Agents Patch': 'projects_locations-agents-patch.md' + - 'Locations Agents Restore': 'projects_locations-agents-restore.md' + - 'Locations Agents Sessions Detect Intent': 'projects_locations-agents-sessions-detect-intent.md' + - 'Locations Agents Sessions Entity Types Create': 'projects_locations-agents-sessions-entity-types-create.md' + - 'Locations Agents Sessions Entity Types Delete': 'projects_locations-agents-sessions-entity-types-delete.md' + - 'Locations Agents Sessions Entity Types Get': 'projects_locations-agents-sessions-entity-types-get.md' + - 'Locations Agents Sessions Entity Types List': 'projects_locations-agents-sessions-entity-types-list.md' + - 'Locations Agents Sessions Entity Types Patch': 'projects_locations-agents-sessions-entity-types-patch.md' + - 'Locations Agents Sessions Fulfill Intent': 'projects_locations-agents-sessions-fulfill-intent.md' + - 'Locations Agents Sessions Match Intent': 'projects_locations-agents-sessions-match-intent.md' + - 'Locations Agents Test Cases Batch Delete': 'projects_locations-agents-test-cases-batch-delete.md' + - 'Locations Agents Test Cases Batch Run': 'projects_locations-agents-test-cases-batch-run.md' + - 'Locations Agents Test Cases Calculate Coverage': 'projects_locations-agents-test-cases-calculate-coverage.md' + - 'Locations Agents Test Cases Create': 'projects_locations-agents-test-cases-create.md' + - 'Locations Agents Test Cases Export': 'projects_locations-agents-test-cases-export.md' + - 'Locations Agents Test Cases Get': 
'projects_locations-agents-test-cases-get.md' + - 'Locations Agents Test Cases Import': 'projects_locations-agents-test-cases-import.md' + - 'Locations Agents Test Cases List': 'projects_locations-agents-test-cases-list.md' + - 'Locations Agents Test Cases Patch': 'projects_locations-agents-test-cases-patch.md' + - 'Locations Agents Test Cases Results Get': 'projects_locations-agents-test-cases-results-get.md' + - 'Locations Agents Test Cases Results List': 'projects_locations-agents-test-cases-results-list.md' + - 'Locations Agents Test Cases Run': 'projects_locations-agents-test-cases-run.md' + - 'Locations Agents Validate': 'projects_locations-agents-validate.md' + - 'Locations Agents Webhooks Create': 'projects_locations-agents-webhooks-create.md' + - 'Locations Agents Webhooks Delete': 'projects_locations-agents-webhooks-delete.md' + - 'Locations Agents Webhooks Get': 'projects_locations-agents-webhooks-get.md' + - 'Locations Agents Webhooks List': 'projects_locations-agents-webhooks-list.md' + - 'Locations Agents Webhooks Patch': 'projects_locations-agents-webhooks-patch.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Security Settings Create': 'projects_locations-security-settings-create.md' + - 'Locations Security Settings Delete': 'projects_locations-security-settings-delete.md' + - 'Locations Security Settings Get': 'projects_locations-security-settings-get.md' + - 'Locations Security Settings List': 'projects_locations-security-settings-list.md' + - 'Locations Security Settings Patch': 'projects_locations-security-settings-patch.md' + - 'Operations Cancel': 'projects_operations-cancel.md' + - 'Operations Get': 'projects_operations-get.md' + - 'Operations List': 
'projects_operations-list.md' theme: readthedocs diff --git a/gen/dialogflow3-cli/src/client.rs b/gen/dialogflow3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dialogflow3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dialogflow3-cli/src/main.rs b/gen/dialogflow3-cli/src/main.rs index 43a081dc3d..a1026ad85e 100644 --- a/gen/dialogflow3-cli/src/main.rs +++ b/gen/dialogflow3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dialogflow3::{api, Error, oauth2}; +use google_dialogflow3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -196,6 +195,7 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "enable-spell-correction" => Some(("enableSpellCorrection", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-logging" => Some(("enableStackdriverLogging", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "locked" => Some(("locked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "speech-to-text-settings.enable-speech-adaptation" 
=> Some(("speechToTextSettings.enableSpeechAdaptation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -203,7 +203,7 @@ where "supported-language-codes" => Some(("supportedLanguageCodes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-settings", "avatar-uri", "default-language-code", "description", "display-name", "enable-interaction-logging", "enable-speech-adaptation", "enable-spell-correction", "enable-stackdriver-logging", "logging-settings", "name", "security-settings", "speech-to-text-settings", "start-flow", "supported-language-codes", "time-zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-settings", "avatar-uri", "default-language-code", "description", "display-name", "enable-interaction-logging", "enable-speech-adaptation", "enable-spell-correction", "enable-stackdriver-logging", "locked", "logging-settings", "name", "security-settings", "speech-to-text-settings", "start-flow", "supported-language-codes", "time-zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -416,7 +416,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -531,7 +531,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ 
-628,7 +628,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -690,7 +690,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1029,7 +1029,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1293,7 +1293,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1398,7 +1398,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1677,7 +1677,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1736,7 +1736,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1831,7 +1831,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2010,6 +2010,7 @@ where "query-input.language-code" => Some(("queryInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-input.text.text" => Some(("queryInput.text.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.analyze-query-text-sentiment" => Some(("queryParams.analyzeQueryTextSentiment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "query-params.channel" => Some(("queryParams.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.current-page" => Some(("queryParams.currentPage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.disable-webhook" => Some(("queryParams.disableWebhook", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query-params.flow-versions" => Some(("queryParams.flowVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2018,7 +2019,7 @@ where "query-params.time-zone" => Some(("queryParams.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.webhook-headers" => Some(("queryParams.webhookHeaders", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", 
"audio-encoding", "config", "current-page", "digits", "disable-webhook", "dtmf", "effects-profile-id", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "language-code", "latitude", "longitude", "model", "model-variant", "name", "output-audio-config", "phrase-hints", "pitch", "query-input", "query-params", "sample-rate-hertz", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "channel", "config", "current-page", "digits", "disable-webhook", "dtmf", "effects-profile-id", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "language-code", "latitude", "longitude", "model", "model-variant", "name", "output-audio-config", "phrase-hints", "pitch", "query-input", "query-params", "sample-rate-hertz", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2278,7 +2279,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2368,7 +2369,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2465,6 +2466,7 @@ where "match-intent-request.query-input.language-code" => 
Some(("matchIntentRequest.queryInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "match-intent-request.query-input.text.text" => Some(("matchIntentRequest.queryInput.text.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "match-intent-request.query-params.analyze-query-text-sentiment" => Some(("matchIntentRequest.queryParams.analyzeQueryTextSentiment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "match-intent-request.query-params.channel" => Some(("matchIntentRequest.queryParams.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "match-intent-request.query-params.current-page" => Some(("matchIntentRequest.queryParams.currentPage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "match-intent-request.query-params.disable-webhook" => Some(("matchIntentRequest.queryParams.disableWebhook", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "match-intent-request.query-params.flow-versions" => Some(("matchIntentRequest.queryParams.flowVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2481,7 +2483,7 @@ where "output-audio-config.synthesize-speech-config.voice.ssml-gender" => Some(("outputAudioConfig.synthesizeSpeechConfig.voice.ssmlGender", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "output-audio-config.synthesize-speech-config.volume-gain-db" => Some(("outputAudioConfig.synthesizeSpeechConfig.volumeGainDb", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "confidence", "config", "current-page", "description", "digits", "disable-webhook", "display-name", "dtmf", "effects-profile-id", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "is-fallback", "labels", "language-code", 
"latitude", "longitude", "match", "match-intent-request", "match-type", "model", "model-variant", "name", "output-audio-config", "phrase-hints", "pitch", "priority", "query-input", "query-params", "resolved-input", "sample-rate-hertz", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "channel", "confidence", "config", "current-page", "description", "digits", "disable-webhook", "display-name", "dtmf", "effects-profile-id", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "is-fallback", "labels", "language-code", "latitude", "longitude", "match", "match-intent-request", "match-type", "model", "model-variant", "name", "output-audio-config", "phrase-hints", "pitch", "priority", "query-input", "query-params", "resolved-input", "sample-rate-hertz", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2579,6 +2581,7 @@ where "query-input.language-code" => Some(("queryInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-input.text.text" => Some(("queryInput.text.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.analyze-query-text-sentiment" => Some(("queryParams.analyzeQueryTextSentiment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "query-params.channel" => Some(("queryParams.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.current-page" => Some(("queryParams.currentPage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"query-params.disable-webhook" => Some(("queryParams.disableWebhook", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query-params.flow-versions" => Some(("queryParams.flowVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -2587,7 +2590,7 @@ where "query-params.time-zone" => Some(("queryParams.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.webhook-headers" => Some(("queryParams.webhookHeaders", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "config", "current-page", "digits", "disable-webhook", "dtmf", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "language-code", "latitude", "longitude", "model", "model-variant", "phrase-hints", "query-input", "query-params", "sample-rate-hertz", "single-utterance", "text", "time-zone", "webhook-headers"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "channel", "config", "current-page", "digits", "disable-webhook", "dtmf", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "language-code", "latitude", "longitude", "model", "model-variant", "phrase-hints", "query-input", "query-params", "sample-rate-hertz", "single-utterance", "text", "time-zone", "webhook-headers"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2671,9 +2674,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "agent-uri" => Some(("agentUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "data-format" => Some(("dataFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "environment" => Some(("environment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-uri", "environment"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["agent-uri", "data-format", "environment"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2835,7 +2839,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3179,7 +3183,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -3332,7 +3336,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3447,7 +3451,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ 
-3544,7 +3548,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -3642,7 +3646,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -3875,7 +3879,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3990,7 +3994,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -4083,7 +4087,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -4512,7 +4516,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| 
arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4693,7 +4697,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5062,7 +5066,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -5162,7 +5166,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -5224,7 +5228,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5304,6 +5308,7 @@ where "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "enable-spell-correction" => Some(("enableSpellCorrection", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-stackdriver-logging" => Some(("enableStackdriverLogging", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "locked" => Some(("locked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "security-settings" => Some(("securitySettings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "speech-to-text-settings.enable-speech-adaptation" => Some(("speechToTextSettings.enableSpeechAdaptation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -5311,7 +5316,7 @@ where "supported-language-codes" => Some(("supportedLanguageCodes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-settings", "avatar-uri", "default-language-code", "description", "display-name", "enable-interaction-logging", "enable-speech-adaptation", "enable-spell-correction", "enable-stackdriver-logging", "logging-settings", "name", "security-settings", "speech-to-text-settings", "start-flow", "supported-language-codes", "time-zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advanced-settings", "avatar-uri", "default-language-code", "description", "display-name", "enable-interaction-logging", "enable-speech-adaptation", "enable-spell-correction", "enable-stackdriver-logging", "locked", "logging-settings", "name", "security-settings", "speech-to-text-settings", "start-flow", "supported-language-codes", "time-zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5326,7 +5331,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5508,6 +5513,7 @@ where "query-input.language-code" => Some(("queryInput.languageCode", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "query-input.text.text" => Some(("queryInput.text.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.analyze-query-text-sentiment" => Some(("queryParams.analyzeQueryTextSentiment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "query-params.channel" => Some(("queryParams.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.current-page" => Some(("queryParams.currentPage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.disable-webhook" => Some(("queryParams.disableWebhook", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query-params.flow-versions" => Some(("queryParams.flowVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -5516,7 +5522,7 @@ where "query-params.time-zone" => Some(("queryParams.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.webhook-headers" => Some(("queryParams.webhookHeaders", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "config", "current-page", "digits", "disable-webhook", "dtmf", "effects-profile-id", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "language-code", "latitude", "longitude", "model", "model-variant", "name", "output-audio-config", "phrase-hints", "pitch", "query-input", "query-params", "sample-rate-hertz", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "channel", "config", "current-page", "digits", "disable-webhook", "dtmf", "effects-profile-id", 
"enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "language-code", "latitude", "longitude", "model", "model-variant", "name", "output-audio-config", "phrase-hints", "pitch", "query-input", "query-params", "sample-rate-hertz", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5776,7 +5782,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5866,7 +5872,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5963,6 +5969,7 @@ where "match-intent-request.query-input.language-code" => Some(("matchIntentRequest.queryInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "match-intent-request.query-input.text.text" => Some(("matchIntentRequest.queryInput.text.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "match-intent-request.query-params.analyze-query-text-sentiment" => Some(("matchIntentRequest.queryParams.analyzeQueryTextSentiment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "match-intent-request.query-params.channel" => Some(("matchIntentRequest.queryParams.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "match-intent-request.query-params.current-page" => 
Some(("matchIntentRequest.queryParams.currentPage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "match-intent-request.query-params.disable-webhook" => Some(("matchIntentRequest.queryParams.disableWebhook", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "match-intent-request.query-params.flow-versions" => Some(("matchIntentRequest.queryParams.flowVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -5979,7 +5986,7 @@ where "output-audio-config.synthesize-speech-config.voice.ssml-gender" => Some(("outputAudioConfig.synthesizeSpeechConfig.voice.ssmlGender", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "output-audio-config.synthesize-speech-config.volume-gain-db" => Some(("outputAudioConfig.synthesizeSpeechConfig.volumeGainDb", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "confidence", "config", "current-page", "description", "digits", "disable-webhook", "display-name", "dtmf", "effects-profile-id", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "is-fallback", "labels", "language-code", "latitude", "longitude", "match", "match-intent-request", "match-type", "model", "model-variant", "name", "output-audio-config", "phrase-hints", "pitch", "priority", "query-input", "query-params", "resolved-input", "sample-rate-hertz", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "channel", "confidence", "config", "current-page", "description", "digits", "disable-webhook", "display-name", "dtmf", "effects-profile-id", "enable-word-info", "event", "finish-digit", "flow-versions", 
"geo-location", "intent", "is-fallback", "labels", "language-code", "latitude", "longitude", "match", "match-intent-request", "match-type", "model", "model-variant", "name", "output-audio-config", "phrase-hints", "pitch", "priority", "query-input", "query-params", "resolved-input", "sample-rate-hertz", "single-utterance", "speaking-rate", "ssml-gender", "synthesize-speech-config", "text", "time-zone", "voice", "volume-gain-db", "webhook-headers"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6077,6 +6084,7 @@ where "query-input.language-code" => Some(("queryInput.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-input.text.text" => Some(("queryInput.text.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.analyze-query-text-sentiment" => Some(("queryParams.analyzeQueryTextSentiment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "query-params.channel" => Some(("queryParams.channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.current-page" => Some(("queryParams.currentPage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.disable-webhook" => Some(("queryParams.disableWebhook", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query-params.flow-versions" => Some(("queryParams.flowVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -6085,7 +6093,7 @@ where "query-params.time-zone" => Some(("queryParams.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-params.webhook-headers" => Some(("queryParams.webhookHeaders", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "config", 
"current-page", "digits", "disable-webhook", "dtmf", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "language-code", "latitude", "longitude", "model", "model-variant", "phrase-hints", "query-input", "query-params", "sample-rate-hertz", "single-utterance", "text", "time-zone", "webhook-headers"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["analyze-query-text-sentiment", "audio", "audio-encoding", "channel", "config", "current-page", "digits", "disable-webhook", "dtmf", "enable-word-info", "event", "finish-digit", "flow-versions", "geo-location", "intent", "language-code", "latitude", "longitude", "model", "model-variant", "phrase-hints", "query-input", "query-params", "sample-rate-hertz", "single-utterance", "text", "time-zone", "webhook-headers"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6405,9 +6413,10 @@ where "notes" => Some(("notes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "tags" => Some(("tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "test-config.flow" => Some(("testConfig.flow", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "test-config.page" => Some(("testConfig.page", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "test-config.tracking-parameters" => Some(("testConfig.trackingParameters", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "display-name", "environment", "flow", "last-test-result", "name", "notes", "tags", "test-config", "test-result", "test-time", "tracking-parameters"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "display-name", "environment", "flow", "last-test-result", "name", "notes", "page", "tags", "test-config", "test-result", "test-time", 
"tracking-parameters"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6705,7 +6714,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6787,9 +6796,10 @@ where "notes" => Some(("notes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "tags" => Some(("tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "test-config.flow" => Some(("testConfig.flow", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "test-config.page" => Some(("testConfig.page", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "test-config.tracking-parameters" => Some(("testConfig.trackingParameters", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "display-name", "environment", "flow", "last-test-result", "name", "notes", "tags", "test-config", "test-result", "test-time", "tracking-parameters"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["creation-time", "display-name", "environment", "flow", "last-test-result", "name", "notes", "page", "tags", "test-config", "test-result", "test-time", "tracking-parameters"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6804,7 +6814,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6915,7 
+6925,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7243,7 +7253,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -7354,7 +7364,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7457,7 +7467,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -7568,7 +7578,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7734,7 +7744,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7809,6 +7819,10 @@ 
where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "audio-export-settings.audio-export-pattern" => Some(("audioExportSettings.audioExportPattern", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-export-settings.audio-format" => Some(("audioExportSettings.audioFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-export-settings.enable-audio-redaction" => Some(("audioExportSettings.enableAudioRedaction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "audio-export-settings.gcs-bucket" => Some(("audioExportSettings.gcsBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deidentify-template" => Some(("deidentifyTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "insights-export-settings.enable-insights-export" => Some(("insightsExportSettings.enableInsightsExport", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -7819,7 +7833,7 @@ where "redaction-strategy" => Some(("redactionStrategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "retention-window-days" => Some(("retentionWindowDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["deidentify-template", "display-name", "enable-insights-export", "insights-export-settings", "inspect-template", "name", "purge-data-types", "redaction-scope", "redaction-strategy", "retention-window-days"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["audio-export-pattern", "audio-export-settings", "audio-format", "deidentify-template", "display-name", "enable-audio-redaction", "enable-insights-export", "gcs-bucket", "insights-export-settings", "inspect-template", "name", "purge-data-types", 
"redaction-scope", "redaction-strategy", "retention-window-days"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -7993,7 +8007,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8065,6 +8079,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "audio-export-settings.audio-export-pattern" => Some(("audioExportSettings.audioExportPattern", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-export-settings.audio-format" => Some(("audioExportSettings.audioFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-export-settings.enable-audio-redaction" => Some(("audioExportSettings.enableAudioRedaction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "audio-export-settings.gcs-bucket" => Some(("audioExportSettings.gcsBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deidentify-template" => Some(("deidentifyTemplate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "insights-export-settings.enable-insights-export" => Some(("insightsExportSettings.enableInsightsExport", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -8075,7 +8093,7 @@ where "redaction-strategy" => Some(("redactionStrategy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "retention-window-days" => Some(("retentionWindowDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = 
FieldCursor::did_you_mean(key, &vec!["deidentify-template", "display-name", "enable-insights-export", "insights-export-settings", "inspect-template", "name", "purge-data-types", "redaction-scope", "redaction-strategy", "retention-window-days"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["audio-export-pattern", "audio-export-settings", "audio-format", "deidentify-template", "display-name", "enable-audio-redaction", "enable-insights-export", "gcs-bucket", "insights-export-settings", "inspect-template", "name", "purge-data-types", "redaction-scope", "redaction-strategy", "retention-window-days"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -8090,7 +8108,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8253,7 +8271,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -10907,7 +10925,7 @@ async fn main() { Some(false)), ]), ("locations-agents-test-cases-import", - Some(r##"Imports the test cases from a Cloud Storage bucket or a local file. It always creates new test cases and won't overwite any existing ones. The provided ID in the imported test case is neglected. This method is a [long-running operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). 
The returned `Operation` type has the following method-specific fields: - `metadata`: ImportTestCasesMetadata - `response`: ImportTestCasesResponse"##), + Some(r##"Imports the test cases from a Cloud Storage bucket or a local file. It always creates new test cases and won't overwrite any existing ones. The provided ID in the imported test case is neglected. This method is a [long-running operation](https://cloud.google.com/dialogflow/cx/docs/how/long-running-operation). The returned `Operation` type has the following method-specific fields: - `metadata`: ImportTestCasesMetadata - `response`: ImportTestCasesResponse"##), "Details at http://byron.github.io/google-apis-rs/google_dialogflow3_cli/projects_locations-agents-test-cases-import", vec![ (Some(r##"parent"##), @@ -11510,7 +11528,7 @@ async fn main() { let mut app = App::new("dialogflow3") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230110") .about("Builds conversational interfaces (for example, chatbots, and voice-powered apps and devices).") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dialogflow3_cli") .arg(Arg::with_name("url") diff --git a/gen/dialogflow3/Cargo.toml b/gen/dialogflow3/Cargo.toml index 84e07663f6..6cf91b134c 100644 --- a/gen/dialogflow3/Cargo.toml +++ b/gen/dialogflow3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dialogflow3" -version = "5.0.2-beta-1+20230110" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dialogflow (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dialogflow3" homepage = "https://cloud.google.com/dialogflow/" -documentation = "https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110" +documentation = "https://docs.rs/google-dialogflow3/5.0.2+20230110" license = "MIT" keywords = ["dialogflow", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dialogflow3/README.md 
b/gen/dialogflow3/README.md index 196e326f5d..825ad93bb8 100644 --- a/gen/dialogflow3/README.md +++ b/gen/dialogflow3/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-dialogflow3` library allows access to all features of the *Google Dialogflow* service. -This documentation was generated from *Dialogflow* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *dialogflow:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Dialogflow* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *dialogflow:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dialogflow* *v3* API can be found at the [official documentation site](https://cloud.google.com/dialogflow/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/Dialogflow) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/Dialogflow) ... 
* projects - * [*locations agents changelogs get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentChangelogGetCall), [*locations agents changelogs list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentChangelogListCall), [*locations agents create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentCreateCall), [*locations agents delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentDeleteCall), [*locations agents entity types create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypeCreateCall), [*locations agents entity types delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypeDeleteCall), [*locations agents entity types get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypeGetCall), [*locations agents entity types list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypeListCall), [*locations agents entity types patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypePatchCall), [*locations agents environments continuous test results list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentContinuousTestResultListCall), [*locations agents environments create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentCreateCall), [*locations agents environments delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentDeleteCall), [*locations agents environments 
deploy flow*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentDeployFlowCall), [*locations agents environments deployments get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentDeploymentGetCall), [*locations agents environments deployments list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentDeploymentListCall), [*locations agents environments experiments create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentCreateCall), [*locations agents environments experiments delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentDeleteCall), [*locations agents environments experiments get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentGetCall), [*locations agents environments experiments list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentListCall), [*locations agents environments experiments patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentPatchCall), [*locations agents environments experiments start*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentStartCall), [*locations agents environments experiments stop*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentStopCall), [*locations agents environments get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentGetCall), [*locations agents environments 
list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentListCall), [*locations agents environments lookup environment history*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentLookupEnvironmentHistoryCall), [*locations agents environments patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentPatchCall), [*locations agents environments run continuous test*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentRunContinuousTestCall), [*locations agents environments sessions detect intent*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionDetectIntentCall), [*locations agents environments sessions entity types create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypeCreateCall), [*locations agents environments sessions entity types delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypeDeleteCall), [*locations agents environments sessions entity types get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypeGetCall), [*locations agents environments sessions entity types list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypeListCall), [*locations agents environments sessions entity types patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypePatchCall), [*locations agents environments sessions fulfill 
intent*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionFulfillIntentCall), [*locations agents environments sessions match intent*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionMatchIntentCall), [*locations agents export*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentExportCall), [*locations agents flows create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowCreateCall), [*locations agents flows delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowDeleteCall), [*locations agents flows export*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowExportCall), [*locations agents flows get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowGetCall), [*locations agents flows get validation result*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowGetValidationResultCall), [*locations agents flows import*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowImportCall), [*locations agents flows list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowListCall), [*locations agents flows pages create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPageCreateCall), [*locations agents flows pages delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPageDeleteCall), [*locations agents flows pages 
get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPageGetCall), [*locations agents flows pages list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPageListCall), [*locations agents flows pages patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPagePatchCall), [*locations agents flows patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPatchCall), [*locations agents flows train*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTrainCall), [*locations agents flows transition route groups create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupCreateCall), [*locations agents flows transition route groups delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupDeleteCall), [*locations agents flows transition route groups get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupGetCall), [*locations agents flows transition route groups list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupListCall), [*locations agents flows transition route groups patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupPatchCall), [*locations agents flows validate*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowValidateCall), [*locations agents flows versions compare 
versions*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionCompareVersionCall), [*locations agents flows versions create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionCreateCall), [*locations agents flows versions delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionDeleteCall), [*locations agents flows versions get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionGetCall), [*locations agents flows versions list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionListCall), [*locations agents flows versions load*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionLoadCall), [*locations agents flows versions patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionPatchCall), [*locations agents get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentGetCall), [*locations agents get validation result*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentGetValidationResultCall), [*locations agents intents create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentIntentCreateCall), [*locations agents intents delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentIntentDeleteCall), [*locations agents intents get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentIntentGetCall), [*locations agents intents 
list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentIntentListCall), [*locations agents intents patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentIntentPatchCall), [*locations agents list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentListCall), [*locations agents patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentPatchCall), [*locations agents restore*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentRestoreCall), [*locations agents sessions detect intent*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentSessionDetectIntentCall), [*locations agents sessions entity types create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypeCreateCall), [*locations agents sessions entity types delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypeDeleteCall), [*locations agents sessions entity types get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypeGetCall), [*locations agents sessions entity types list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypeListCall), [*locations agents sessions entity types patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypePatchCall), [*locations agents sessions fulfill intent*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentSessionFulfillIntentCall), [*locations agents sessions match 
intent*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentSessionMatchIntentCall), [*locations agents test cases batch delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseBatchDeleteCall), [*locations agents test cases batch run*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseBatchRunCall), [*locations agents test cases calculate coverage*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseCalculateCoverageCall), [*locations agents test cases create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseCreateCall), [*locations agents test cases export*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseExportCall), [*locations agents test cases get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseGetCall), [*locations agents test cases import*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseImportCall), [*locations agents test cases list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseListCall), [*locations agents test cases patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCasePatchCall), [*locations agents test cases results get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseResultGetCall), [*locations agents test cases results list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseResultListCall), [*locations agents test cases 
run*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseRunCall), [*locations agents validate*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentValidateCall), [*locations agents webhooks create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookCreateCall), [*locations agents webhooks delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookDeleteCall), [*locations agents webhooks get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookGetCall), [*locations agents webhooks list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookListCall), [*locations agents webhooks patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookPatchCall), [*locations get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationOperationListCall), [*locations security settings create*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingCreateCall), [*locations security settings 
delete*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingDeleteCall), [*locations security settings get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingGetCall), [*locations security settings list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingListCall), [*locations security settings patch*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingPatchCall), [*operations cancel*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectOperationCancelCall), [*operations get*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectOperationGetCall) and [*operations list*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/api::ProjectOperationListCall) + * [*locations agents changelogs get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentChangelogGetCall), [*locations agents changelogs list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentChangelogListCall), [*locations agents create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentCreateCall), [*locations agents delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentDeleteCall), [*locations agents entity types create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypeCreateCall), [*locations agents entity types delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypeDeleteCall), [*locations agents entity types 
get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypeGetCall), [*locations agents entity types list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypeListCall), [*locations agents entity types patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEntityTypePatchCall), [*locations agents environments continuous test results list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentContinuousTestResultListCall), [*locations agents environments create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentCreateCall), [*locations agents environments delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentDeleteCall), [*locations agents environments deploy flow*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentDeployFlowCall), [*locations agents environments deployments get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentDeploymentGetCall), [*locations agents environments deployments list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentDeploymentListCall), [*locations agents environments experiments create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentCreateCall), [*locations agents environments experiments delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentDeleteCall), [*locations agents environments experiments 
get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentGetCall), [*locations agents environments experiments list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentListCall), [*locations agents environments experiments patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentPatchCall), [*locations agents environments experiments start*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentStartCall), [*locations agents environments experiments stop*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentExperimentStopCall), [*locations agents environments get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentGetCall), [*locations agents environments list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentListCall), [*locations agents environments lookup environment history*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentLookupEnvironmentHistoryCall), [*locations agents environments patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentPatchCall), [*locations agents environments run continuous test*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentRunContinuousTestCall), [*locations agents environments sessions detect intent*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionDetectIntentCall), [*locations agents environments sessions entity types 
create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypeCreateCall), [*locations agents environments sessions entity types delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypeDeleteCall), [*locations agents environments sessions entity types get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypeGetCall), [*locations agents environments sessions entity types list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypeListCall), [*locations agents environments sessions entity types patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionEntityTypePatchCall), [*locations agents environments sessions fulfill intent*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionFulfillIntentCall), [*locations agents environments sessions match intent*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentEnvironmentSessionMatchIntentCall), [*locations agents export*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentExportCall), [*locations agents flows create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowCreateCall), [*locations agents flows delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowDeleteCall), [*locations agents flows export*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowExportCall), [*locations agents flows 
get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowGetCall), [*locations agents flows get validation result*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowGetValidationResultCall), [*locations agents flows import*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowImportCall), [*locations agents flows list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowListCall), [*locations agents flows pages create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPageCreateCall), [*locations agents flows pages delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPageDeleteCall), [*locations agents flows pages get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPageGetCall), [*locations agents flows pages list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPageListCall), [*locations agents flows pages patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPagePatchCall), [*locations agents flows patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowPatchCall), [*locations agents flows train*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTrainCall), [*locations agents flows transition route groups create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupCreateCall), [*locations agents flows transition route groups delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupDeleteCall), 
[*locations agents flows transition route groups get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupGetCall), [*locations agents flows transition route groups list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupListCall), [*locations agents flows transition route groups patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowTransitionRouteGroupPatchCall), [*locations agents flows validate*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowValidateCall), [*locations agents flows versions compare versions*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionCompareVersionCall), [*locations agents flows versions create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionCreateCall), [*locations agents flows versions delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionDeleteCall), [*locations agents flows versions get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionGetCall), [*locations agents flows versions list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionListCall), [*locations agents flows versions load*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionLoadCall), [*locations agents flows versions patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentFlowVersionPatchCall), [*locations agents get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentGetCall), [*locations agents get validation 
result*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentGetValidationResultCall), [*locations agents intents create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentIntentCreateCall), [*locations agents intents delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentIntentDeleteCall), [*locations agents intents get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentIntentGetCall), [*locations agents intents list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentIntentListCall), [*locations agents intents patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentIntentPatchCall), [*locations agents list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentListCall), [*locations agents patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentPatchCall), [*locations agents restore*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentRestoreCall), [*locations agents sessions detect intent*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentSessionDetectIntentCall), [*locations agents sessions entity types create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypeCreateCall), [*locations agents sessions entity types delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypeDeleteCall), [*locations agents sessions entity types get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypeGetCall), [*locations agents sessions entity types 
list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypeListCall), [*locations agents sessions entity types patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentSessionEntityTypePatchCall), [*locations agents sessions fulfill intent*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentSessionFulfillIntentCall), [*locations agents sessions match intent*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentSessionMatchIntentCall), [*locations agents test cases batch delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseBatchDeleteCall), [*locations agents test cases batch run*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseBatchRunCall), [*locations agents test cases calculate coverage*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseCalculateCoverageCall), [*locations agents test cases create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseCreateCall), [*locations agents test cases export*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseExportCall), [*locations agents test cases get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseGetCall), [*locations agents test cases import*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseImportCall), [*locations agents test cases list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseListCall), [*locations agents test cases 
patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCasePatchCall), [*locations agents test cases results get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseResultGetCall), [*locations agents test cases results list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseResultListCall), [*locations agents test cases run*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentTestCaseRunCall), [*locations agents validate*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentValidateCall), [*locations agents webhooks create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookCreateCall), [*locations agents webhooks delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookDeleteCall), [*locations agents webhooks get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookGetCall), [*locations agents webhooks list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookListCall), [*locations agents webhooks patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationAgentWebhookPatchCall), [*locations get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationOperationGetCall), 
[*locations operations list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationOperationListCall), [*locations security settings create*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingCreateCall), [*locations security settings delete*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingDeleteCall), [*locations security settings get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingGetCall), [*locations security settings list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingListCall), [*locations security settings patch*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectLocationSecuritySettingPatchCall), [*operations cancel*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectOperationCancelCall), [*operations get*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectOperationGetCall) and [*operations list*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/api::ProjectOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/Dialogflow)** +* **[Hub](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/Dialogflow)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::CallBuilder) +* **[Resources](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::Part)** + * **[Parts](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -141,17 +141,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -161,29 +161,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::Delegate) to the -[Method Builder](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::Delegate) to the +[Method Builder](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::RequestValue) and -[decodable](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::RequestValue) and +[decodable](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dialogflow3/5.0.2-beta-1+20230110/google_dialogflow3/client::RequestValue) are moved +* [request values](https://docs.rs/google-dialogflow3/5.0.2+20230110/google_dialogflow3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/dialogflow3/src/api.rs b/gen/dialogflow3/src/api.rs index 711aeecf87..94356fdebb 100644 --- a/gen/dialogflow3/src/api.rs +++ b/gen/dialogflow3/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> Dialogflow { Dialogflow { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dialogflow.googleapis.com/".to_string(), _root_url: "https://dialogflow.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> Dialogflow { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dialogflow3/src/client.rs b/gen/dialogflow3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dialogflow3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dialogflow3/src/lib.rs b/gen/dialogflow3/src/lib.rs index f4b8b04b37..0480a4426e 100644 --- a/gen/dialogflow3/src/lib.rs +++ b/gen/dialogflow3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dialogflow* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *dialogflow:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dialogflow* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *dialogflow:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dialogflow* *v3* API can be found at the //! [official documentation site](https://cloud.google.com/dialogflow/). diff --git a/gen/digitalassetlinks1-cli/Cargo.toml b/gen/digitalassetlinks1-cli/Cargo.toml index 0f3b60c45c..e85893fc99 100644 --- a/gen/digitalassetlinks1-cli/Cargo.toml +++ b/gen/digitalassetlinks1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-digitalassetlinks1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with digitalassetlinks (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/digitalassetlinks1-cli" @@ -20,13 +20,13 @@ name = "digitalassetlinks1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-digitalassetlinks1] path = "../digitalassetlinks1" -version = "4.0.1+20220301" +version = "5.0.2+20230117" + diff --git a/gen/digitalassetlinks1-cli/README.md b/gen/digitalassetlinks1-cli/README.md index dd1343685a..50c4b52202 100644 --- a/gen/digitalassetlinks1-cli/README.md +++ b/gen/digitalassetlinks1-cli/README.md @@ 
-25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *digitalassetlinks* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *digitalassetlinks* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash digitalassetlinks1 [options] diff --git a/gen/digitalassetlinks1-cli/mkdocs.yml b/gen/digitalassetlinks1-cli/mkdocs.yml index e6e4161745..da58eb70dd 100644 --- a/gen/digitalassetlinks1-cli/mkdocs.yml +++ b/gen/digitalassetlinks1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: digitalassetlinks v4.0.1+20220301 +site_name: digitalassetlinks v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-digitalassetlinks1-cli site_description: A complete library to interact with digitalassetlinks (protocol v1) @@ -7,11 +7,13 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/digitalassetlink docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['assetlinks_bulk-check.md', 'Assetlinks', 'Bulk Check'] -- ['assetlinks_check.md', 'Assetlinks', 'Check'] -- ['statements_list.md', 'Statements', 'List'] +nav: +- Home: 'index.md' +- 'Assetlinks': + - 'Bulk Check': 'assetlinks_bulk-check.md' + - 'Check': 'assetlinks_check.md' +- 'Statements': + - 'List': 'statements_list.md' theme: readthedocs diff --git a/gen/digitalassetlinks1-cli/src/client.rs b/gen/digitalassetlinks1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/digitalassetlinks1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, 
PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/digitalassetlinks1-cli/src/main.rs b/gen/digitalassetlinks1-cli/src/main.rs index d0c5625824..2274cb65ee 100644 --- a/gen/digitalassetlinks1-cli/src/main.rs +++ b/gen/digitalassetlinks1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_digitalassetlinks1::{api, Error, oauth2}; +use google_digitalassetlinks1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -441,7 +440,7 @@ async fn main() { let mut app = App::new("digitalassetlinks1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230117") .about("Discovers relationships between online assets such as websites or mobile apps.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_digitalassetlinks1_cli") .arg(Arg::with_name("folder") diff --git a/gen/digitalassetlinks1/Cargo.toml b/gen/digitalassetlinks1/Cargo.toml index b392cdaba9..f404d650a4 100644 --- a/gen/digitalassetlinks1/Cargo.toml +++ b/gen/digitalassetlinks1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-digitalassetlinks1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with digitalassetlinks (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/digitalassetlinks1" homepage = "https://developers.google.com/digital-asset-links/" -documentation = 
"https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-digitalassetlinks1/5.0.2+20230117" license = "MIT" keywords = ["digitalassetlinks", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/digitalassetlinks1/README.md b/gen/digitalassetlinks1/README.md index e6c14a503c..da3c06046d 100644 --- a/gen/digitalassetlinks1/README.md +++ b/gen/digitalassetlinks1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-digitalassetlinks1` library allows access to all features of the *Google digitalassetlinks* service. -This documentation was generated from *digitalassetlinks* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *digitalassetlinks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *digitalassetlinks* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *digitalassetlinks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *digitalassetlinks* *v1* API can be found at the [official documentation site](https://developers.google.com/digital-asset-links/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/Digitalassetlinks) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/Digitalassetlinks) ... 
* assetlinks - * [*bulk check*](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/api::AssetlinkBulkCheckCall) and [*check*](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/api::AssetlinkCheckCall) -* [statements](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/api::Statement) - * [*list*](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/api::StatementListCall) + * [*bulk check*](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/api::AssetlinkBulkCheckCall) and [*check*](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/api::AssetlinkCheckCall) +* [statements](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/api::Statement) + * [*list*](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/api::StatementListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/Digitalassetlinks)** +* **[Hub](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/Digitalassetlinks)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::CallBuilder) -* **[Resources](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::CallBuilder) +* **[Resources](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::Part)** + * **[Parts](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::Delegate) to the -[Method Builder](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::Delegate) to the +[Method Builder](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::RequestValue) and -[decodable](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::RequestValue) and +[decodable](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-digitalassetlinks1/5.0.2-beta-1+20230117/google_digitalassetlinks1/client::RequestValue) are moved +* [request values](https://docs.rs/google-digitalassetlinks1/5.0.2+20230117/google_digitalassetlinks1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/digitalassetlinks1/src/api.rs b/gen/digitalassetlinks1/src/api.rs index 39fde36e0a..dab4b042ce 100644 --- a/gen/digitalassetlinks1/src/api.rs +++ b/gen/digitalassetlinks1/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> Digitalassetlinks { Digitalassetlinks { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://digitalassetlinks.googleapis.com/".to_string(), _root_url: "https://digitalassetlinks.googleapis.com/".to_string(), } @@ -117,7 +117,7 @@ impl<'a, S> Digitalassetlinks { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/digitalassetlinks1/src/client.rs b/gen/digitalassetlinks1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/digitalassetlinks1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/digitalassetlinks1/src/lib.rs b/gen/digitalassetlinks1/src/lib.rs index 87ea3e6bbb..97559df148 100644 --- a/gen/digitalassetlinks1/src/lib.rs +++ b/gen/digitalassetlinks1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *digitalassetlinks* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *digitalassetlinks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *digitalassetlinks* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *digitalassetlinks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *digitalassetlinks* *v1* API can be found at the //! [official documentation site](https://developers.google.com/digital-asset-links/). diff --git a/gen/discovery1-cli/Cargo.toml b/gen/discovery1-cli/Cargo.toml index da3db04308..383119c94e 100644 --- a/gen/discovery1-cli/Cargo.toml +++ b/gen/discovery1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-discovery1-cli" -version = "4.0.1+20200806" +version = "5.0.2+20200806" authors = ["Sebastian Thiel "] description = "A complete library to interact with discovery (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/discovery1-cli" @@ -20,13 +20,13 @@ name = "discovery1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-discovery1] path = "../discovery1" -version = "4.0.1+20200806" +version = "5.0.2+20200806" + diff --git a/gen/discovery1-cli/README.md b/gen/discovery1-cli/README.md index fe62dcaf7a..fd0a8dad37 100644 --- a/gen/discovery1-cli/README.md +++ b/gen/discovery1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *discovery* API at revision *20200806*. The CLI is at version *4.0.1*. +This documentation was generated from the *discovery* API at revision *20200806*. The CLI is at version *5.0.2*. ```bash discovery1 [options] diff --git a/gen/discovery1-cli/mkdocs.yml b/gen/discovery1-cli/mkdocs.yml index 38b1ed8848..ca2361be23 100644 --- a/gen/discovery1-cli/mkdocs.yml +++ b/gen/discovery1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: discovery v4.0.1+20200806 +site_name: discovery v5.0.2+20200806 site_url: http://byron.github.io/google-apis-rs/google-discovery1-cli site_description: A complete library to interact with discovery (protocol v1) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/discovery1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['apis_get-rest.md', 'Apis', 'Get Rest'] -- ['apis_list.md', 'Apis', 'List'] +nav: +- Home: 'index.md' +- 'Apis': + - 'Get Rest': 'apis_get-rest.md' + - 'List': 'apis_list.md' theme: readthedocs diff --git a/gen/discovery1-cli/src/client.rs b/gen/discovery1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/discovery1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - 
Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/discovery1-cli/src/main.rs b/gen/discovery1-cli/src/main.rs index 8c6d8ba84b..4e3127932f 100644 --- a/gen/discovery1-cli/src/main.rs +++ b/gen/discovery1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_discovery1::{api, Error, oauth2}; +use google_discovery1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -107,7 +106,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "preferred" => { - call = call.preferred(arg_from_str(value.unwrap_or("false"), err, "preferred", "boolean")); + call = call.preferred( value.map(|v| arg_from_str(v, err, "preferred", "boolean")).unwrap_or(false)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -296,7 +295,7 @@ async fn main() { let mut app = App::new("discovery1") .author("Sebastian Thiel ") - .version("4.0.1+20200806") + .version("5.0.2+20200806") .about("Provides information about other Google APIs, such as what APIs are available, the resource, and method details for each API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_discovery1_cli") .arg(Arg::with_name("folder") diff --git a/gen/discovery1/Cargo.toml b/gen/discovery1/Cargo.toml index 59da92185c..868d890552 100644 --- a/gen/discovery1/Cargo.toml +++ b/gen/discovery1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-discovery1" -version = "5.0.2-beta-1+20200806" +version = "5.0.2+20200806" authors = 
["Sebastian Thiel "] description = "A complete library to interact with discovery (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/discovery1" homepage = "https://developers.google.com/discovery/" -documentation = "https://docs.rs/google-discovery1/5.0.2-beta-1+20200806" +documentation = "https://docs.rs/google-discovery1/5.0.2+20200806" license = "MIT" keywords = ["discovery", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/discovery1/README.md b/gen/discovery1/README.md index 3d96e230a7..ca5660422e 100644 --- a/gen/discovery1/README.md +++ b/gen/discovery1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-discovery1` library allows access to all features of the *Google discovery* service. -This documentation was generated from *discovery* crate version *5.0.2-beta-1+20200806*, where *20200806* is the exact revision of the *discovery:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *discovery* crate version *5.0.2+20200806*, where *20200806* is the exact revision of the *discovery:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *discovery* *v1* API can be found at the [official documentation site](https://developers.google.com/discovery/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/Discovery) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/Discovery) ... 
* apis - * [*get rest*](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/api::ApiGetRestCall) and [*list*](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/api::ApiListCall) + * [*get rest*](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/api::ApiGetRestCall) and [*list*](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/api::ApiListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/Discovery)** +* **[Hub](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/Discovery)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::CallBuilder) -* **[Resources](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::CallBuilder) +* **[Resources](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::Part)** + * **[Parts](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -118,17 +118,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -138,29 +138,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::Delegate) to the -[Method Builder](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::Delegate) to the +[Method Builder](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::RequestValue) and -[decodable](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::RequestValue) and +[decodable](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-discovery1/5.0.2-beta-1+20200806/google_discovery1/client::RequestValue) are moved +* [request values](https://docs.rs/google-discovery1/5.0.2+20200806/google_discovery1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/discovery1/src/api.rs b/gen/discovery1/src/api.rs index 5eeaac2f6a..c3c1bce4b7 100644 --- a/gen/discovery1/src/api.rs +++ b/gen/discovery1/src/api.rs @@ -97,7 +97,7 @@ impl<'a, S> Discovery { Discovery { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/discovery/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -108,7 +108,7 @@ impl<'a, S> Discovery { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/discovery1/src/client.rs b/gen/discovery1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/discovery1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/discovery1/src/lib.rs b/gen/discovery1/src/lib.rs index aeeaa73425..615ac71e16 100644 --- a/gen/discovery1/src/lib.rs +++ b/gen/discovery1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *discovery* crate version *5.0.2-beta-1+20200806*, where *20200806* is the exact revision of the *discovery:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *discovery* crate version *5.0.2+20200806*, where *20200806* is the exact revision of the *discovery:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *discovery* *v1* API can be found at the //! [official documentation site](https://developers.google.com/discovery/). diff --git a/gen/displayvideo1-cli/Cargo.toml b/gen/displayvideo1-cli/Cargo.toml index 6eb0cea61e..7781aaade8 100644 --- a/gen/displayvideo1-cli/Cargo.toml +++ b/gen/displayvideo1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-displayvideo1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Display Video (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/displayvideo1-cli" @@ -20,13 +20,13 @@ name = "displayvideo1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-displayvideo1] path = "../displayvideo1" -version = "4.0.1+20220303" +version = "5.0.2+20230119" + diff --git a/gen/displayvideo1-cli/README.md b/gen/displayvideo1-cli/README.md index fc8d04c23d..8854af9e3e 100644 --- a/gen/displayvideo1-cli/README.md +++ b/gen/displayvideo1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Display Video* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *Display Video* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash displayvideo1 [options] @@ -138,6 +138,12 @@ displayvideo1 [options] google-audiences get [-p ]... [-o ] list [-p ]... [-o ] + guaranteed-orders + create (-r )... [-p ]... [-o ] + edit-guaranteed-order-read-accessors (-r )... [-p ]... [-o ] + get [-p ]... [-o ] + list [-p ]... [-o ] + patch (-r )... [-p ]... [-o ] inventory-source-groups assigned-inventory-sources-bulk-edit (-r )... [-p ]... [-o ] assigned-inventory-sources-create (-r )... [-p ]... [-o ] @@ -149,8 +155,11 @@ displayvideo1 [options] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] inventory-sources + create (-r )... [-p ]... [-o ] + edit-inventory-source-read-write-accessors (-r )... [-p ]... [-o ] get [-p ]... [-o ] list [-p ]... [-o ] + patch (-r )... [-p ]... [-o ] media download [-p ]... [-o ] upload (-r )... (-u simple -f [-m ]) [-p ]... 
[-o ] diff --git a/gen/displayvideo1-cli/mkdocs.yml b/gen/displayvideo1-cli/mkdocs.yml index 9c38795196..04857f39a9 100644 --- a/gen/displayvideo1-cli/mkdocs.yml +++ b/gen/displayvideo1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Display Video v4.0.1+20220303 +site_name: Display Video v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-displayvideo1-cli site_description: A complete library to interact with Display Video (protocol v1) @@ -7,150 +7,173 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/displayvideo1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['advertisers_assets-upload.md', 'Advertisers', 'Assets Upload'] -- ['advertisers_audit.md', 'Advertisers', 'Audit'] -- ['advertisers_bulk-edit-advertiser-assigned-targeting-options.md', 'Advertisers', 'Bulk Edit Advertiser Assigned Targeting Options'] -- ['advertisers_bulk-list-advertiser-assigned-targeting-options.md', 'Advertisers', 'Bulk List Advertiser Assigned Targeting Options'] -- ['advertisers_campaigns-bulk-list-campaign-assigned-targeting-options.md', 'Advertisers', 'Campaigns Bulk List Campaign Assigned Targeting Options'] -- ['advertisers_campaigns-create.md', 'Advertisers', 'Campaigns Create'] -- ['advertisers_campaigns-delete.md', 'Advertisers', 'Campaigns Delete'] -- ['advertisers_campaigns-get.md', 'Advertisers', 'Campaigns Get'] -- ['advertisers_campaigns-list.md', 'Advertisers', 'Campaigns List'] -- ['advertisers_campaigns-patch.md', 'Advertisers', 'Campaigns Patch'] -- ['advertisers_campaigns-targeting-types-assigned-targeting-options-get.md', 'Advertisers', 'Campaigns Targeting Types Assigned Targeting Options Get'] -- ['advertisers_campaigns-targeting-types-assigned-targeting-options-list.md', 'Advertisers', 'Campaigns Targeting Types Assigned Targeting Options List'] -- ['advertisers_channels-create.md', 'Advertisers', 'Channels Create'] -- ['advertisers_channels-get.md', 'Advertisers', 'Channels Get'] -- 
['advertisers_channels-list.md', 'Advertisers', 'Channels List'] -- ['advertisers_channels-patch.md', 'Advertisers', 'Channels Patch'] -- ['advertisers_channels-sites-bulk-edit.md', 'Advertisers', 'Channels Sites Bulk Edit'] -- ['advertisers_channels-sites-create.md', 'Advertisers', 'Channels Sites Create'] -- ['advertisers_channels-sites-delete.md', 'Advertisers', 'Channels Sites Delete'] -- ['advertisers_channels-sites-list.md', 'Advertisers', 'Channels Sites List'] -- ['advertisers_channels-sites-replace.md', 'Advertisers', 'Channels Sites Replace'] -- ['advertisers_create.md', 'Advertisers', 'Create'] -- ['advertisers_creatives-create.md', 'Advertisers', 'Creatives Create'] -- ['advertisers_creatives-delete.md', 'Advertisers', 'Creatives Delete'] -- ['advertisers_creatives-get.md', 'Advertisers', 'Creatives Get'] -- ['advertisers_creatives-list.md', 'Advertisers', 'Creatives List'] -- ['advertisers_creatives-patch.md', 'Advertisers', 'Creatives Patch'] -- ['advertisers_delete.md', 'Advertisers', 'Delete'] -- ['advertisers_get.md', 'Advertisers', 'Get'] -- ['advertisers_insertion-orders-bulk-list-insertion-order-assigned-targeting-options.md', 'Advertisers', 'Insertion Orders Bulk List Insertion Order Assigned Targeting Options'] -- ['advertisers_insertion-orders-create.md', 'Advertisers', 'Insertion Orders Create'] -- ['advertisers_insertion-orders-delete.md', 'Advertisers', 'Insertion Orders Delete'] -- ['advertisers_insertion-orders-get.md', 'Advertisers', 'Insertion Orders Get'] -- ['advertisers_insertion-orders-list.md', 'Advertisers', 'Insertion Orders List'] -- ['advertisers_insertion-orders-patch.md', 'Advertisers', 'Insertion Orders Patch'] -- ['advertisers_insertion-orders-targeting-types-assigned-targeting-options-get.md', 'Advertisers', 'Insertion Orders Targeting Types Assigned Targeting Options Get'] -- ['advertisers_insertion-orders-targeting-types-assigned-targeting-options-list.md', 'Advertisers', 'Insertion Orders Targeting Types Assigned 
Targeting Options List'] -- ['advertisers_invoices-list.md', 'Advertisers', 'Invoices List'] -- ['advertisers_invoices-lookup-invoice-currency.md', 'Advertisers', 'Invoices Lookup Invoice Currency'] -- ['advertisers_line-items-bulk-edit-line-item-assigned-targeting-options.md', 'Advertisers', 'Line Items Bulk Edit Line Item Assigned Targeting Options'] -- ['advertisers_line-items-bulk-list-line-item-assigned-targeting-options.md', 'Advertisers', 'Line Items Bulk List Line Item Assigned Targeting Options'] -- ['advertisers_line-items-create.md', 'Advertisers', 'Line Items Create'] -- ['advertisers_line-items-delete.md', 'Advertisers', 'Line Items Delete'] -- ['advertisers_line-items-generate-default.md', 'Advertisers', 'Line Items Generate Default'] -- ['advertisers_line-items-get.md', 'Advertisers', 'Line Items Get'] -- ['advertisers_line-items-list.md', 'Advertisers', 'Line Items List'] -- ['advertisers_line-items-patch.md', 'Advertisers', 'Line Items Patch'] -- ['advertisers_line-items-targeting-types-assigned-targeting-options-create.md', 'Advertisers', 'Line Items Targeting Types Assigned Targeting Options Create'] -- ['advertisers_line-items-targeting-types-assigned-targeting-options-delete.md', 'Advertisers', 'Line Items Targeting Types Assigned Targeting Options Delete'] -- ['advertisers_line-items-targeting-types-assigned-targeting-options-get.md', 'Advertisers', 'Line Items Targeting Types Assigned Targeting Options Get'] -- ['advertisers_line-items-targeting-types-assigned-targeting-options-list.md', 'Advertisers', 'Line Items Targeting Types Assigned Targeting Options List'] -- ['advertisers_list.md', 'Advertisers', 'List'] -- ['advertisers_location-lists-assigned-locations-bulk-edit.md', 'Advertisers', 'Location Lists Assigned Locations Bulk Edit'] -- ['advertisers_location-lists-assigned-locations-create.md', 'Advertisers', 'Location Lists Assigned Locations Create'] -- ['advertisers_location-lists-assigned-locations-delete.md', 'Advertisers', 
'Location Lists Assigned Locations Delete'] -- ['advertisers_location-lists-assigned-locations-list.md', 'Advertisers', 'Location Lists Assigned Locations List'] -- ['advertisers_location-lists-create.md', 'Advertisers', 'Location Lists Create'] -- ['advertisers_location-lists-get.md', 'Advertisers', 'Location Lists Get'] -- ['advertisers_location-lists-list.md', 'Advertisers', 'Location Lists List'] -- ['advertisers_location-lists-patch.md', 'Advertisers', 'Location Lists Patch'] -- ['advertisers_manual-triggers-activate.md', 'Advertisers', 'Manual Triggers Activate'] -- ['advertisers_manual-triggers-create.md', 'Advertisers', 'Manual Triggers Create'] -- ['advertisers_manual-triggers-deactivate.md', 'Advertisers', 'Manual Triggers Deactivate'] -- ['advertisers_manual-triggers-get.md', 'Advertisers', 'Manual Triggers Get'] -- ['advertisers_manual-triggers-list.md', 'Advertisers', 'Manual Triggers List'] -- ['advertisers_manual-triggers-patch.md', 'Advertisers', 'Manual Triggers Patch'] -- ['advertisers_negative-keyword-lists-create.md', 'Advertisers', 'Negative Keyword Lists Create'] -- ['advertisers_negative-keyword-lists-delete.md', 'Advertisers', 'Negative Keyword Lists Delete'] -- ['advertisers_negative-keyword-lists-get.md', 'Advertisers', 'Negative Keyword Lists Get'] -- ['advertisers_negative-keyword-lists-list.md', 'Advertisers', 'Negative Keyword Lists List'] -- ['advertisers_negative-keyword-lists-negative-keywords-bulk-edit.md', 'Advertisers', 'Negative Keyword Lists Negative Keywords Bulk Edit'] -- ['advertisers_negative-keyword-lists-negative-keywords-create.md', 'Advertisers', 'Negative Keyword Lists Negative Keywords Create'] -- ['advertisers_negative-keyword-lists-negative-keywords-delete.md', 'Advertisers', 'Negative Keyword Lists Negative Keywords Delete'] -- ['advertisers_negative-keyword-lists-negative-keywords-list.md', 'Advertisers', 'Negative Keyword Lists Negative Keywords List'] -- 
['advertisers_negative-keyword-lists-negative-keywords-replace.md', 'Advertisers', 'Negative Keyword Lists Negative Keywords Replace'] -- ['advertisers_negative-keyword-lists-patch.md', 'Advertisers', 'Negative Keyword Lists Patch'] -- ['advertisers_patch.md', 'Advertisers', 'Patch'] -- ['advertisers_targeting-types-assigned-targeting-options-create.md', 'Advertisers', 'Targeting Types Assigned Targeting Options Create'] -- ['advertisers_targeting-types-assigned-targeting-options-delete.md', 'Advertisers', 'Targeting Types Assigned Targeting Options Delete'] -- ['advertisers_targeting-types-assigned-targeting-options-get.md', 'Advertisers', 'Targeting Types Assigned Targeting Options Get'] -- ['advertisers_targeting-types-assigned-targeting-options-list.md', 'Advertisers', 'Targeting Types Assigned Targeting Options List'] -- ['combined-audiences_get.md', 'Combined Audiences', 'Get'] -- ['combined-audiences_list.md', 'Combined Audiences', 'List'] -- ['custom-bidding-algorithms_create.md', 'Custom Bidding Algorithms', 'Create'] -- ['custom-bidding-algorithms_get.md', 'Custom Bidding Algorithms', 'Get'] -- ['custom-bidding-algorithms_list.md', 'Custom Bidding Algorithms', 'List'] -- ['custom-bidding-algorithms_patch.md', 'Custom Bidding Algorithms', 'Patch'] -- ['custom-bidding-algorithms_scripts-create.md', 'Custom Bidding Algorithms', 'Scripts Create'] -- ['custom-bidding-algorithms_scripts-get.md', 'Custom Bidding Algorithms', 'Scripts Get'] -- ['custom-bidding-algorithms_scripts-list.md', 'Custom Bidding Algorithms', 'Scripts List'] -- ['custom-bidding-algorithms_upload-script.md', 'Custom Bidding Algorithms', 'Upload Script'] -- ['custom-lists_get.md', 'Custom Lists', 'Get'] -- ['custom-lists_list.md', 'Custom Lists', 'List'] -- ['first-and-third-party-audiences_create.md', 'First And Third Party Audiences', 'Create'] -- ['first-and-third-party-audiences_edit-customer-match-members.md', 'First And Third Party Audiences', 'Edit Customer Match Members'] -- 
['first-and-third-party-audiences_get.md', 'First And Third Party Audiences', 'Get'] -- ['first-and-third-party-audiences_list.md', 'First And Third Party Audiences', 'List'] -- ['first-and-third-party-audiences_patch.md', 'First And Third Party Audiences', 'Patch'] -- ['floodlight-groups_get.md', 'Floodlight Groups', 'Get'] -- ['floodlight-groups_patch.md', 'Floodlight Groups', 'Patch'] -- ['google-audiences_get.md', 'Google Audiences', 'Get'] -- ['google-audiences_list.md', 'Google Audiences', 'List'] -- ['inventory-source-groups_assigned-inventory-sources-bulk-edit.md', 'Inventory Source Groups', 'Assigned Inventory Sources Bulk Edit'] -- ['inventory-source-groups_assigned-inventory-sources-create.md', 'Inventory Source Groups', 'Assigned Inventory Sources Create'] -- ['inventory-source-groups_assigned-inventory-sources-delete.md', 'Inventory Source Groups', 'Assigned Inventory Sources Delete'] -- ['inventory-source-groups_assigned-inventory-sources-list.md', 'Inventory Source Groups', 'Assigned Inventory Sources List'] -- ['inventory-source-groups_create.md', 'Inventory Source Groups', 'Create'] -- ['inventory-source-groups_delete.md', 'Inventory Source Groups', 'Delete'] -- ['inventory-source-groups_get.md', 'Inventory Source Groups', 'Get'] -- ['inventory-source-groups_list.md', 'Inventory Source Groups', 'List'] -- ['inventory-source-groups_patch.md', 'Inventory Source Groups', 'Patch'] -- ['inventory-sources_get.md', 'Inventory Sources', 'Get'] -- ['inventory-sources_list.md', 'Inventory Sources', 'List'] -- ['media_download.md', 'Media', 'Download'] -- ['media_upload.md', 'Media', 'Upload'] -- ['partners_bulk-edit-partner-assigned-targeting-options.md', 'Partners', 'Bulk Edit Partner Assigned Targeting Options'] -- ['partners_channels-create.md', 'Partners', 'Channels Create'] -- ['partners_channels-get.md', 'Partners', 'Channels Get'] -- ['partners_channels-list.md', 'Partners', 'Channels List'] -- ['partners_channels-patch.md', 'Partners', 'Channels 
Patch'] -- ['partners_channels-sites-bulk-edit.md', 'Partners', 'Channels Sites Bulk Edit'] -- ['partners_channels-sites-create.md', 'Partners', 'Channels Sites Create'] -- ['partners_channels-sites-delete.md', 'Partners', 'Channels Sites Delete'] -- ['partners_channels-sites-list.md', 'Partners', 'Channels Sites List'] -- ['partners_channels-sites-replace.md', 'Partners', 'Channels Sites Replace'] -- ['partners_get.md', 'Partners', 'Get'] -- ['partners_list.md', 'Partners', 'List'] -- ['partners_targeting-types-assigned-targeting-options-create.md', 'Partners', 'Targeting Types Assigned Targeting Options Create'] -- ['partners_targeting-types-assigned-targeting-options-delete.md', 'Partners', 'Targeting Types Assigned Targeting Options Delete'] -- ['partners_targeting-types-assigned-targeting-options-get.md', 'Partners', 'Targeting Types Assigned Targeting Options Get'] -- ['partners_targeting-types-assigned-targeting-options-list.md', 'Partners', 'Targeting Types Assigned Targeting Options List'] -- ['sdfdownloadtasks_create.md', 'Sdfdownloadtasks', 'Create'] -- ['sdfdownloadtasks_operations-get.md', 'Sdfdownloadtasks', 'Operations Get'] -- ['targeting-types_targeting-options-get.md', 'Targeting Types', 'Targeting Options Get'] -- ['targeting-types_targeting-options-list.md', 'Targeting Types', 'Targeting Options List'] -- ['targeting-types_targeting-options-search.md', 'Targeting Types', 'Targeting Options Search'] -- ['users_bulk-edit-assigned-user-roles.md', 'Users', 'Bulk Edit Assigned User Roles'] -- ['users_create.md', 'Users', 'Create'] -- ['users_delete.md', 'Users', 'Delete'] -- ['users_get.md', 'Users', 'Get'] -- ['users_list.md', 'Users', 'List'] -- ['users_patch.md', 'Users', 'Patch'] +nav: +- Home: 'index.md' +- 'Advertisers': + - 'Assets Upload': 'advertisers_assets-upload.md' + - 'Audit': 'advertisers_audit.md' + - 'Bulk Edit Advertiser Assigned Targeting Options': 'advertisers_bulk-edit-advertiser-assigned-targeting-options.md' + - 'Bulk List 
Advertiser Assigned Targeting Options': 'advertisers_bulk-list-advertiser-assigned-targeting-options.md' + - 'Campaigns Bulk List Campaign Assigned Targeting Options': 'advertisers_campaigns-bulk-list-campaign-assigned-targeting-options.md' + - 'Campaigns Create': 'advertisers_campaigns-create.md' + - 'Campaigns Delete': 'advertisers_campaigns-delete.md' + - 'Campaigns Get': 'advertisers_campaigns-get.md' + - 'Campaigns List': 'advertisers_campaigns-list.md' + - 'Campaigns Patch': 'advertisers_campaigns-patch.md' + - 'Campaigns Targeting Types Assigned Targeting Options Get': 'advertisers_campaigns-targeting-types-assigned-targeting-options-get.md' + - 'Campaigns Targeting Types Assigned Targeting Options List': 'advertisers_campaigns-targeting-types-assigned-targeting-options-list.md' + - 'Channels Create': 'advertisers_channels-create.md' + - 'Channels Get': 'advertisers_channels-get.md' + - 'Channels List': 'advertisers_channels-list.md' + - 'Channels Patch': 'advertisers_channels-patch.md' + - 'Channels Sites Bulk Edit': 'advertisers_channels-sites-bulk-edit.md' + - 'Channels Sites Create': 'advertisers_channels-sites-create.md' + - 'Channels Sites Delete': 'advertisers_channels-sites-delete.md' + - 'Channels Sites List': 'advertisers_channels-sites-list.md' + - 'Channels Sites Replace': 'advertisers_channels-sites-replace.md' + - 'Create': 'advertisers_create.md' + - 'Creatives Create': 'advertisers_creatives-create.md' + - 'Creatives Delete': 'advertisers_creatives-delete.md' + - 'Creatives Get': 'advertisers_creatives-get.md' + - 'Creatives List': 'advertisers_creatives-list.md' + - 'Creatives Patch': 'advertisers_creatives-patch.md' + - 'Delete': 'advertisers_delete.md' + - 'Get': 'advertisers_get.md' + - 'Insertion Orders Bulk List Insertion Order Assigned Targeting Options': 'advertisers_insertion-orders-bulk-list-insertion-order-assigned-targeting-options.md' + - 'Insertion Orders Create': 'advertisers_insertion-orders-create.md' + - 'Insertion Orders 
Delete': 'advertisers_insertion-orders-delete.md' + - 'Insertion Orders Get': 'advertisers_insertion-orders-get.md' + - 'Insertion Orders List': 'advertisers_insertion-orders-list.md' + - 'Insertion Orders Patch': 'advertisers_insertion-orders-patch.md' + - 'Insertion Orders Targeting Types Assigned Targeting Options Get': 'advertisers_insertion-orders-targeting-types-assigned-targeting-options-get.md' + - 'Insertion Orders Targeting Types Assigned Targeting Options List': 'advertisers_insertion-orders-targeting-types-assigned-targeting-options-list.md' + - 'Invoices List': 'advertisers_invoices-list.md' + - 'Invoices Lookup Invoice Currency': 'advertisers_invoices-lookup-invoice-currency.md' + - 'Line Items Bulk Edit Line Item Assigned Targeting Options': 'advertisers_line-items-bulk-edit-line-item-assigned-targeting-options.md' + - 'Line Items Bulk List Line Item Assigned Targeting Options': 'advertisers_line-items-bulk-list-line-item-assigned-targeting-options.md' + - 'Line Items Create': 'advertisers_line-items-create.md' + - 'Line Items Delete': 'advertisers_line-items-delete.md' + - 'Line Items Generate Default': 'advertisers_line-items-generate-default.md' + - 'Line Items Get': 'advertisers_line-items-get.md' + - 'Line Items List': 'advertisers_line-items-list.md' + - 'Line Items Patch': 'advertisers_line-items-patch.md' + - 'Line Items Targeting Types Assigned Targeting Options Create': 'advertisers_line-items-targeting-types-assigned-targeting-options-create.md' + - 'Line Items Targeting Types Assigned Targeting Options Delete': 'advertisers_line-items-targeting-types-assigned-targeting-options-delete.md' + - 'Line Items Targeting Types Assigned Targeting Options Get': 'advertisers_line-items-targeting-types-assigned-targeting-options-get.md' + - 'Line Items Targeting Types Assigned Targeting Options List': 'advertisers_line-items-targeting-types-assigned-targeting-options-list.md' + - 'List': 'advertisers_list.md' + - 'Location Lists Assigned Locations 
Bulk Edit': 'advertisers_location-lists-assigned-locations-bulk-edit.md' + - 'Location Lists Assigned Locations Create': 'advertisers_location-lists-assigned-locations-create.md' + - 'Location Lists Assigned Locations Delete': 'advertisers_location-lists-assigned-locations-delete.md' + - 'Location Lists Assigned Locations List': 'advertisers_location-lists-assigned-locations-list.md' + - 'Location Lists Create': 'advertisers_location-lists-create.md' + - 'Location Lists Get': 'advertisers_location-lists-get.md' + - 'Location Lists List': 'advertisers_location-lists-list.md' + - 'Location Lists Patch': 'advertisers_location-lists-patch.md' + - 'Manual Triggers Activate': 'advertisers_manual-triggers-activate.md' + - 'Manual Triggers Create': 'advertisers_manual-triggers-create.md' + - 'Manual Triggers Deactivate': 'advertisers_manual-triggers-deactivate.md' + - 'Manual Triggers Get': 'advertisers_manual-triggers-get.md' + - 'Manual Triggers List': 'advertisers_manual-triggers-list.md' + - 'Manual Triggers Patch': 'advertisers_manual-triggers-patch.md' + - 'Negative Keyword Lists Create': 'advertisers_negative-keyword-lists-create.md' + - 'Negative Keyword Lists Delete': 'advertisers_negative-keyword-lists-delete.md' + - 'Negative Keyword Lists Get': 'advertisers_negative-keyword-lists-get.md' + - 'Negative Keyword Lists List': 'advertisers_negative-keyword-lists-list.md' + - 'Negative Keyword Lists Negative Keywords Bulk Edit': 'advertisers_negative-keyword-lists-negative-keywords-bulk-edit.md' + - 'Negative Keyword Lists Negative Keywords Create': 'advertisers_negative-keyword-lists-negative-keywords-create.md' + - 'Negative Keyword Lists Negative Keywords Delete': 'advertisers_negative-keyword-lists-negative-keywords-delete.md' + - 'Negative Keyword Lists Negative Keywords List': 'advertisers_negative-keyword-lists-negative-keywords-list.md' + - 'Negative Keyword Lists Negative Keywords Replace': 'advertisers_negative-keyword-lists-negative-keywords-replace.md' + 
- 'Negative Keyword Lists Patch': 'advertisers_negative-keyword-lists-patch.md' + - 'Patch': 'advertisers_patch.md' + - 'Targeting Types Assigned Targeting Options Create': 'advertisers_targeting-types-assigned-targeting-options-create.md' + - 'Targeting Types Assigned Targeting Options Delete': 'advertisers_targeting-types-assigned-targeting-options-delete.md' + - 'Targeting Types Assigned Targeting Options Get': 'advertisers_targeting-types-assigned-targeting-options-get.md' + - 'Targeting Types Assigned Targeting Options List': 'advertisers_targeting-types-assigned-targeting-options-list.md' +- 'Combined Audiences': + - 'Get': 'combined-audiences_get.md' + - 'List': 'combined-audiences_list.md' +- 'Custom Bidding Algorithms': + - 'Create': 'custom-bidding-algorithms_create.md' + - 'Get': 'custom-bidding-algorithms_get.md' + - 'List': 'custom-bidding-algorithms_list.md' + - 'Patch': 'custom-bidding-algorithms_patch.md' + - 'Scripts Create': 'custom-bidding-algorithms_scripts-create.md' + - 'Scripts Get': 'custom-bidding-algorithms_scripts-get.md' + - 'Scripts List': 'custom-bidding-algorithms_scripts-list.md' + - 'Upload Script': 'custom-bidding-algorithms_upload-script.md' +- 'Custom Lists': + - 'Get': 'custom-lists_get.md' + - 'List': 'custom-lists_list.md' +- 'First And Third Party Audiences': + - 'Create': 'first-and-third-party-audiences_create.md' + - 'Edit Customer Match Members': 'first-and-third-party-audiences_edit-customer-match-members.md' + - 'Get': 'first-and-third-party-audiences_get.md' + - 'List': 'first-and-third-party-audiences_list.md' + - 'Patch': 'first-and-third-party-audiences_patch.md' +- 'Floodlight Groups': + - 'Get': 'floodlight-groups_get.md' + - 'Patch': 'floodlight-groups_patch.md' +- 'Google Audiences': + - 'Get': 'google-audiences_get.md' + - 'List': 'google-audiences_list.md' +- 'Guaranteed Orders': + - 'Create': 'guaranteed-orders_create.md' + - 'Edit Guaranteed Order Read Accessors': 
'guaranteed-orders_edit-guaranteed-order-read-accessors.md' + - 'Get': 'guaranteed-orders_get.md' + - 'List': 'guaranteed-orders_list.md' + - 'Patch': 'guaranteed-orders_patch.md' +- 'Inventory Source Groups': + - 'Assigned Inventory Sources Bulk Edit': 'inventory-source-groups_assigned-inventory-sources-bulk-edit.md' + - 'Assigned Inventory Sources Create': 'inventory-source-groups_assigned-inventory-sources-create.md' + - 'Assigned Inventory Sources Delete': 'inventory-source-groups_assigned-inventory-sources-delete.md' + - 'Assigned Inventory Sources List': 'inventory-source-groups_assigned-inventory-sources-list.md' + - 'Create': 'inventory-source-groups_create.md' + - 'Delete': 'inventory-source-groups_delete.md' + - 'Get': 'inventory-source-groups_get.md' + - 'List': 'inventory-source-groups_list.md' + - 'Patch': 'inventory-source-groups_patch.md' +- 'Inventory Sources': + - 'Create': 'inventory-sources_create.md' + - 'Edit Inventory Source Read Write Accessors': 'inventory-sources_edit-inventory-source-read-write-accessors.md' + - 'Get': 'inventory-sources_get.md' + - 'List': 'inventory-sources_list.md' + - 'Patch': 'inventory-sources_patch.md' +- 'Media': + - 'Download': 'media_download.md' + - 'Upload': 'media_upload.md' +- 'Partners': + - 'Bulk Edit Partner Assigned Targeting Options': 'partners_bulk-edit-partner-assigned-targeting-options.md' + - 'Channels Create': 'partners_channels-create.md' + - 'Channels Get': 'partners_channels-get.md' + - 'Channels List': 'partners_channels-list.md' + - 'Channels Patch': 'partners_channels-patch.md' + - 'Channels Sites Bulk Edit': 'partners_channels-sites-bulk-edit.md' + - 'Channels Sites Create': 'partners_channels-sites-create.md' + - 'Channels Sites Delete': 'partners_channels-sites-delete.md' + - 'Channels Sites List': 'partners_channels-sites-list.md' + - 'Channels Sites Replace': 'partners_channels-sites-replace.md' + - 'Get': 'partners_get.md' + - 'List': 'partners_list.md' + - 'Targeting Types Assigned 
Targeting Options Create': 'partners_targeting-types-assigned-targeting-options-create.md' + - 'Targeting Types Assigned Targeting Options Delete': 'partners_targeting-types-assigned-targeting-options-delete.md' + - 'Targeting Types Assigned Targeting Options Get': 'partners_targeting-types-assigned-targeting-options-get.md' + - 'Targeting Types Assigned Targeting Options List': 'partners_targeting-types-assigned-targeting-options-list.md' +- 'Sdfdownloadtasks': + - 'Create': 'sdfdownloadtasks_create.md' + - 'Operations Get': 'sdfdownloadtasks_operations-get.md' +- 'Targeting Types': + - 'Targeting Options Get': 'targeting-types_targeting-options-get.md' + - 'Targeting Options List': 'targeting-types_targeting-options-list.md' + - 'Targeting Options Search': 'targeting-types_targeting-options-search.md' +- 'Users': + - 'Bulk Edit Assigned User Roles': 'users_bulk-edit-assigned-user-roles.md' + - 'Create': 'users_create.md' + - 'Delete': 'users_delete.md' + - 'Get': 'users_get.md' + - 'List': 'users_list.md' + - 'Patch': 'users_patch.md' theme: readthedocs diff --git a/gen/displayvideo1-cli/src/client.rs b/gen/displayvideo1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/displayvideo1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, 
-} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/displayvideo1-cli/src/main.rs b/gen/displayvideo1-cli/src/main.rs index cd0189dec5..fa4ceacf8e 100644 --- a/gen/displayvideo1-cli/src/main.rs +++ b/gen/displayvideo1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_displayvideo1::{api, Error, oauth2}; +use google_displayvideo1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -146,7 +145,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -289,7 +288,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -354,7 +353,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -629,7 +628,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => 
{ - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -745,7 +744,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -856,7 +855,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -957,7 +956,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1013,7 +1012,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1069,13 +1068,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, 
"page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1176,10 +1175,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1356,7 +1355,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1412,7 +1411,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1468,13 +1467,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1980,7 +1979,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( 
value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2130,7 +2129,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2293,7 +2292,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2381,6 +2380,7 @@ where "bid-strategy.performance-goal-auto-bid.max-average-cpm-bid-amount-micros" => Some(("bidStrategy.performanceGoalAutoBid.maxAverageCpmBidAmountMicros", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bid-strategy.performance-goal-auto-bid.performance-goal-amount-micros" => Some(("bidStrategy.performanceGoalAutoBid.performanceGoalAmountMicros", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bid-strategy.performance-goal-auto-bid.performance-goal-type" => Some(("bidStrategy.performanceGoalAutoBid.performanceGoalType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "billable-outcome" => Some(("billableOutcome", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "budget.automation-type" => Some(("budget.automationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "budget.budget-unit" => Some(("budget.budgetUnit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "campaign-id" => Some(("campaignId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2406,7 +2406,7 @@ where "reservation-type" => 
Some(("reservationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advertiser-id", "automation-type", "bid-amount-micros", "bid-strategy", "budget", "budget-unit", "campaign-id", "custom-bidding-algorithm-id", "daily-max-impressions", "daily-max-micros", "details", "display-name", "entity-status", "fixed-bid", "frequency-cap", "insertion-order-id", "insertion-order-type", "integration-code", "integration-details", "max-average-cpm-bid-amount-micros", "max-impressions", "maximize-spend-auto-bid", "name", "pacing", "pacing-period", "pacing-type", "performance-goal", "performance-goal-amount-micros", "performance-goal-auto-bid", "performance-goal-percentage-micros", "performance-goal-string", "performance-goal-type", "raise-bid-for-deals", "reservation-type", "time-unit", "time-unit-count", "unlimited", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advertiser-id", "automation-type", "bid-amount-micros", "bid-strategy", "billable-outcome", "budget", "budget-unit", "campaign-id", "custom-bidding-algorithm-id", "daily-max-impressions", "daily-max-micros", "details", "display-name", "entity-status", "fixed-bid", "frequency-cap", "insertion-order-id", "insertion-order-type", "integration-code", "integration-details", "max-average-cpm-bid-amount-micros", "max-impressions", "maximize-spend-auto-bid", "name", "pacing", "pacing-period", "pacing-type", "performance-goal", "performance-goal-amount-micros", "performance-goal-auto-bid", "performance-goal-percentage-micros", "performance-goal-string", "performance-goal-type", "raise-bid-for-deals", "reservation-type", "time-unit", "time-unit-count", "unlimited", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ 
-2580,7 +2580,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2668,6 +2668,7 @@ where "bid-strategy.performance-goal-auto-bid.max-average-cpm-bid-amount-micros" => Some(("bidStrategy.performanceGoalAutoBid.maxAverageCpmBidAmountMicros", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bid-strategy.performance-goal-auto-bid.performance-goal-amount-micros" => Some(("bidStrategy.performanceGoalAutoBid.performanceGoalAmountMicros", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bid-strategy.performance-goal-auto-bid.performance-goal-type" => Some(("bidStrategy.performanceGoalAutoBid.performanceGoalType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "billable-outcome" => Some(("billableOutcome", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "budget.automation-type" => Some(("budget.automationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "budget.budget-unit" => Some(("budget.budgetUnit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "campaign-id" => Some(("campaignId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2693,7 +2694,7 @@ where "reservation-type" => Some(("reservationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["advertiser-id", "automation-type", "bid-amount-micros", "bid-strategy", "budget", "budget-unit", "campaign-id", "custom-bidding-algorithm-id", "daily-max-impressions", "daily-max-micros", "details", 
"display-name", "entity-status", "fixed-bid", "frequency-cap", "insertion-order-id", "insertion-order-type", "integration-code", "integration-details", "max-average-cpm-bid-amount-micros", "max-impressions", "maximize-spend-auto-bid", "name", "pacing", "pacing-period", "pacing-type", "performance-goal", "performance-goal-amount-micros", "performance-goal-auto-bid", "performance-goal-percentage-micros", "performance-goal-string", "performance-goal-type", "raise-bid-for-deals", "reservation-type", "time-unit", "time-unit-count", "unlimited", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["advertiser-id", "automation-type", "bid-amount-micros", "bid-strategy", "billable-outcome", "budget", "budget-unit", "campaign-id", "custom-bidding-algorithm-id", "daily-max-impressions", "daily-max-micros", "details", "display-name", "entity-status", "fixed-bid", "frequency-cap", "insertion-order-id", "insertion-order-type", "integration-code", "integration-details", "max-average-cpm-bid-amount-micros", "max-impressions", "maximize-spend-auto-bid", "name", "pacing", "pacing-period", "pacing-type", "performance-goal", "performance-goal-amount-micros", "performance-goal-auto-bid", "performance-goal-percentage-micros", "performance-goal-string", "performance-goal-type", "raise-bid-for-deals", "reservation-type", "time-unit", "time-unit-count", "unlimited", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2708,7 +2709,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2819,7 +2820,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2884,7 +2885,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "loi-sapin-invoice-type" => { call = call.loi_sapin_invoice_type(value.unwrap_or("")); @@ -3089,7 +3090,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3486,7 +3487,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3633,7 +3634,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3734,12 +3735,19 @@ where "category-details.targeting-option-id" => Some(("categoryDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "channel-details.channel-id" => Some(("channelDetails.channelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "channel-details.negative" => 
Some(("channelDetails.negative", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "content-duration-details.content-duration" => Some(("contentDurationDetails.contentDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-duration-details.targeting-option-id" => Some(("contentDurationDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-genre-details.display-name" => Some(("contentGenreDetails.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-genre-details.negative" => Some(("contentGenreDetails.negative", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "content-genre-details.targeting-option-id" => Some(("contentGenreDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.ad-type" => Some(("contentInstreamPositionDetails.adType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.content-instream-position" => Some(("contentInstreamPositionDetails.contentInstreamPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.targeting-option-id" => Some(("contentInstreamPositionDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.ad-type" => Some(("contentOutstreamPositionDetails.adType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.content-outstream-position" => Some(("contentOutstreamPositionDetails.contentOutstreamPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.targeting-option-id" => Some(("contentOutstreamPositionDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "content-stream-type-details.content-stream-type" => Some(("contentStreamTypeDetails.contentStreamType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-stream-type-details.targeting-option-id" => Some(("contentStreamTypeDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "day-and-time-details.day-of-week" => Some(("dayAndTimeDetails.dayOfWeek", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "day-and-time-details.end-hour" => Some(("dayAndTimeDetails.endHour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "day-and-time-details.start-hour" => Some(("dayAndTimeDetails.startHour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -3836,7 +3844,7 @@ where "viewability-details.targeting-option-id" => Some(("viewabilityDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "viewability-details.viewability" => Some(("viewabilityDetails.viewability", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ad-type", "adloox", "age-range", "age-range-details", "app-category-details", "app-details", "app-id", "app-platform", "app-star-rating", "assigned-targeting-option-id", "audio-content-type", "audio-content-type-details", "authorized-seller-status", "authorized-seller-status-details", "avoid-insufficient-option", "avoid-insufficient-star-rating", "avoid-unknown-brand-safety-category", "avoided-age-ratings", "avoided-fraud-option", "avoided-high-severity-categories", "avoided-medium-severity-categories", "avoided-star-rating", "brand-safety-categories", "browser-details", "business-chain-details", "carrier-and-isp-details", "category-details", "channel-details", "channel-id", "content-instream-position", "content-instream-position-details", "content-outstream-position", "content-outstream-position-details", 
"content-position", "content-rating-tier", "custom-segment-id", "day-and-time-details", "day-of-week", "device-make-model-details", "device-type", "device-type-details", "digital-content-label-exclusion-details", "display-name", "display-viewability", "double-verify", "end-hour", "environment", "environment-details", "exchange-details", "exclude-unrateable", "excluded-ad-fraud-risk", "excluded-adloox-categories", "excluded-adult-risk", "excluded-alcohol-risk", "excluded-drugs-risk", "excluded-gambling-risk", "excluded-hate-speech-risk", "excluded-illegal-downloads-risk", "excluded-offensive-language-risk", "excluded-targeting-option-id", "excluded-violence-risk", "fraud-invalid-traffic", "gender", "gender-details", "geo-region-details", "geo-region-type", "household-income", "household-income-details", "iab", "inheritance", "integral-ad-science", "inventory-source-details", "inventory-source-group-details", "inventory-source-group-id", "inventory-source-id", "keyword", "keyword-details", "language-details", "latitude", "longitude", "name", "native-content-position-details", "negative", "negative-keyword-list-details", "negative-keyword-list-id", "omid", "omid-details", "on-screen-position", "on-screen-position-details", "operating-system-details", "parental-status", "parental-status-details", "player-impression-rate", "poi-details", "proximity-location-list-details", "proximity-location-list-id", "proximity-radius-amount", "proximity-radius-range", "proximity-radius-unit", "regional-location-list-details", "regional-location-list-id", "sensitive-category", "sensitive-category-exclusion-details", "start-hour", "sub-exchange-details", "targeting-option-id", "targeting-type", "third-party-verifier-details", "time-zone-resolution", "traq-score-option", "url", "url-details", "user-rewarded-content", "user-rewarded-content-details", "video-iab", "video-player-size", "video-player-size-details", "video-viewability", "video-viewable-rate", "viewability", 
"viewability-details", "viewable-during"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ad-type", "adloox", "age-range", "age-range-details", "app-category-details", "app-details", "app-id", "app-platform", "app-star-rating", "assigned-targeting-option-id", "audio-content-type", "audio-content-type-details", "authorized-seller-status", "authorized-seller-status-details", "avoid-insufficient-option", "avoid-insufficient-star-rating", "avoid-unknown-brand-safety-category", "avoided-age-ratings", "avoided-fraud-option", "avoided-high-severity-categories", "avoided-medium-severity-categories", "avoided-star-rating", "brand-safety-categories", "browser-details", "business-chain-details", "carrier-and-isp-details", "category-details", "channel-details", "channel-id", "content-duration", "content-duration-details", "content-genre-details", "content-instream-position", "content-instream-position-details", "content-outstream-position", "content-outstream-position-details", "content-position", "content-rating-tier", "content-stream-type", "content-stream-type-details", "custom-segment-id", "day-and-time-details", "day-of-week", "device-make-model-details", "device-type", "device-type-details", "digital-content-label-exclusion-details", "display-name", "display-viewability", "double-verify", "end-hour", "environment", "environment-details", "exchange-details", "exclude-unrateable", "excluded-ad-fraud-risk", "excluded-adloox-categories", "excluded-adult-risk", "excluded-alcohol-risk", "excluded-drugs-risk", "excluded-gambling-risk", "excluded-hate-speech-risk", "excluded-illegal-downloads-risk", "excluded-offensive-language-risk", "excluded-targeting-option-id", "excluded-violence-risk", "fraud-invalid-traffic", "gender", "gender-details", "geo-region-details", "geo-region-type", "household-income", "household-income-details", "iab", "inheritance", "integral-ad-science", "inventory-source-details", "inventory-source-group-details", "inventory-source-group-id", 
"inventory-source-id", "keyword", "keyword-details", "language-details", "latitude", "longitude", "name", "native-content-position-details", "negative", "negative-keyword-list-details", "negative-keyword-list-id", "omid", "omid-details", "on-screen-position", "on-screen-position-details", "operating-system-details", "parental-status", "parental-status-details", "player-impression-rate", "poi-details", "proximity-location-list-details", "proximity-location-list-id", "proximity-radius-amount", "proximity-radius-range", "proximity-radius-unit", "regional-location-list-details", "regional-location-list-id", "sensitive-category", "sensitive-category-exclusion-details", "start-hour", "sub-exchange-details", "targeting-option-id", "targeting-type", "third-party-verifier-details", "time-zone-resolution", "traq-score-option", "url", "url-details", "user-rewarded-content", "user-rewarded-content-details", "video-iab", "video-player-size", "video-player-size-details", "video-viewability", "video-viewable-rate", "viewability", "viewability-details", "viewable-during"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4010,7 +4018,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4072,13 +4080,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4367,7 +4375,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4573,7 +4581,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4672,7 +4680,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5042,7 +5050,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -5143,7 +5151,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5395,7 +5403,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, 
"page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5677,7 +5685,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -5860,7 +5868,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5975,7 +5983,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6076,12 +6084,19 @@ where "category-details.targeting-option-id" => Some(("categoryDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "channel-details.channel-id" => Some(("channelDetails.channelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "channel-details.negative" => Some(("channelDetails.negative", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "content-duration-details.content-duration" => Some(("contentDurationDetails.contentDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-duration-details.targeting-option-id" => Some(("contentDurationDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-genre-details.display-name" => 
Some(("contentGenreDetails.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-genre-details.negative" => Some(("contentGenreDetails.negative", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "content-genre-details.targeting-option-id" => Some(("contentGenreDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.ad-type" => Some(("contentInstreamPositionDetails.adType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.content-instream-position" => Some(("contentInstreamPositionDetails.contentInstreamPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.targeting-option-id" => Some(("contentInstreamPositionDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.ad-type" => Some(("contentOutstreamPositionDetails.adType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.content-outstream-position" => Some(("contentOutstreamPositionDetails.contentOutstreamPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.targeting-option-id" => Some(("contentOutstreamPositionDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-stream-type-details.content-stream-type" => Some(("contentStreamTypeDetails.contentStreamType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-stream-type-details.targeting-option-id" => Some(("contentStreamTypeDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "day-and-time-details.day-of-week" => Some(("dayAndTimeDetails.dayOfWeek", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "day-and-time-details.end-hour" => Some(("dayAndTimeDetails.endHour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "day-and-time-details.start-hour" => Some(("dayAndTimeDetails.startHour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -6178,7 +6193,7 @@ where "viewability-details.targeting-option-id" => Some(("viewabilityDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "viewability-details.viewability" => Some(("viewabilityDetails.viewability", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ad-type", "adloox", "age-range", "age-range-details", "app-category-details", "app-details", "app-id", "app-platform", "app-star-rating", "assigned-targeting-option-id", "audio-content-type", "audio-content-type-details", "authorized-seller-status", "authorized-seller-status-details", "avoid-insufficient-option", "avoid-insufficient-star-rating", "avoid-unknown-brand-safety-category", "avoided-age-ratings", "avoided-fraud-option", "avoided-high-severity-categories", "avoided-medium-severity-categories", "avoided-star-rating", "brand-safety-categories", "browser-details", "business-chain-details", "carrier-and-isp-details", "category-details", "channel-details", "channel-id", "content-instream-position", "content-instream-position-details", "content-outstream-position", "content-outstream-position-details", "content-position", "content-rating-tier", "custom-segment-id", "day-and-time-details", "day-of-week", "device-make-model-details", "device-type", "device-type-details", "digital-content-label-exclusion-details", "display-name", "display-viewability", "double-verify", "end-hour", "environment", "environment-details", "exchange-details", "exclude-unrateable", "excluded-ad-fraud-risk", "excluded-adloox-categories", "excluded-adult-risk", "excluded-alcohol-risk", 
"excluded-drugs-risk", "excluded-gambling-risk", "excluded-hate-speech-risk", "excluded-illegal-downloads-risk", "excluded-offensive-language-risk", "excluded-targeting-option-id", "excluded-violence-risk", "fraud-invalid-traffic", "gender", "gender-details", "geo-region-details", "geo-region-type", "household-income", "household-income-details", "iab", "inheritance", "integral-ad-science", "inventory-source-details", "inventory-source-group-details", "inventory-source-group-id", "inventory-source-id", "keyword", "keyword-details", "language-details", "latitude", "longitude", "name", "native-content-position-details", "negative", "negative-keyword-list-details", "negative-keyword-list-id", "omid", "omid-details", "on-screen-position", "on-screen-position-details", "operating-system-details", "parental-status", "parental-status-details", "player-impression-rate", "poi-details", "proximity-location-list-details", "proximity-location-list-id", "proximity-radius-amount", "proximity-radius-range", "proximity-radius-unit", "regional-location-list-details", "regional-location-list-id", "sensitive-category", "sensitive-category-exclusion-details", "start-hour", "sub-exchange-details", "targeting-option-id", "targeting-type", "third-party-verifier-details", "time-zone-resolution", "traq-score-option", "url", "url-details", "user-rewarded-content", "user-rewarded-content-details", "video-iab", "video-player-size", "video-player-size-details", "video-viewability", "video-viewable-rate", "viewability", "viewability-details", "viewable-during"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ad-type", "adloox", "age-range", "age-range-details", "app-category-details", "app-details", "app-id", "app-platform", "app-star-rating", "assigned-targeting-option-id", "audio-content-type", "audio-content-type-details", "authorized-seller-status", "authorized-seller-status-details", "avoid-insufficient-option", "avoid-insufficient-star-rating", 
"avoid-unknown-brand-safety-category", "avoided-age-ratings", "avoided-fraud-option", "avoided-high-severity-categories", "avoided-medium-severity-categories", "avoided-star-rating", "brand-safety-categories", "browser-details", "business-chain-details", "carrier-and-isp-details", "category-details", "channel-details", "channel-id", "content-duration", "content-duration-details", "content-genre-details", "content-instream-position", "content-instream-position-details", "content-outstream-position", "content-outstream-position-details", "content-position", "content-rating-tier", "content-stream-type", "content-stream-type-details", "custom-segment-id", "day-and-time-details", "day-of-week", "device-make-model-details", "device-type", "device-type-details", "digital-content-label-exclusion-details", "display-name", "display-viewability", "double-verify", "end-hour", "environment", "environment-details", "exchange-details", "exclude-unrateable", "excluded-ad-fraud-risk", "excluded-adloox-categories", "excluded-adult-risk", "excluded-alcohol-risk", "excluded-drugs-risk", "excluded-gambling-risk", "excluded-hate-speech-risk", "excluded-illegal-downloads-risk", "excluded-offensive-language-risk", "excluded-targeting-option-id", "excluded-violence-risk", "fraud-invalid-traffic", "gender", "gender-details", "geo-region-details", "geo-region-type", "household-income", "household-income-details", "iab", "inheritance", "integral-ad-science", "inventory-source-details", "inventory-source-group-details", "inventory-source-group-id", "inventory-source-id", "keyword", "keyword-details", "language-details", "latitude", "longitude", "name", "native-content-position-details", "negative", "negative-keyword-list-details", "negative-keyword-list-id", "omid", "omid-details", "on-screen-position", "on-screen-position-details", "operating-system-details", "parental-status", "parental-status-details", "player-impression-rate", "poi-details", "proximity-location-list-details", 
"proximity-location-list-id", "proximity-radius-amount", "proximity-radius-range", "proximity-radius-unit", "regional-location-list-details", "regional-location-list-id", "sensitive-category", "sensitive-category-exclusion-details", "start-hour", "sub-exchange-details", "targeting-option-id", "targeting-type", "third-party-verifier-details", "time-zone-resolution", "traq-score-option", "url", "url-details", "user-rewarded-content", "user-rewarded-content-details", "video-iab", "video-player-size", "video-player-size-details", "video-viewability", "video-viewable-rate", "viewability", "viewability-details", "viewable-during"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6352,7 +6367,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -6414,10 +6429,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6473,13 +6488,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, 
"page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -6488,7 +6503,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6637,10 +6652,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6696,13 +6711,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -6711,7 +6726,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6808,7 +6823,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = 
call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6903,10 +6918,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6962,10 +6977,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7021,19 +7036,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7089,10 +7104,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7148,7 +7163,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7207,7 +7222,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -7216,7 +7231,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7321,7 +7336,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7463,10 +7478,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( 
value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7522,13 +7537,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -7537,7 +7552,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7642,10 +7657,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7701,7 +7716,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7800,10 +7815,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| 
arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7859,10 +7874,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7918,13 +7933,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -7933,7 +7948,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7982,6 +7997,441 @@ where } } + async fn _guaranteed_orders_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "default-advertiser-id" => Some(("defaultAdvertiserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "default-campaign-id" => Some(("defaultCampaignId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "exchange" => Some(("exchange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "guaranteed-order-id" => Some(("guaranteedOrderId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "legacy-guaranteed-order-id" => Some(("legacyGuaranteedOrderId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "publisher-name" => Some(("publisherName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-access-inherited" => Some(("readAccessInherited", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "read-advertiser-ids" => Some(("readAdvertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "read-write-advertiser-id" => Some(("readWriteAdvertiserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-write-partner-id" => Some(("readWritePartnerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"status.config-status" => Some(("status.configStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.entity-pause-reason" => Some(("status.entityPauseReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.entity-status" => Some(("status.entityStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["config-status", "default-advertiser-id", "default-campaign-id", "display-name", "entity-pause-reason", "entity-status", "exchange", "guaranteed-order-id", "legacy-guaranteed-order-id", "name", "publisher-name", "read-access-inherited", "read-advertiser-ids", "read-write-advertiser-id", "read-write-partner-id", "status", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GuaranteedOrder = json::value::from_value(object).unwrap(); + let mut call = self.hub.guaranteed_orders().create(request); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "partner-id" => { + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); + }, + "advertiser-id" => { + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } 
+ } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["advertiser-id", "partner-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _guaranteed_orders_edit_guaranteed_order_read_accessors(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "added-advertisers" => Some(("addedAdvertisers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "partner-id" => Some(("partnerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-access-inherited" => Some(("readAccessInherited", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "removed-advertisers" => Some(("removedAdvertisers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["added-advertisers", "partner-id", "read-access-inherited", "removed-advertisers"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::EditGuaranteedOrderReadAccessorsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.guaranteed_orders().edit_guaranteed_order_read_accessors(request, opt.value_of("guaranteed-order-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = 
match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _guaranteed_orders_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.guaranteed_orders().get(opt.value_of("guaranteed-order-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "partner-id" => { + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); + }, + "advertiser-id" => { + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["advertiser-id", "partner-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _guaranteed_orders_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.guaranteed_orders().list(); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "partner-id" => { + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + "advertiser-id" => { + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["advertiser-id", "filter", "order-by", "page-size", "page-token", 
"partner-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _guaranteed_orders_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "default-advertiser-id" => Some(("defaultAdvertiserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "default-campaign-id" => Some(("defaultCampaignId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "exchange" => Some(("exchange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "guaranteed-order-id" => Some(("guaranteedOrderId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "legacy-guaranteed-order-id" => Some(("legacyGuaranteedOrderId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "publisher-name" => Some(("publisherName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-access-inherited" => Some(("readAccessInherited", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "read-advertiser-ids" => Some(("readAdvertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "read-write-advertiser-id" => Some(("readWriteAdvertiserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-write-partner-id" => Some(("readWritePartnerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.config-status" => Some(("status.configStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.entity-pause-reason" => Some(("status.entityPauseReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.entity-status" => Some(("status.entityStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, 
&vec!["config-status", "default-advertiser-id", "default-campaign-id", "display-name", "entity-pause-reason", "entity-status", "exchange", "guaranteed-order-id", "legacy-guaranteed-order-id", "name", "publisher-name", "read-access-inherited", "read-advertiser-ids", "read-write-advertiser-id", "read-write-partner-id", "status", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GuaranteedOrder = json::value::from_value(object).unwrap(); + let mut call = self.hub.guaranteed_orders().patch(request, opt.value_of("guaranteed-order-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "partner-id" => { + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); + }, + "advertiser-id" => { + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["advertiser-id", "partner-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _inventory_source_groups_assigned_inventory_sources_bulk_edit(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -8111,10 +8561,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8170,10 +8620,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8229,13 +8679,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -8244,7 +8694,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8335,10 +8785,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8394,10 +8844,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8453,10 +8903,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); 
}, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8512,13 +8962,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -8527,7 +8977,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8618,13 +9068,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8673,6 +9123,217 @@ where } } + async fn _inventory_sources_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); 
+ let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "commitment" => Some(("commitment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "deal-id" => Some(("dealId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "delivery-method" => Some(("deliveryMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "exchange" => Some(("exchange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "guaranteed-order-id" => Some(("guaranteedOrderId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "inventory-source-id" => Some(("inventorySourceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "inventory-source-product-type" => Some(("inventorySourceProductType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "inventory-source-type" => Some(("inventorySourceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "publisher-name" => Some(("publisherName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.inventory-source-rate-type" => Some(("rateDetails.inventorySourceRateType", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "rate-details.minimum-spend.currency-code" => Some(("rateDetails.minimumSpend.currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.minimum-spend.nanos" => Some(("rateDetails.minimumSpend.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rate-details.minimum-spend.units" => Some(("rateDetails.minimumSpend.units", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.rate.currency-code" => Some(("rateDetails.rate.currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.rate.nanos" => Some(("rateDetails.rate.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rate-details.rate.units" => Some(("rateDetails.rate.units", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.units-purchased" => Some(("rateDetails.unitsPurchased", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-advertiser-ids" => Some(("readAdvertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "read-partner-ids" => Some(("readPartnerIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "read-write-accessors.advertisers.advertiser-ids" => Some(("readWriteAccessors.advertisers.advertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "read-write-accessors.partner.partner-id" => Some(("readWriteAccessors.partner.partnerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.config-status" => Some(("status.configStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.entity-pause-reason" => Some(("status.entityPauseReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.entity-status" => Some(("status.entityStatus", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), + "status.seller-pause-reason" => Some(("status.sellerPauseReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.seller-status" => Some(("status.sellerStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sub-site-property-id" => Some(("subSitePropertyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.end-time" => Some(("timeRange.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.start-time" => Some(("timeRange.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["advertiser-ids", "advertisers", "commitment", "config-status", "currency-code", "deal-id", "delivery-method", "display-name", "end-time", "entity-pause-reason", "entity-status", "exchange", "guaranteed-order-id", "inventory-source-id", "inventory-source-product-type", "inventory-source-rate-type", "inventory-source-type", "minimum-spend", "name", "nanos", "partner", "partner-id", "publisher-name", "rate", "rate-details", "read-advertiser-ids", "read-partner-ids", "read-write-accessors", "seller-pause-reason", "seller-status", "start-time", "status", "sub-site-property-id", "time-range", "units", "units-purchased", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InventorySource = json::value::from_value(object).unwrap(); + let mut call = self.hub.inventory_sources().create(request); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "partner-id" => { + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); + }, + "advertiser-id" => { + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["advertiser-id", "partner-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _inventory_sources_edit_inventory_source_read_write_accessors(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); 
+ + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "advertisers-update.added-advertisers" => Some(("advertisersUpdate.addedAdvertisers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "advertisers-update.removed-advertisers" => Some(("advertisersUpdate.removedAdvertisers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "assign-partner" => Some(("assignPartner", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "partner-id" => Some(("partnerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["added-advertisers", "advertisers-update", "assign-partner", "partner-id", "removed-advertisers"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::EditInventorySourceReadWriteAccessorsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.inventory_sources().edit_inventory_source_read_write_accessors(request, opt.value_of("inventory-source-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + 
_ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _inventory_sources_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.inventory_sources().get(opt.value_of("inventory-source-id").unwrap_or("")); @@ -8680,7 +9341,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8736,13 +9397,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "partner-id" => { - call = call.partner_id(value.unwrap_or("")); + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", 
"int64")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -8751,7 +9412,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8800,6 +9461,132 @@ where } } + async fn _inventory_sources_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "commitment" => Some(("commitment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "deal-id" => Some(("dealId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "delivery-method" => Some(("deliveryMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "exchange" => Some(("exchange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "guaranteed-order-id" => Some(("guaranteedOrderId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "inventory-source-id" => Some(("inventorySourceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "inventory-source-product-type" => Some(("inventorySourceProductType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "inventory-source-type" => Some(("inventorySourceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "publisher-name" => Some(("publisherName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.inventory-source-rate-type" => Some(("rateDetails.inventorySourceRateType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.minimum-spend.currency-code" => Some(("rateDetails.minimumSpend.currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.minimum-spend.nanos" => Some(("rateDetails.minimumSpend.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rate-details.minimum-spend.units" => Some(("rateDetails.minimumSpend.units", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.rate.currency-code" => Some(("rateDetails.rate.currencyCode", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), + "rate-details.rate.nanos" => Some(("rateDetails.rate.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "rate-details.rate.units" => Some(("rateDetails.rate.units", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rate-details.units-purchased" => Some(("rateDetails.unitsPurchased", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-advertiser-ids" => Some(("readAdvertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "read-partner-ids" => Some(("readPartnerIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "read-write-accessors.advertisers.advertiser-ids" => Some(("readWriteAccessors.advertisers.advertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "read-write-accessors.partner.partner-id" => Some(("readWriteAccessors.partner.partnerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.config-status" => Some(("status.configStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.entity-pause-reason" => Some(("status.entityPauseReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.entity-status" => Some(("status.entityStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.seller-pause-reason" => Some(("status.sellerPauseReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "status.seller-status" => Some(("status.sellerStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sub-site-property-id" => Some(("subSitePropertyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.end-time" => Some(("timeRange.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-range.start-time" => Some(("timeRange.startTime", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["advertiser-ids", "advertisers", "commitment", "config-status", "currency-code", "deal-id", "delivery-method", "display-name", "end-time", "entity-pause-reason", "entity-status", "exchange", "guaranteed-order-id", "inventory-source-id", "inventory-source-product-type", "inventory-source-rate-type", "inventory-source-type", "minimum-spend", "name", "nanos", "partner", "partner-id", "publisher-name", "rate", "rate-details", "read-advertiser-ids", "read-partner-ids", "read-write-accessors", "seller-pause-reason", "seller-status", "start-time", "status", "sub-site-property-id", "time-range", "units", "units-purchased", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InventorySource = json::value::from_value(object).unwrap(); + let mut call = self.hub.inventory_sources().patch(request, opt.value_of("inventory-source-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "partner-id" => { + call = call.partner_id( value.map(|v| arg_from_str(v, err, "partner-id", "int64")).unwrap_or(-0)); + }, + "advertiser-id" => { + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in 
&self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["advertiser-id", "partner-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _media_download(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut download_mode = false; @@ -9080,7 +9867,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9136,7 +9923,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", 
"int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9195,7 +9982,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -9204,7 +9991,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9299,10 +10086,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9479,7 +10266,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9535,7 +10322,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9594,7 +10381,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -9603,7 +10390,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9800,7 +10587,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -9907,12 +10694,19 @@ where "category-details.targeting-option-id" => Some(("categoryDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "channel-details.channel-id" => Some(("channelDetails.channelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "channel-details.negative" => Some(("channelDetails.negative", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "content-duration-details.content-duration" => Some(("contentDurationDetails.contentDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-duration-details.targeting-option-id" => Some(("contentDurationDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-genre-details.display-name" => Some(("contentGenreDetails.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-genre-details.negative" => Some(("contentGenreDetails.negative", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "content-genre-details.targeting-option-id" => Some(("contentGenreDetails.targetingOptionId", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.ad-type" => Some(("contentInstreamPositionDetails.adType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.content-instream-position" => Some(("contentInstreamPositionDetails.contentInstreamPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-instream-position-details.targeting-option-id" => Some(("contentInstreamPositionDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.ad-type" => Some(("contentOutstreamPositionDetails.adType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.content-outstream-position" => Some(("contentOutstreamPositionDetails.contentOutstreamPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "content-outstream-position-details.targeting-option-id" => Some(("contentOutstreamPositionDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-stream-type-details.content-stream-type" => Some(("contentStreamTypeDetails.contentStreamType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "content-stream-type-details.targeting-option-id" => Some(("contentStreamTypeDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "day-and-time-details.day-of-week" => Some(("dayAndTimeDetails.dayOfWeek", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "day-and-time-details.end-hour" => Some(("dayAndTimeDetails.endHour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "day-and-time-details.start-hour" => Some(("dayAndTimeDetails.startHour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -10009,7 +10803,7 @@ where "viewability-details.targeting-option-id" => 
Some(("viewabilityDetails.targetingOptionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "viewability-details.viewability" => Some(("viewabilityDetails.viewability", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ad-type", "adloox", "age-range", "age-range-details", "app-category-details", "app-details", "app-id", "app-platform", "app-star-rating", "assigned-targeting-option-id", "audio-content-type", "audio-content-type-details", "authorized-seller-status", "authorized-seller-status-details", "avoid-insufficient-option", "avoid-insufficient-star-rating", "avoid-unknown-brand-safety-category", "avoided-age-ratings", "avoided-fraud-option", "avoided-high-severity-categories", "avoided-medium-severity-categories", "avoided-star-rating", "brand-safety-categories", "browser-details", "business-chain-details", "carrier-and-isp-details", "category-details", "channel-details", "channel-id", "content-instream-position", "content-instream-position-details", "content-outstream-position", "content-outstream-position-details", "content-position", "content-rating-tier", "custom-segment-id", "day-and-time-details", "day-of-week", "device-make-model-details", "device-type", "device-type-details", "digital-content-label-exclusion-details", "display-name", "display-viewability", "double-verify", "end-hour", "environment", "environment-details", "exchange-details", "exclude-unrateable", "excluded-ad-fraud-risk", "excluded-adloox-categories", "excluded-adult-risk", "excluded-alcohol-risk", "excluded-drugs-risk", "excluded-gambling-risk", "excluded-hate-speech-risk", "excluded-illegal-downloads-risk", "excluded-offensive-language-risk", "excluded-targeting-option-id", "excluded-violence-risk", "fraud-invalid-traffic", "gender", "gender-details", "geo-region-details", "geo-region-type", "household-income", "household-income-details", "iab", "inheritance", 
"integral-ad-science", "inventory-source-details", "inventory-source-group-details", "inventory-source-group-id", "inventory-source-id", "keyword", "keyword-details", "language-details", "latitude", "longitude", "name", "native-content-position-details", "negative", "negative-keyword-list-details", "negative-keyword-list-id", "omid", "omid-details", "on-screen-position", "on-screen-position-details", "operating-system-details", "parental-status", "parental-status-details", "player-impression-rate", "poi-details", "proximity-location-list-details", "proximity-location-list-id", "proximity-radius-amount", "proximity-radius-range", "proximity-radius-unit", "regional-location-list-details", "regional-location-list-id", "sensitive-category", "sensitive-category-exclusion-details", "start-hour", "sub-exchange-details", "targeting-option-id", "targeting-type", "third-party-verifier-details", "time-zone-resolution", "traq-score-option", "url", "url-details", "user-rewarded-content", "user-rewarded-content-details", "video-iab", "video-player-size", "video-player-size-details", "video-viewability", "video-viewable-rate", "viewability", "viewability-details", "viewable-during"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ad-type", "adloox", "age-range", "age-range-details", "app-category-details", "app-details", "app-id", "app-platform", "app-star-rating", "assigned-targeting-option-id", "audio-content-type", "audio-content-type-details", "authorized-seller-status", "authorized-seller-status-details", "avoid-insufficient-option", "avoid-insufficient-star-rating", "avoid-unknown-brand-safety-category", "avoided-age-ratings", "avoided-fraud-option", "avoided-high-severity-categories", "avoided-medium-severity-categories", "avoided-star-rating", "brand-safety-categories", "browser-details", "business-chain-details", "carrier-and-isp-details", "category-details", "channel-details", "channel-id", "content-duration", "content-duration-details", 
"content-genre-details", "content-instream-position", "content-instream-position-details", "content-outstream-position", "content-outstream-position-details", "content-position", "content-rating-tier", "content-stream-type", "content-stream-type-details", "custom-segment-id", "day-and-time-details", "day-of-week", "device-make-model-details", "device-type", "device-type-details", "digital-content-label-exclusion-details", "display-name", "display-viewability", "double-verify", "end-hour", "environment", "environment-details", "exchange-details", "exclude-unrateable", "excluded-ad-fraud-risk", "excluded-adloox-categories", "excluded-adult-risk", "excluded-alcohol-risk", "excluded-drugs-risk", "excluded-gambling-risk", "excluded-hate-speech-risk", "excluded-illegal-downloads-risk", "excluded-offensive-language-risk", "excluded-targeting-option-id", "excluded-violence-risk", "fraud-invalid-traffic", "gender", "gender-details", "geo-region-details", "geo-region-type", "household-income", "household-income-details", "iab", "inheritance", "integral-ad-science", "inventory-source-details", "inventory-source-group-details", "inventory-source-group-id", "inventory-source-id", "keyword", "keyword-details", "language-details", "latitude", "longitude", "name", "native-content-position-details", "negative", "negative-keyword-list-details", "negative-keyword-list-id", "omid", "omid-details", "on-screen-position", "on-screen-position-details", "operating-system-details", "parental-status", "parental-status-details", "player-impression-rate", "poi-details", "proximity-location-list-details", "proximity-location-list-id", "proximity-radius-amount", "proximity-radius-range", "proximity-radius-unit", "regional-location-list-details", "regional-location-list-id", "sensitive-category", "sensitive-category-exclusion-details", "start-hour", "sub-exchange-details", "targeting-option-id", "targeting-type", "third-party-verifier-details", "time-zone-resolution", "traq-score-option", "url", 
"url-details", "user-rewarded-content", "user-rewarded-content-details", "video-iab", "video-player-size", "video-player-size-details", "video-viewability", "video-viewable-rate", "viewability", "viewability-details", "viewable-during"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -10183,7 +10977,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -10394,7 +11188,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10453,7 +11247,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -10462,7 +11256,7 @@ where call = call.filter(value.unwrap_or("")); }, "advertiser-id" => { - call = call.advertiser_id(value.unwrap_or("")); + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10889,7 +11683,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); 
@@ -10987,7 +11781,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -11403,6 +12197,29 @@ where } } }, + ("guaranteed-orders", Some(opt)) => { + match opt.subcommand() { + ("create", Some(opt)) => { + call_result = self._guaranteed_orders_create(opt, dry_run, &mut err).await; + }, + ("edit-guaranteed-order-read-accessors", Some(opt)) => { + call_result = self._guaranteed_orders_edit_guaranteed_order_read_accessors(opt, dry_run, &mut err).await; + }, + ("get", Some(opt)) => { + call_result = self._guaranteed_orders_get(opt, dry_run, &mut err).await; + }, + ("list", Some(opt)) => { + call_result = self._guaranteed_orders_list(opt, dry_run, &mut err).await; + }, + ("patch", Some(opt)) => { + call_result = self._guaranteed_orders_patch(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("guaranteed-orders".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("inventory-source-groups", Some(opt)) => { match opt.subcommand() { ("assigned-inventory-sources-bulk-edit", Some(opt)) => { @@ -11440,12 +12257,21 @@ where }, ("inventory-sources", Some(opt)) => { match opt.subcommand() { + ("create", Some(opt)) => { + call_result = self._inventory_sources_create(opt, dry_run, &mut err).await; + }, + ("edit-inventory-source-read-write-accessors", Some(opt)) => { + call_result = self._inventory_sources_edit_inventory_source_read_write_accessors(opt, dry_run, &mut err).await; + }, ("get", Some(opt)) => { call_result = self._inventory_sources_get(opt, dry_run, &mut err).await; }, ("list", Some(opt)) => { call_result = self._inventory_sources_list(opt, dry_run, &mut err).await; }, + ("patch", Some(opt)) => { + call_result = self._inventory_sources_patch(opt, 
dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("inventory-sources".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -12552,7 +13378,7 @@ async fn main() { (Some(r##"insertion-order-id"##), None, - Some(r##"The ID of the insertion order we need to delete."##), + Some(r##"The ID of the insertion order to delete."##), Some(true), Some(false)), @@ -12771,7 +13597,7 @@ async fn main() { Some(false)), ]), ("line-items-bulk-edit-line-item-assigned-targeting-options", - Some(r##"Bulk edits targeting options under a single line item. The operation will delete the assigned targeting options provided in BulkEditLineItemAssignedTargetingOptionsRequest.delete_requests and then create the assigned targeting options provided in BulkEditLineItemAssignedTargetingOptionsRequest.create_requests. Requests to this endpoint cannot be made concurrently with the following requests updating the same line item: * BulkEditLineItemAssignedTargetingOptions * UpdateLineItem * CreateLineItemAssignedTargetingOption * DeleteLineItemAssignedTargetingOption"##), + Some(r##"Bulk edits targeting options under a single line item. The operation will delete the assigned targeting options provided in BulkEditLineItemAssignedTargetingOptionsRequest.delete_requests and then create the assigned targeting options provided in BulkEditLineItemAssignedTargetingOptionsRequest.create_requests. 
Requests to this endpoint cannot be made concurrently with the following requests updating the same line item: * UpdateLineItem * CreateLineItemAssignedTargetingOption * DeleteLineItemAssignedTargetingOption"##), "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/advertisers_line-items-bulk-edit-line-item-assigned-targeting-options", vec![ (Some(r##"advertiser-id"##), @@ -12872,7 +13698,7 @@ async fn main() { (Some(r##"line-item-id"##), None, - Some(r##"The ID of the line item we need to fetch."##), + Some(r##"The ID of the line item to delete."##), Some(true), Some(false)), @@ -12967,7 +13793,7 @@ async fn main() { Some(false)), ]), ("line-items-patch", - Some(r##"Updates an existing line item. Returns the updated line item if successful. Requests to this endpoint cannot be made concurrently with the following requests updating the same line item: * BulkEditLineItemAssignedTargetingOptions * UpdateLineItem * CreateLineItemAssignedTargetingOption * DeleteLineItemAssignedTargetingOption"##), + Some(r##"Updates an existing line item. Returns the updated line item if successful. Requests to this endpoint cannot be made concurrently with the following requests updating the same line item: * BulkEditAssignedTargetingOptions * BulkUpdateLineItems * CreateLineItemAssignedTargetingOption * DeleteLineItemAssignedTargetingOption"##), "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/advertisers_line-items-patch", vec![ (Some(r##"advertiser-id"##), @@ -13001,7 +13827,7 @@ async fn main() { Some(false)), ]), ("line-items-targeting-types-assigned-targeting-options-create", - Some(r##"Assigns a targeting option to a line item. Returns the assigned targeting option if successful. 
Requests to this endpoint cannot be made concurrently with the following requests updating the same line item: * BulkEditLineItemAssignedTargetingOptions * UpdateLineItem * CreateLineItemAssignedTargetingOption * DeleteLineItemAssignedTargetingOption"##), + Some(r##"Assigns a targeting option to a line item. Returns the assigned targeting option if successful. Requests to this endpoint cannot be made concurrently with the following requests updating the same line item: * BulkEditAssignedTargetingOptions * BulkUpdate * UpdateLineItem * DeleteLineItemAssignedTargetingOption"##), "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/advertisers_line-items-targeting-types-assigned-targeting-options-create", vec![ (Some(r##"advertiser-id"##), @@ -13041,7 +13867,7 @@ async fn main() { Some(false)), ]), ("line-items-targeting-types-assigned-targeting-options-delete", - Some(r##"Deletes an assigned targeting option from a line item. Requests to this endpoint cannot be made concurrently with the following requests updating the same line item: * BulkEditLineItemAssignedTargetingOptions * UpdateLineItem * CreateLineItemAssignedTargetingOption * DeleteLineItemAssignedTargetingOption"##), + Some(r##"Deletes an assigned targeting option from a line item. Requests to this endpoint cannot be made concurrently with the following requests updating the same line item: * BulkEditAssignedTargetingOptions * BulkUpdate * UpdateLineItem * CreateLineItemAssignedTargetingOption"##), "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/advertisers_line-items-targeting-types-assigned-targeting-options-delete", vec![ (Some(r##"advertiser-id"##), @@ -14542,6 +15368,125 @@ async fn main() { ]), ]), + ("guaranteed-orders", "methods: 'create', 'edit-guaranteed-order-read-accessors', 'get', 'list' and 'patch'", vec![ + ("create", + Some(r##"Creates a new guaranteed order. 
Returns the newly created guaranteed order if successful."##), + "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/guaranteed-orders_create", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("edit-guaranteed-order-read-accessors", + Some(r##"Edits read advertisers of a guaranteed order."##), + "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/guaranteed-orders_edit-guaranteed-order-read-accessors", + vec![ + (Some(r##"guaranteed-order-id"##), + None, + Some(r##"Required. The ID of the guaranteed order to edit. The ID is of the format `{exchange}-{legacy_guaranteed_order_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"Gets a guaranteed order."##), + "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/guaranteed-orders_get", + vec![ + (Some(r##"guaranteed-order-id"##), + None, + Some(r##"Required. The ID of the guaranteed order to fetch. 
The ID is of the format `{exchange}-{legacy_guaranteed_order_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list", + Some(r##"Lists guaranteed orders that are accessible to the current user. The order is defined by the order_by parameter. If a filter by entity_status is not specified, guaranteed orders with entity status `ENTITY_STATUS_ARCHIVED` will not be included in the results."##), + "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/guaranteed-orders_list", + vec![ + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("patch", + Some(r##"Updates an existing guaranteed order. Returns the updated guaranteed order if successful."##), + "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/guaranteed-orders_patch", + vec![ + (Some(r##"guaranteed-order-id"##), + None, + Some(r##"Output only. The unique identifier of the guaranteed order. 
The guaranteed order IDs have the format `{exchange}-{legacy_guaranteed_order_id}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("inventory-source-groups", "methods: 'assigned-inventory-sources-bulk-edit', 'assigned-inventory-sources-create', 'assigned-inventory-sources-delete', 'assigned-inventory-sources-list', 'create', 'delete', 'get', 'list' and 'patch'", vec![ ("assigned-inventory-sources-bulk-edit", Some(r##"Bulk edits multiple assignments between inventory sources and a single inventory source group. The operation will delete the assigned inventory sources provided in BulkEditAssignedInventorySourcesRequest.deleted_assigned_inventory_sources and then create the assigned inventory sources provided in BulkEditAssignedInventorySourcesRequest.created_assigned_inventory_sources."##), @@ -14761,7 +15706,57 @@ async fn main() { ]), ]), - ("inventory-sources", "methods: 'get' and 'list'", vec![ + ("inventory-sources", "methods: 'create', 'edit-inventory-source-read-write-accessors', 'get', 'list' and 'patch'", vec![ + ("create", + Some(r##"Creates a new inventory source. 
Returns the newly created inventory source if successful."##), + "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/inventory-sources_create", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("edit-inventory-source-read-write-accessors", + Some(r##"Edits read/write accessors of an inventory source. Returns the updated read_write_accessors for the inventory source."##), + "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/inventory-sources_edit-inventory-source-read-write-accessors", + vec![ + (Some(r##"inventory-source-id"##), + None, + Some(r##"Required. The ID of inventory source to update."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("get", Some(r##"Gets an inventory source."##), "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/inventory-sources_get", @@ -14794,6 +15789,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("patch", + Some(r##"Updates an existing inventory source. 
Returns the updated inventory source if successful."##), + "Details at http://byron.github.io/google-apis-rs/google_displayvideo1_cli/inventory-sources_patch", + vec![ + (Some(r##"inventory-source-id"##), + None, + Some(r##"Output only. The unique ID of the inventory source. Assigned by the system."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -15609,7 +16632,7 @@ async fn main() { let mut app = App::new("displayvideo1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230119") .about("Display & Video 360 API allows users to automate complex Display & Video 360 workflows, such as creating insertion orders and setting targeting options for individual line items.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_displayvideo1_cli") .arg(Arg::with_name("url") diff --git a/gen/displayvideo1/Cargo.toml b/gen/displayvideo1/Cargo.toml index aee13902b8..60e720aee1 100644 --- a/gen/displayvideo1/Cargo.toml +++ b/gen/displayvideo1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-displayvideo1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Display Video (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/displayvideo1" homepage = "https://developers.google.com/display-video/" -documentation = "https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-displayvideo1/5.0.2+20230119" license = "MIT" keywords = ["displayvideo", 
"google", "protocol", "web", "api"] autobins = false diff --git a/gen/displayvideo1/README.md b/gen/displayvideo1/README.md index 52946371d0..ba3bb669e0 100644 --- a/gen/displayvideo1/README.md +++ b/gen/displayvideo1/README.md @@ -5,54 +5,54 @@ DO NOT EDIT ! --> The `google-displayvideo1` library allows access to all features of the *Google Display Video* service. -This documentation was generated from *Display Video* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *displayvideo:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Display Video* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *displayvideo:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Display Video* *v1* API can be found at the [official documentation site](https://developers.google.com/display-video/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/DisplayVideo) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/DisplayVideo) ... 
-* [advertisers](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::Advertiser) - * [*assets upload*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserAssetUploadCall), [*audit*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserAuditCall), [*bulk edit advertiser assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserBulkEditAdvertiserAssignedTargetingOptionCall), [*bulk list advertiser assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserBulkListAdvertiserAssignedTargetingOptionCall), [*campaigns bulk list campaign assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCampaignBulkListCampaignAssignedTargetingOptionCall), [*campaigns create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCampaignCreateCall), [*campaigns delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCampaignDeleteCall), [*campaigns get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCampaignGetCall), [*campaigns list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCampaignListCall), [*campaigns patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCampaignPatchCall), [*campaigns targeting types assigned targeting options get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCampaignTargetingTypeAssignedTargetingOptionGetCall), [*campaigns targeting types assigned targeting options 
list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCampaignTargetingTypeAssignedTargetingOptionListCall), [*channels create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelCreateCall), [*channels get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelGetCall), [*channels list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelListCall), [*channels patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelPatchCall), [*channels sites bulk edit*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelSiteBulkEditCall), [*channels sites create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelSiteCreateCall), [*channels sites delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelSiteDeleteCall), [*channels sites list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelSiteListCall), [*channels sites replace*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserChannelSiteReplaceCall), [*create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCreateCall), [*creatives create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCreativeCreateCall), [*creatives delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCreativeDeleteCall), [*creatives get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCreativeGetCall), [*creatives 
list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCreativeListCall), [*creatives patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserCreativePatchCall), [*delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserDeleteCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserGetCall), [*insertion orders bulk list insertion order assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInsertionOrderBulkListInsertionOrderAssignedTargetingOptionCall), [*insertion orders create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInsertionOrderCreateCall), [*insertion orders delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInsertionOrderDeleteCall), [*insertion orders get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInsertionOrderGetCall), [*insertion orders list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInsertionOrderListCall), [*insertion orders patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInsertionOrderPatchCall), [*insertion orders targeting types assigned targeting options get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInsertionOrderTargetingTypeAssignedTargetingOptionGetCall), [*insertion orders targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInsertionOrderTargetingTypeAssignedTargetingOptionListCall), [*invoices 
list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInvoiceListCall), [*invoices lookup invoice currency*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserInvoiceLookupInvoiceCurrencyCall), [*line items bulk edit line item assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemBulkEditLineItemAssignedTargetingOptionCall), [*line items bulk list line item assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemBulkListLineItemAssignedTargetingOptionCall), [*line items create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemCreateCall), [*line items delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemDeleteCall), [*line items generate default*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemGenerateDefaultCall), [*line items get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemGetCall), [*line items list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemListCall), [*line items patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemPatchCall), [*line items targeting types assigned targeting options create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemTargetingTypeAssignedTargetingOptionCreateCall), [*line items targeting types assigned targeting options delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemTargetingTypeAssignedTargetingOptionDeleteCall), [*line items 
targeting types assigned targeting options get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemTargetingTypeAssignedTargetingOptionGetCall), [*line items targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLineItemTargetingTypeAssignedTargetingOptionListCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserListCall), [*location lists assigned locations bulk edit*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLocationListAssignedLocationBulkEditCall), [*location lists assigned locations create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLocationListAssignedLocationCreateCall), [*location lists assigned locations delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLocationListAssignedLocationDeleteCall), [*location lists assigned locations list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLocationListAssignedLocationListCall), [*location lists create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLocationListCreateCall), [*location lists get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLocationListGetCall), [*location lists list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLocationListListCall), [*location lists patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserLocationListPatchCall), [*manual triggers activate*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserManualTriggerActivateCall), [*manual triggers 
create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserManualTriggerCreateCall), [*manual triggers deactivate*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserManualTriggerDeactivateCall), [*manual triggers get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserManualTriggerGetCall), [*manual triggers list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserManualTriggerListCall), [*manual triggers patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserManualTriggerPatchCall), [*negative keyword lists create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListCreateCall), [*negative keyword lists delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListDeleteCall), [*negative keyword lists get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListGetCall), [*negative keyword lists list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListListCall), [*negative keyword lists negative keywords bulk edit*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordBulkEditCall), [*negative keyword lists negative keywords create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordCreateCall), [*negative keyword lists negative keywords delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordDeleteCall), [*negative keyword lists negative keywords 
list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordListCall), [*negative keyword lists negative keywords replace*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordReplaceCall), [*negative keyword lists patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListPatchCall), [*patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserPatchCall), [*targeting types assigned targeting options create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserTargetingTypeAssignedTargetingOptionCreateCall), [*targeting types assigned targeting options delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserTargetingTypeAssignedTargetingOptionDeleteCall), [*targeting types assigned targeting options get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserTargetingTypeAssignedTargetingOptionGetCall) and [*targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserTargetingTypeAssignedTargetingOptionListCall) -* [combined audiences](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CombinedAudience) - * [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CombinedAudienceGetCall) and [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CombinedAudienceListCall) -* [custom bidding algorithms](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithm) - * 
[*create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithmCreateCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithmGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithmListCall), [*patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithmPatchCall), [*scripts create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithmScriptCreateCall), [*scripts get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithmScriptGetCall), [*scripts list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithmScriptListCall) and [*upload script*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomBiddingAlgorithmUploadScriptCall) -* [custom lists](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomList) - * [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomListGetCall) and [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::CustomListListCall) -* [first and third party audiences](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FirstAndThirdPartyAudience) - * [*create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FirstAndThirdPartyAudienceCreateCall), [*edit customer match members*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FirstAndThirdPartyAudienceEditCustomerMatchMemberCall), 
[*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FirstAndThirdPartyAudienceGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FirstAndThirdPartyAudienceListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FirstAndThirdPartyAudiencePatchCall) -* [floodlight groups](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FloodlightGroup) - * [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FloodlightGroupGetCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::FloodlightGroupPatchCall) -* [google audiences](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GoogleAudience) - * [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GoogleAudienceGetCall) and [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GoogleAudienceListCall) -* [guaranteed orders](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GuaranteedOrder) - * [*create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GuaranteedOrderCreateCall), [*edit guaranteed order read accessors*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GuaranteedOrderEditGuaranteedOrderReadAccessorCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GuaranteedOrderGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GuaranteedOrderListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::GuaranteedOrderPatchCall) -* [inventory source 
groups](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroup) - * [*assigned inventory sources bulk edit*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupAssignedInventorySourceBulkEditCall), [*assigned inventory sources create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupAssignedInventorySourceCreateCall), [*assigned inventory sources delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupAssignedInventorySourceDeleteCall), [*assigned inventory sources list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupAssignedInventorySourceListCall), [*create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupCreateCall), [*delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupDeleteCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGroupPatchCall) -* [inventory sources](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySource) - * [*create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceCreateCall), [*edit inventory source read write accessors*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceEditInventorySourceReadWriteAccessorCall), 
[*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourceListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::InventorySourcePatchCall) +* [advertisers](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::Advertiser) + * [*assets upload*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserAssetUploadCall), [*audit*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserAuditCall), [*bulk edit advertiser assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserBulkEditAdvertiserAssignedTargetingOptionCall), [*bulk list advertiser assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserBulkListAdvertiserAssignedTargetingOptionCall), [*campaigns bulk list campaign assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCampaignBulkListCampaignAssignedTargetingOptionCall), [*campaigns create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCampaignCreateCall), [*campaigns delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCampaignDeleteCall), [*campaigns get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCampaignGetCall), [*campaigns list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCampaignListCall), [*campaigns patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCampaignPatchCall), [*campaigns targeting types assigned targeting options 
get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCampaignTargetingTypeAssignedTargetingOptionGetCall), [*campaigns targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCampaignTargetingTypeAssignedTargetingOptionListCall), [*channels create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelCreateCall), [*channels get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelGetCall), [*channels list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelListCall), [*channels patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelPatchCall), [*channels sites bulk edit*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelSiteBulkEditCall), [*channels sites create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelSiteCreateCall), [*channels sites delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelSiteDeleteCall), [*channels sites list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelSiteListCall), [*channels sites replace*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserChannelSiteReplaceCall), [*create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCreateCall), [*creatives create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCreativeCreateCall), [*creatives delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCreativeDeleteCall), [*creatives 
get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCreativeGetCall), [*creatives list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCreativeListCall), [*creatives patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserCreativePatchCall), [*delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserDeleteCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserGetCall), [*insertion orders bulk list insertion order assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInsertionOrderBulkListInsertionOrderAssignedTargetingOptionCall), [*insertion orders create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInsertionOrderCreateCall), [*insertion orders delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInsertionOrderDeleteCall), [*insertion orders get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInsertionOrderGetCall), [*insertion orders list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInsertionOrderListCall), [*insertion orders patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInsertionOrderPatchCall), [*insertion orders targeting types assigned targeting options get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInsertionOrderTargetingTypeAssignedTargetingOptionGetCall), [*insertion orders targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInsertionOrderTargetingTypeAssignedTargetingOptionListCall), [*invoices 
list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInvoiceListCall), [*invoices lookup invoice currency*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserInvoiceLookupInvoiceCurrencyCall), [*line items bulk edit line item assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemBulkEditLineItemAssignedTargetingOptionCall), [*line items bulk list line item assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemBulkListLineItemAssignedTargetingOptionCall), [*line items create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemCreateCall), [*line items delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemDeleteCall), [*line items generate default*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemGenerateDefaultCall), [*line items get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemGetCall), [*line items list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemListCall), [*line items patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemPatchCall), [*line items targeting types assigned targeting options create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemTargetingTypeAssignedTargetingOptionCreateCall), [*line items targeting types assigned targeting options delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemTargetingTypeAssignedTargetingOptionDeleteCall), [*line items targeting types assigned targeting options 
get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemTargetingTypeAssignedTargetingOptionGetCall), [*line items targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLineItemTargetingTypeAssignedTargetingOptionListCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserListCall), [*location lists assigned locations bulk edit*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLocationListAssignedLocationBulkEditCall), [*location lists assigned locations create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLocationListAssignedLocationCreateCall), [*location lists assigned locations delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLocationListAssignedLocationDeleteCall), [*location lists assigned locations list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLocationListAssignedLocationListCall), [*location lists create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLocationListCreateCall), [*location lists get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLocationListGetCall), [*location lists list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLocationListListCall), [*location lists patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserLocationListPatchCall), [*manual triggers activate*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserManualTriggerActivateCall), [*manual triggers create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserManualTriggerCreateCall), [*manual 
triggers deactivate*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserManualTriggerDeactivateCall), [*manual triggers get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserManualTriggerGetCall), [*manual triggers list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserManualTriggerListCall), [*manual triggers patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserManualTriggerPatchCall), [*negative keyword lists create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListCreateCall), [*negative keyword lists delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListDeleteCall), [*negative keyword lists get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListGetCall), [*negative keyword lists list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListListCall), [*negative keyword lists negative keywords bulk edit*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordBulkEditCall), [*negative keyword lists negative keywords create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordCreateCall), [*negative keyword lists negative keywords delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordDeleteCall), [*negative keyword lists negative keywords list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordListCall), [*negative keyword lists negative keywords 
replace*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListNegativeKeywordReplaceCall), [*negative keyword lists patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserNegativeKeywordListPatchCall), [*patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserPatchCall), [*targeting types assigned targeting options create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserTargetingTypeAssignedTargetingOptionCreateCall), [*targeting types assigned targeting options delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserTargetingTypeAssignedTargetingOptionDeleteCall), [*targeting types assigned targeting options get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserTargetingTypeAssignedTargetingOptionGetCall) and [*targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserTargetingTypeAssignedTargetingOptionListCall) +* [combined audiences](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CombinedAudience) + * [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CombinedAudienceGetCall) and [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CombinedAudienceListCall) +* [custom bidding algorithms](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithm) + * [*create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithmCreateCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithmGetCall), 
[*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithmListCall), [*patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithmPatchCall), [*scripts create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithmScriptCreateCall), [*scripts get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithmScriptGetCall), [*scripts list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithmScriptListCall) and [*upload script*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomBiddingAlgorithmUploadScriptCall) +* [custom lists](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomList) + * [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomListGetCall) and [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::CustomListListCall) +* [first and third party audiences](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FirstAndThirdPartyAudience) + * [*create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FirstAndThirdPartyAudienceCreateCall), [*edit customer match members*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FirstAndThirdPartyAudienceEditCustomerMatchMemberCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FirstAndThirdPartyAudienceGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FirstAndThirdPartyAudienceListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FirstAndThirdPartyAudiencePatchCall) +* [floodlight 
groups](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FloodlightGroup) + * [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FloodlightGroupGetCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::FloodlightGroupPatchCall) +* [google audiences](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GoogleAudience) + * [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GoogleAudienceGetCall) and [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GoogleAudienceListCall) +* [guaranteed orders](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GuaranteedOrder) + * [*create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GuaranteedOrderCreateCall), [*edit guaranteed order read accessors*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GuaranteedOrderEditGuaranteedOrderReadAccessorCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GuaranteedOrderGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GuaranteedOrderListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::GuaranteedOrderPatchCall) +* [inventory source groups](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroup) + * [*assigned inventory sources bulk edit*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupAssignedInventorySourceBulkEditCall), [*assigned inventory sources create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupAssignedInventorySourceCreateCall), [*assigned inventory sources 
delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupAssignedInventorySourceDeleteCall), [*assigned inventory sources list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupAssignedInventorySourceListCall), [*create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupCreateCall), [*delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupDeleteCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGroupPatchCall) +* [inventory sources](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySource) + * [*create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceCreateCall), [*edit inventory source read write accessors*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceEditInventorySourceReadWriteAccessorCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourceListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::InventorySourcePatchCall) * media - * [*download*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::MediaDownloadCall) and [*upload*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::MediaUploadCall) -* 
[partners](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::Partner) - * [*bulk edit partner assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerBulkEditPartnerAssignedTargetingOptionCall), [*channels create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelCreateCall), [*channels get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelGetCall), [*channels list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelListCall), [*channels patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelPatchCall), [*channels sites bulk edit*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelSiteBulkEditCall), [*channels sites create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelSiteCreateCall), [*channels sites delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelSiteDeleteCall), [*channels sites list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelSiteListCall), [*channels sites replace*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerChannelSiteReplaceCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerListCall), [*targeting types assigned targeting options create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerTargetingTypeAssignedTargetingOptionCreateCall), [*targeting types assigned targeting options 
delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerTargetingTypeAssignedTargetingOptionDeleteCall), [*targeting types assigned targeting options get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerTargetingTypeAssignedTargetingOptionGetCall) and [*targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::PartnerTargetingTypeAssignedTargetingOptionListCall) + * [*download*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::MediaDownloadCall) and [*upload*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::MediaUploadCall) +* [partners](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::Partner) + * [*bulk edit partner assigned targeting options*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerBulkEditPartnerAssignedTargetingOptionCall), [*channels create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelCreateCall), [*channels get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelGetCall), [*channels list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelListCall), [*channels patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelPatchCall), [*channels sites bulk edit*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelSiteBulkEditCall), [*channels sites create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelSiteCreateCall), [*channels sites delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelSiteDeleteCall), [*channels sites 
list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelSiteListCall), [*channels sites replace*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerChannelSiteReplaceCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerListCall), [*targeting types assigned targeting options create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerTargetingTypeAssignedTargetingOptionCreateCall), [*targeting types assigned targeting options delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerTargetingTypeAssignedTargetingOptionDeleteCall), [*targeting types assigned targeting options get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerTargetingTypeAssignedTargetingOptionGetCall) and [*targeting types assigned targeting options list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::PartnerTargetingTypeAssignedTargetingOptionListCall) * sdfdownloadtasks - * [*create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::SdfdownloadtaskCreateCall) and [*operations get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::SdfdownloadtaskOperationGetCall) + * [*create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::SdfdownloadtaskCreateCall) and [*operations get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::SdfdownloadtaskOperationGetCall) * targeting types - * [*targeting options get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::TargetingTypeTargetingOptionGetCall), [*targeting options 
list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::TargetingTypeTargetingOptionListCall) and [*targeting options search*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::TargetingTypeTargetingOptionSearchCall) -* [users](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::User) - * [*bulk edit assigned user roles*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::UserBulkEditAssignedUserRoleCall), [*create*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::UserCreateCall), [*delete*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::UserDeleteCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::UserGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::UserListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::UserPatchCall) + * [*targeting options get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::TargetingTypeTargetingOptionGetCall), [*targeting options list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::TargetingTypeTargetingOptionListCall) and [*targeting options search*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::TargetingTypeTargetingOptionSearchCall) +* [users](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::User) + * [*bulk edit assigned user roles*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::UserBulkEditAssignedUserRoleCall), [*create*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::UserCreateCall), 
[*delete*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::UserDeleteCall), [*get*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::UserGetCall), [*list*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::UserListCall) and [*patch*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::UserPatchCall) Upload supported by ... -* [*assets upload advertisers*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::AdvertiserAssetUploadCall) -* [*upload media*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::MediaUploadCall) +* [*assets upload advertisers*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::AdvertiserAssetUploadCall) +* [*upload media*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::MediaUploadCall) Download supported by ... -* [*download media*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/api::MediaDownloadCall) +* [*download media*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/api::MediaDownloadCall) @@ -60,17 +60,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/DisplayVideo)** +* **[Hub](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/DisplayVideo)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::CallBuilder) -* **[Resources](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::CallBuilder) +* **[Resources](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::Part)** + * **[Parts](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -239,17 +239,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -259,29 +259,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::Delegate) to the -[Method Builder](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::Delegate) to the +[Method Builder](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::RequestValue) and -[decodable](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::RequestValue) and +[decodable](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-displayvideo1/5.0.2-beta-1+20230119/google_displayvideo1/client::RequestValue) are moved +* [request values](https://docs.rs/google-displayvideo1/5.0.2+20230119/google_displayvideo1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/displayvideo1/src/api.rs b/gen/displayvideo1/src/api.rs index 4ced48d1ee..c412fe7021 100644 --- a/gen/displayvideo1/src/api.rs +++ b/gen/displayvideo1/src/api.rs @@ -135,7 +135,7 @@ impl<'a, S> DisplayVideo { DisplayVideo { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://displayvideo.googleapis.com/".to_string(), _root_url: "https://displayvideo.googleapis.com/".to_string(), } @@ -188,7 +188,7 @@ impl<'a, S> DisplayVideo { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/displayvideo1/src/client.rs b/gen/displayvideo1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/displayvideo1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/displayvideo1/src/lib.rs b/gen/displayvideo1/src/lib.rs index 9121262a8d..56ce84219d 100644 --- a/gen/displayvideo1/src/lib.rs +++ b/gen/displayvideo1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Display Video* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *displayvideo:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Display Video* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *displayvideo:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Display Video* *v1* API can be found at the //! [official documentation site](https://developers.google.com/display-video/). diff --git a/gen/dlp2-cli/Cargo.toml b/gen/dlp2-cli/Cargo.toml index 414a4b14e3..0b17af36ec 100644 --- a/gen/dlp2-cli/Cargo.toml +++ b/gen/dlp2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dlp2-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with DLP (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dlp2-cli" @@ -20,13 +20,13 @@ name = "dlp2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dlp2] path = "../dlp2" -version = "4.0.1+20220227" +version = "5.0.2+20230121" + diff --git a/gen/dlp2-cli/README.md b/gen/dlp2-cli/README.md index 78a6782859..afac92b25e 100644 --- a/gen/dlp2-cli/README.md +++ b/gen/dlp2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *DLP* API at revision 
*20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *DLP* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash dlp2 [options] diff --git a/gen/dlp2-cli/mkdocs.yml b/gen/dlp2-cli/mkdocs.yml index 043173b7cd..51b4beaadb 100644 --- a/gen/dlp2-cli/mkdocs.yml +++ b/gen/dlp2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: DLP v4.0.1+20220227 +site_name: DLP v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-dlp2-cli site_description: A complete library to interact with DLP (protocol v2) @@ -7,109 +7,113 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dlp2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['info-types_list.md', 'Info Types', 'List'] -- ['locations_info-types-list.md', 'Locations', 'Info Types List'] -- ['organizations_deidentify-templates-create.md', 'Organizations', 'Deidentify Templates Create'] -- ['organizations_deidentify-templates-delete.md', 'Organizations', 'Deidentify Templates Delete'] -- ['organizations_deidentify-templates-get.md', 'Organizations', 'Deidentify Templates Get'] -- ['organizations_deidentify-templates-list.md', 'Organizations', 'Deidentify Templates List'] -- ['organizations_deidentify-templates-patch.md', 'Organizations', 'Deidentify Templates Patch'] -- ['organizations_inspect-templates-create.md', 'Organizations', 'Inspect Templates Create'] -- ['organizations_inspect-templates-delete.md', 'Organizations', 'Inspect Templates Delete'] -- ['organizations_inspect-templates-get.md', 'Organizations', 'Inspect Templates Get'] -- ['organizations_inspect-templates-list.md', 'Organizations', 'Inspect Templates List'] -- ['organizations_inspect-templates-patch.md', 'Organizations', 'Inspect Templates Patch'] -- ['organizations_locations-deidentify-templates-create.md', 'Organizations', 'Locations Deidentify Templates Create'] -- ['organizations_locations-deidentify-templates-delete.md', 'Organizations', 'Locations Deidentify 
Templates Delete'] -- ['organizations_locations-deidentify-templates-get.md', 'Organizations', 'Locations Deidentify Templates Get'] -- ['organizations_locations-deidentify-templates-list.md', 'Organizations', 'Locations Deidentify Templates List'] -- ['organizations_locations-deidentify-templates-patch.md', 'Organizations', 'Locations Deidentify Templates Patch'] -- ['organizations_locations-dlp-jobs-list.md', 'Organizations', 'Locations Dlp Jobs List'] -- ['organizations_locations-inspect-templates-create.md', 'Organizations', 'Locations Inspect Templates Create'] -- ['organizations_locations-inspect-templates-delete.md', 'Organizations', 'Locations Inspect Templates Delete'] -- ['organizations_locations-inspect-templates-get.md', 'Organizations', 'Locations Inspect Templates Get'] -- ['organizations_locations-inspect-templates-list.md', 'Organizations', 'Locations Inspect Templates List'] -- ['organizations_locations-inspect-templates-patch.md', 'Organizations', 'Locations Inspect Templates Patch'] -- ['organizations_locations-job-triggers-create.md', 'Organizations', 'Locations Job Triggers Create'] -- ['organizations_locations-job-triggers-delete.md', 'Organizations', 'Locations Job Triggers Delete'] -- ['organizations_locations-job-triggers-get.md', 'Organizations', 'Locations Job Triggers Get'] -- ['organizations_locations-job-triggers-list.md', 'Organizations', 'Locations Job Triggers List'] -- ['organizations_locations-job-triggers-patch.md', 'Organizations', 'Locations Job Triggers Patch'] -- ['organizations_locations-stored-info-types-create.md', 'Organizations', 'Locations Stored Info Types Create'] -- ['organizations_locations-stored-info-types-delete.md', 'Organizations', 'Locations Stored Info Types Delete'] -- ['organizations_locations-stored-info-types-get.md', 'Organizations', 'Locations Stored Info Types Get'] -- ['organizations_locations-stored-info-types-list.md', 'Organizations', 'Locations Stored Info Types List'] -- 
['organizations_locations-stored-info-types-patch.md', 'Organizations', 'Locations Stored Info Types Patch'] -- ['organizations_stored-info-types-create.md', 'Organizations', 'Stored Info Types Create'] -- ['organizations_stored-info-types-delete.md', 'Organizations', 'Stored Info Types Delete'] -- ['organizations_stored-info-types-get.md', 'Organizations', 'Stored Info Types Get'] -- ['organizations_stored-info-types-list.md', 'Organizations', 'Stored Info Types List'] -- ['organizations_stored-info-types-patch.md', 'Organizations', 'Stored Info Types Patch'] -- ['projects_content-deidentify.md', 'Projects', 'Content Deidentify'] -- ['projects_content-inspect.md', 'Projects', 'Content Inspect'] -- ['projects_content-reidentify.md', 'Projects', 'Content Reidentify'] -- ['projects_deidentify-templates-create.md', 'Projects', 'Deidentify Templates Create'] -- ['projects_deidentify-templates-delete.md', 'Projects', 'Deidentify Templates Delete'] -- ['projects_deidentify-templates-get.md', 'Projects', 'Deidentify Templates Get'] -- ['projects_deidentify-templates-list.md', 'Projects', 'Deidentify Templates List'] -- ['projects_deidentify-templates-patch.md', 'Projects', 'Deidentify Templates Patch'] -- ['projects_dlp-jobs-cancel.md', 'Projects', 'Dlp Jobs Cancel'] -- ['projects_dlp-jobs-create.md', 'Projects', 'Dlp Jobs Create'] -- ['projects_dlp-jobs-delete.md', 'Projects', 'Dlp Jobs Delete'] -- ['projects_dlp-jobs-get.md', 'Projects', 'Dlp Jobs Get'] -- ['projects_dlp-jobs-list.md', 'Projects', 'Dlp Jobs List'] -- ['projects_image-redact.md', 'Projects', 'Image Redact'] -- ['projects_inspect-templates-create.md', 'Projects', 'Inspect Templates Create'] -- ['projects_inspect-templates-delete.md', 'Projects', 'Inspect Templates Delete'] -- ['projects_inspect-templates-get.md', 'Projects', 'Inspect Templates Get'] -- ['projects_inspect-templates-list.md', 'Projects', 'Inspect Templates List'] -- ['projects_inspect-templates-patch.md', 'Projects', 'Inspect Templates 
Patch'] -- ['projects_job-triggers-activate.md', 'Projects', 'Job Triggers Activate'] -- ['projects_job-triggers-create.md', 'Projects', 'Job Triggers Create'] -- ['projects_job-triggers-delete.md', 'Projects', 'Job Triggers Delete'] -- ['projects_job-triggers-get.md', 'Projects', 'Job Triggers Get'] -- ['projects_job-triggers-list.md', 'Projects', 'Job Triggers List'] -- ['projects_job-triggers-patch.md', 'Projects', 'Job Triggers Patch'] -- ['projects_locations-content-deidentify.md', 'Projects', 'Locations Content Deidentify'] -- ['projects_locations-content-inspect.md', 'Projects', 'Locations Content Inspect'] -- ['projects_locations-content-reidentify.md', 'Projects', 'Locations Content Reidentify'] -- ['projects_locations-deidentify-templates-create.md', 'Projects', 'Locations Deidentify Templates Create'] -- ['projects_locations-deidentify-templates-delete.md', 'Projects', 'Locations Deidentify Templates Delete'] -- ['projects_locations-deidentify-templates-get.md', 'Projects', 'Locations Deidentify Templates Get'] -- ['projects_locations-deidentify-templates-list.md', 'Projects', 'Locations Deidentify Templates List'] -- ['projects_locations-deidentify-templates-patch.md', 'Projects', 'Locations Deidentify Templates Patch'] -- ['projects_locations-dlp-jobs-cancel.md', 'Projects', 'Locations Dlp Jobs Cancel'] -- ['projects_locations-dlp-jobs-create.md', 'Projects', 'Locations Dlp Jobs Create'] -- ['projects_locations-dlp-jobs-delete.md', 'Projects', 'Locations Dlp Jobs Delete'] -- ['projects_locations-dlp-jobs-finish.md', 'Projects', 'Locations Dlp Jobs Finish'] -- ['projects_locations-dlp-jobs-get.md', 'Projects', 'Locations Dlp Jobs Get'] -- ['projects_locations-dlp-jobs-hybrid-inspect.md', 'Projects', 'Locations Dlp Jobs Hybrid Inspect'] -- ['projects_locations-dlp-jobs-list.md', 'Projects', 'Locations Dlp Jobs List'] -- ['projects_locations-image-redact.md', 'Projects', 'Locations Image Redact'] -- ['projects_locations-inspect-templates-create.md', 
'Projects', 'Locations Inspect Templates Create'] -- ['projects_locations-inspect-templates-delete.md', 'Projects', 'Locations Inspect Templates Delete'] -- ['projects_locations-inspect-templates-get.md', 'Projects', 'Locations Inspect Templates Get'] -- ['projects_locations-inspect-templates-list.md', 'Projects', 'Locations Inspect Templates List'] -- ['projects_locations-inspect-templates-patch.md', 'Projects', 'Locations Inspect Templates Patch'] -- ['projects_locations-job-triggers-activate.md', 'Projects', 'Locations Job Triggers Activate'] -- ['projects_locations-job-triggers-create.md', 'Projects', 'Locations Job Triggers Create'] -- ['projects_locations-job-triggers-delete.md', 'Projects', 'Locations Job Triggers Delete'] -- ['projects_locations-job-triggers-get.md', 'Projects', 'Locations Job Triggers Get'] -- ['projects_locations-job-triggers-hybrid-inspect.md', 'Projects', 'Locations Job Triggers Hybrid Inspect'] -- ['projects_locations-job-triggers-list.md', 'Projects', 'Locations Job Triggers List'] -- ['projects_locations-job-triggers-patch.md', 'Projects', 'Locations Job Triggers Patch'] -- ['projects_locations-stored-info-types-create.md', 'Projects', 'Locations Stored Info Types Create'] -- ['projects_locations-stored-info-types-delete.md', 'Projects', 'Locations Stored Info Types Delete'] -- ['projects_locations-stored-info-types-get.md', 'Projects', 'Locations Stored Info Types Get'] -- ['projects_locations-stored-info-types-list.md', 'Projects', 'Locations Stored Info Types List'] -- ['projects_locations-stored-info-types-patch.md', 'Projects', 'Locations Stored Info Types Patch'] -- ['projects_stored-info-types-create.md', 'Projects', 'Stored Info Types Create'] -- ['projects_stored-info-types-delete.md', 'Projects', 'Stored Info Types Delete'] -- ['projects_stored-info-types-get.md', 'Projects', 'Stored Info Types Get'] -- ['projects_stored-info-types-list.md', 'Projects', 'Stored Info Types List'] -- ['projects_stored-info-types-patch.md', 
'Projects', 'Stored Info Types Patch'] +nav: +- Home: 'index.md' +- 'Info Types': + - 'List': 'info-types_list.md' +- 'Locations': + - 'Info Types List': 'locations_info-types-list.md' +- 'Organizations': + - 'Deidentify Templates Create': 'organizations_deidentify-templates-create.md' + - 'Deidentify Templates Delete': 'organizations_deidentify-templates-delete.md' + - 'Deidentify Templates Get': 'organizations_deidentify-templates-get.md' + - 'Deidentify Templates List': 'organizations_deidentify-templates-list.md' + - 'Deidentify Templates Patch': 'organizations_deidentify-templates-patch.md' + - 'Inspect Templates Create': 'organizations_inspect-templates-create.md' + - 'Inspect Templates Delete': 'organizations_inspect-templates-delete.md' + - 'Inspect Templates Get': 'organizations_inspect-templates-get.md' + - 'Inspect Templates List': 'organizations_inspect-templates-list.md' + - 'Inspect Templates Patch': 'organizations_inspect-templates-patch.md' + - 'Locations Deidentify Templates Create': 'organizations_locations-deidentify-templates-create.md' + - 'Locations Deidentify Templates Delete': 'organizations_locations-deidentify-templates-delete.md' + - 'Locations Deidentify Templates Get': 'organizations_locations-deidentify-templates-get.md' + - 'Locations Deidentify Templates List': 'organizations_locations-deidentify-templates-list.md' + - 'Locations Deidentify Templates Patch': 'organizations_locations-deidentify-templates-patch.md' + - 'Locations Dlp Jobs List': 'organizations_locations-dlp-jobs-list.md' + - 'Locations Inspect Templates Create': 'organizations_locations-inspect-templates-create.md' + - 'Locations Inspect Templates Delete': 'organizations_locations-inspect-templates-delete.md' + - 'Locations Inspect Templates Get': 'organizations_locations-inspect-templates-get.md' + - 'Locations Inspect Templates List': 'organizations_locations-inspect-templates-list.md' + - 'Locations Inspect Templates Patch': 
'organizations_locations-inspect-templates-patch.md' + - 'Locations Job Triggers Create': 'organizations_locations-job-triggers-create.md' + - 'Locations Job Triggers Delete': 'organizations_locations-job-triggers-delete.md' + - 'Locations Job Triggers Get': 'organizations_locations-job-triggers-get.md' + - 'Locations Job Triggers List': 'organizations_locations-job-triggers-list.md' + - 'Locations Job Triggers Patch': 'organizations_locations-job-triggers-patch.md' + - 'Locations Stored Info Types Create': 'organizations_locations-stored-info-types-create.md' + - 'Locations Stored Info Types Delete': 'organizations_locations-stored-info-types-delete.md' + - 'Locations Stored Info Types Get': 'organizations_locations-stored-info-types-get.md' + - 'Locations Stored Info Types List': 'organizations_locations-stored-info-types-list.md' + - 'Locations Stored Info Types Patch': 'organizations_locations-stored-info-types-patch.md' + - 'Stored Info Types Create': 'organizations_stored-info-types-create.md' + - 'Stored Info Types Delete': 'organizations_stored-info-types-delete.md' + - 'Stored Info Types Get': 'organizations_stored-info-types-get.md' + - 'Stored Info Types List': 'organizations_stored-info-types-list.md' + - 'Stored Info Types Patch': 'organizations_stored-info-types-patch.md' +- 'Projects': + - 'Content Deidentify': 'projects_content-deidentify.md' + - 'Content Inspect': 'projects_content-inspect.md' + - 'Content Reidentify': 'projects_content-reidentify.md' + - 'Deidentify Templates Create': 'projects_deidentify-templates-create.md' + - 'Deidentify Templates Delete': 'projects_deidentify-templates-delete.md' + - 'Deidentify Templates Get': 'projects_deidentify-templates-get.md' + - 'Deidentify Templates List': 'projects_deidentify-templates-list.md' + - 'Deidentify Templates Patch': 'projects_deidentify-templates-patch.md' + - 'Dlp Jobs Cancel': 'projects_dlp-jobs-cancel.md' + - 'Dlp Jobs Create': 'projects_dlp-jobs-create.md' + - 'Dlp Jobs Delete': 
'projects_dlp-jobs-delete.md' + - 'Dlp Jobs Get': 'projects_dlp-jobs-get.md' + - 'Dlp Jobs List': 'projects_dlp-jobs-list.md' + - 'Image Redact': 'projects_image-redact.md' + - 'Inspect Templates Create': 'projects_inspect-templates-create.md' + - 'Inspect Templates Delete': 'projects_inspect-templates-delete.md' + - 'Inspect Templates Get': 'projects_inspect-templates-get.md' + - 'Inspect Templates List': 'projects_inspect-templates-list.md' + - 'Inspect Templates Patch': 'projects_inspect-templates-patch.md' + - 'Job Triggers Activate': 'projects_job-triggers-activate.md' + - 'Job Triggers Create': 'projects_job-triggers-create.md' + - 'Job Triggers Delete': 'projects_job-triggers-delete.md' + - 'Job Triggers Get': 'projects_job-triggers-get.md' + - 'Job Triggers List': 'projects_job-triggers-list.md' + - 'Job Triggers Patch': 'projects_job-triggers-patch.md' + - 'Locations Content Deidentify': 'projects_locations-content-deidentify.md' + - 'Locations Content Inspect': 'projects_locations-content-inspect.md' + - 'Locations Content Reidentify': 'projects_locations-content-reidentify.md' + - 'Locations Deidentify Templates Create': 'projects_locations-deidentify-templates-create.md' + - 'Locations Deidentify Templates Delete': 'projects_locations-deidentify-templates-delete.md' + - 'Locations Deidentify Templates Get': 'projects_locations-deidentify-templates-get.md' + - 'Locations Deidentify Templates List': 'projects_locations-deidentify-templates-list.md' + - 'Locations Deidentify Templates Patch': 'projects_locations-deidentify-templates-patch.md' + - 'Locations Dlp Jobs Cancel': 'projects_locations-dlp-jobs-cancel.md' + - 'Locations Dlp Jobs Create': 'projects_locations-dlp-jobs-create.md' + - 'Locations Dlp Jobs Delete': 'projects_locations-dlp-jobs-delete.md' + - 'Locations Dlp Jobs Finish': 'projects_locations-dlp-jobs-finish.md' + - 'Locations Dlp Jobs Get': 'projects_locations-dlp-jobs-get.md' + - 'Locations Dlp Jobs Hybrid Inspect': 
'projects_locations-dlp-jobs-hybrid-inspect.md' + - 'Locations Dlp Jobs List': 'projects_locations-dlp-jobs-list.md' + - 'Locations Image Redact': 'projects_locations-image-redact.md' + - 'Locations Inspect Templates Create': 'projects_locations-inspect-templates-create.md' + - 'Locations Inspect Templates Delete': 'projects_locations-inspect-templates-delete.md' + - 'Locations Inspect Templates Get': 'projects_locations-inspect-templates-get.md' + - 'Locations Inspect Templates List': 'projects_locations-inspect-templates-list.md' + - 'Locations Inspect Templates Patch': 'projects_locations-inspect-templates-patch.md' + - 'Locations Job Triggers Activate': 'projects_locations-job-triggers-activate.md' + - 'Locations Job Triggers Create': 'projects_locations-job-triggers-create.md' + - 'Locations Job Triggers Delete': 'projects_locations-job-triggers-delete.md' + - 'Locations Job Triggers Get': 'projects_locations-job-triggers-get.md' + - 'Locations Job Triggers Hybrid Inspect': 'projects_locations-job-triggers-hybrid-inspect.md' + - 'Locations Job Triggers List': 'projects_locations-job-triggers-list.md' + - 'Locations Job Triggers Patch': 'projects_locations-job-triggers-patch.md' + - 'Locations Stored Info Types Create': 'projects_locations-stored-info-types-create.md' + - 'Locations Stored Info Types Delete': 'projects_locations-stored-info-types-delete.md' + - 'Locations Stored Info Types Get': 'projects_locations-stored-info-types-get.md' + - 'Locations Stored Info Types List': 'projects_locations-stored-info-types-list.md' + - 'Locations Stored Info Types Patch': 'projects_locations-stored-info-types-patch.md' + - 'Stored Info Types Create': 'projects_stored-info-types-create.md' + - 'Stored Info Types Delete': 'projects_stored-info-types-delete.md' + - 'Stored Info Types Get': 'projects_stored-info-types-get.md' + - 'Stored Info Types List': 'projects_stored-info-types-list.md' + - 'Stored Info Types Patch': 'projects_stored-info-types-patch.md' theme: 
readthedocs diff --git a/gen/dlp2-cli/src/client.rs b/gen/dlp2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dlp2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dlp2-cli/src/main.rs b/gen/dlp2-cli/src/main.rs index e002117fee..7ea8eccb1e 100644 --- a/gen/dlp2-cli/src/main.rs +++ b/gen/dlp2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dlp2::{api, Error, oauth2}; +use google_dlp2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -383,7 +382,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -739,7 +738,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1095,7 +1094,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1253,7 +1252,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1522,7 +1521,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1915,7 +1914,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2309,7 +2308,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2673,7 +2672,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3317,7 +3316,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3790,7 +3789,7 @@ where 
call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4153,7 +4152,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4630,7 +4629,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -5304,7 +5303,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -5958,7 +5957,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -6321,7 +6320,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { 
call = call.order_by(value.unwrap_or("")); @@ -6895,7 +6894,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -7289,7 +7288,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -7653,7 +7652,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -8220,7 +8219,7 @@ async fn main() { let arg_data = [ ("info-types", "methods: 'list'", vec![ ("list", - Some(r##"Returns a list of the sensitive information types that the DLP API supports. See https://cloud.google.com/dlp/docs/infotypes-reference to learn more."##), + Some(r##"Returns a list of the sensitive information types that DLP API supports. See https://cloud.google.com/dlp/docs/infotypes-reference to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/info-types_list", vec![ (Some(r##"v"##), @@ -8239,7 +8238,7 @@ async fn main() { ("locations", "methods: 'info-types-list'", vec![ ("info-types-list", - Some(r##"Returns a list of the sensitive information types that the DLP API supports. See https://cloud.google.com/dlp/docs/infotypes-reference to learn more."##), + Some(r##"Returns a list of the sensitive information types that DLP API supports. 
See https://cloud.google.com/dlp/docs/infotypes-reference to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/locations_info-types-list", vec![ (Some(r##"parent"##), @@ -8264,7 +8263,7 @@ async fn main() { ("organizations", "methods: 'deidentify-templates-create', 'deidentify-templates-delete', 'deidentify-templates-get', 'deidentify-templates-list', 'deidentify-templates-patch', 'inspect-templates-create', 'inspect-templates-delete', 'inspect-templates-get', 'inspect-templates-list', 'inspect-templates-patch', 'locations-deidentify-templates-create', 'locations-deidentify-templates-delete', 'locations-deidentify-templates-get', 'locations-deidentify-templates-list', 'locations-deidentify-templates-patch', 'locations-dlp-jobs-list', 'locations-inspect-templates-create', 'locations-inspect-templates-delete', 'locations-inspect-templates-get', 'locations-inspect-templates-list', 'locations-inspect-templates-patch', 'locations-job-triggers-create', 'locations-job-triggers-delete', 'locations-job-triggers-get', 'locations-job-triggers-list', 'locations-job-triggers-patch', 'locations-stored-info-types-create', 'locations-stored-info-types-delete', 'locations-stored-info-types-get', 'locations-stored-info-types-list', 'locations-stored-info-types-patch', 'stored-info-types-create', 'stored-info-types-delete', 'stored-info-types-get', 'stored-info-types-list' and 'stored-info-types-patch'", vec![ ("deidentify-templates-create", - Some(r##"Creates a DeidentifyTemplate for re-using frequently used configuration for de-identifying content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates-deid to learn more."##), + Some(r##"Creates a DeidentifyTemplate for reusing frequently used configuration for de-identifying content, images, and storage. 
See https://cloud.google.com/dlp/docs/creating-templates-deid to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/organizations_deidentify-templates-create", vec![ (Some(r##"parent"##), @@ -8386,7 +8385,7 @@ async fn main() { Some(false)), ]), ("inspect-templates-create", - Some(r##"Creates an InspectTemplate for re-using frequently used configuration for inspecting content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates to learn more."##), + Some(r##"Creates an InspectTemplate for reusing frequently used configuration for inspecting content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/organizations_inspect-templates-create", vec![ (Some(r##"parent"##), @@ -8508,7 +8507,7 @@ async fn main() { Some(false)), ]), ("locations-deidentify-templates-create", - Some(r##"Creates a DeidentifyTemplate for re-using frequently used configuration for de-identifying content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates-deid to learn more."##), + Some(r##"Creates a DeidentifyTemplate for reusing frequently used configuration for de-identifying content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates-deid to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/organizations_locations-deidentify-templates-create", vec![ (Some(r##"parent"##), @@ -8652,7 +8651,7 @@ async fn main() { Some(false)), ]), ("locations-inspect-templates-create", - Some(r##"Creates an InspectTemplate for re-using frequently used configuration for inspecting content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates to learn more."##), + Some(r##"Creates an InspectTemplate for reusing frequently used configuration for inspecting content, images, and storage. 
See https://cloud.google.com/dlp/docs/creating-templates to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/organizations_locations-inspect-templates-create", vec![ (Some(r##"parent"##), @@ -8973,7 +8972,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Parent resource name. The format of this value varies depending on the scope of the request (project or organization) and whether you have [specified a processing location](https://cloud.google.com/dlp/docs/specifying-location): + Projects scope, location specified: `projects/`PROJECT_ID`/locations/`LOCATION_ID + Projects scope, no location specified (defaults to global): `projects/`PROJECT_ID + Organizations scope, location specified: `organizations/`ORG_ID`/locations/`LOCATION_ID + Organizations scope, no location specified (defaults to global): `organizations/`ORG_ID The following example `parent` string specifies a parent project with the identifier `example-project`, and specifies the `europe-west3` location for processing data: parent=projects/example-project/locations/europe-west3"##), + Some(r##"Required. Parent resource name. The format of this value varies depending on the scope of the request (project or organization) and whether you have [specified a processing location](https://cloud.google.com/dlp/docs/specifying-location): + Projects scope, location specified: `projects/`PROJECT_ID`/locations/`LOCATION_ID + Projects scope, no location specified (defaults to global): `projects/`PROJECT_ID The following example `parent` string specifies a parent project with the identifier `example-project`, and specifies the `europe-west3` location for processing data: parent=projects/example-project/locations/europe-west3"##), Some(true), Some(false)), @@ -9095,7 +9094,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Parent resource name. 
The format of this value varies depending on the scope of the request (project or organization) and whether you have [specified a processing location](https://cloud.google.com/dlp/docs/specifying-location): + Projects scope, location specified: `projects/`PROJECT_ID`/locations/`LOCATION_ID + Projects scope, no location specified (defaults to global): `projects/`PROJECT_ID + Organizations scope, location specified: `organizations/`ORG_ID`/locations/`LOCATION_ID + Organizations scope, no location specified (defaults to global): `organizations/`ORG_ID The following example `parent` string specifies a parent project with the identifier `example-project`, and specifies the `europe-west3` location for processing data: parent=projects/example-project/locations/europe-west3"##), + Some(r##"Required. Parent resource name. The format of this value varies depending on the scope of the request (project or organization) and whether you have [specified a processing location](https://cloud.google.com/dlp/docs/specifying-location): + Projects scope, location specified: `projects/`PROJECT_ID`/locations/`LOCATION_ID + Projects scope, no location specified (defaults to global): `projects/`PROJECT_ID The following example `parent` string specifies a parent project with the identifier `example-project`, and specifies the `europe-west3` location for processing data: parent=projects/example-project/locations/europe-west3"##), Some(true), Some(false)), @@ -9227,7 +9226,7 @@ async fn main() { Some(false)), ]), ("deidentify-templates-create", - Some(r##"Creates a DeidentifyTemplate for re-using frequently used configuration for de-identifying content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates-deid to learn more."##), + Some(r##"Creates a DeidentifyTemplate for reusing frequently used configuration for de-identifying content, images, and storage. 
See https://cloud.google.com/dlp/docs/creating-templates-deid to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/projects_deidentify-templates-create", vec![ (Some(r##"parent"##), @@ -9405,7 +9404,7 @@ async fn main() { Some(false)), ]), ("dlp-jobs-delete", - Some(r##"Deletes a long-running DlpJob. This method indicates that the client is no longer interested in the DlpJob result. The job will be cancelled if possible. See https://cloud.google.com/dlp/docs/inspecting-storage and https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more."##), + Some(r##"Deletes a long-running DlpJob. This method indicates that the client is no longer interested in the DlpJob result. The job will be canceled if possible. See https://cloud.google.com/dlp/docs/inspecting-storage and https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/projects_dlp-jobs-delete", vec![ (Some(r##"name"##), @@ -9499,7 +9498,7 @@ async fn main() { Some(false)), ]), ("inspect-templates-create", - Some(r##"Creates an InspectTemplate for re-using frequently used configuration for inspecting content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates to learn more."##), + Some(r##"Creates an InspectTemplate for reusing frequently used configuration for inspecting content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/projects_inspect-templates-create", vec![ (Some(r##"parent"##), @@ -9855,7 +9854,7 @@ async fn main() { Some(false)), ]), ("locations-deidentify-templates-create", - Some(r##"Creates a DeidentifyTemplate for re-using frequently used configuration for de-identifying content, images, and storage. 
See https://cloud.google.com/dlp/docs/creating-templates-deid to learn more."##), + Some(r##"Creates a DeidentifyTemplate for reusing frequently used configuration for de-identifying content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates-deid to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/projects_locations-deidentify-templates-create", vec![ (Some(r##"parent"##), @@ -10033,7 +10032,7 @@ async fn main() { Some(false)), ]), ("locations-dlp-jobs-delete", - Some(r##"Deletes a long-running DlpJob. This method indicates that the client is no longer interested in the DlpJob result. The job will be cancelled if possible. See https://cloud.google.com/dlp/docs/inspecting-storage and https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more."##), + Some(r##"Deletes a long-running DlpJob. This method indicates that the client is no longer interested in the DlpJob result. The job will be canceled if possible. See https://cloud.google.com/dlp/docs/inspecting-storage and https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/projects_locations-dlp-jobs-delete", vec![ (Some(r##"name"##), @@ -10183,7 +10182,7 @@ async fn main() { Some(false)), ]), ("locations-inspect-templates-create", - Some(r##"Creates an InspectTemplate for re-using frequently used configuration for inspecting content, images, and storage. See https://cloud.google.com/dlp/docs/creating-templates to learn more."##), + Some(r##"Creates an InspectTemplate for reusing frequently used configuration for inspecting content, images, and storage. 
See https://cloud.google.com/dlp/docs/creating-templates to learn more."##), "Details at http://byron.github.io/google-apis-rs/google_dlp2_cli/projects_locations-inspect-templates-create", vec![ (Some(r##"parent"##), @@ -10560,7 +10559,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Parent resource name. The format of this value varies depending on the scope of the request (project or organization) and whether you have [specified a processing location](https://cloud.google.com/dlp/docs/specifying-location): + Projects scope, location specified: `projects/`PROJECT_ID`/locations/`LOCATION_ID + Projects scope, no location specified (defaults to global): `projects/`PROJECT_ID + Organizations scope, location specified: `organizations/`ORG_ID`/locations/`LOCATION_ID + Organizations scope, no location specified (defaults to global): `organizations/`ORG_ID The following example `parent` string specifies a parent project with the identifier `example-project`, and specifies the `europe-west3` location for processing data: parent=projects/example-project/locations/europe-west3"##), + Some(r##"Required. Parent resource name. The format of this value varies depending on the scope of the request (project or organization) and whether you have [specified a processing location](https://cloud.google.com/dlp/docs/specifying-location): + Projects scope, location specified: `projects/`PROJECT_ID`/locations/`LOCATION_ID + Projects scope, no location specified (defaults to global): `projects/`PROJECT_ID The following example `parent` string specifies a parent project with the identifier `example-project`, and specifies the `europe-west3` location for processing data: parent=projects/example-project/locations/europe-west3"##), Some(true), Some(false)), @@ -10682,7 +10681,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Parent resource name. 
The format of this value varies depending on the scope of the request (project or organization) and whether you have [specified a processing location](https://cloud.google.com/dlp/docs/specifying-location): + Projects scope, location specified: `projects/`PROJECT_ID`/locations/`LOCATION_ID + Projects scope, no location specified (defaults to global): `projects/`PROJECT_ID + Organizations scope, location specified: `organizations/`ORG_ID`/locations/`LOCATION_ID + Organizations scope, no location specified (defaults to global): `organizations/`ORG_ID The following example `parent` string specifies a parent project with the identifier `example-project`, and specifies the `europe-west3` location for processing data: parent=projects/example-project/locations/europe-west3"##), + Some(r##"Required. Parent resource name. The format of this value varies depending on the scope of the request (project or organization) and whether you have [specified a processing location](https://cloud.google.com/dlp/docs/specifying-location): + Projects scope, location specified: `projects/`PROJECT_ID`/locations/`LOCATION_ID + Projects scope, no location specified (defaults to global): `projects/`PROJECT_ID The following example `parent` string specifies a parent project with the identifier `example-project`, and specifies the `europe-west3` location for processing data: parent=projects/example-project/locations/europe-west3"##), Some(true), Some(false)), @@ -10732,7 +10731,7 @@ async fn main() { let mut app = App::new("dlp2") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230121") .about("Provides methods for detection, risk analysis, and de-identification of privacy-sensitive fragments in text, images, and Google Cloud Platform storage repositories.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dlp2_cli") .arg(Arg::with_name("url") diff --git a/gen/dlp2/Cargo.toml b/gen/dlp2/Cargo.toml index 
8537c47e84..f6642ff1dc 100644 --- a/gen/dlp2/Cargo.toml +++ b/gen/dlp2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dlp2" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with DLP (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dlp2" homepage = "https://cloud.google.com/dlp/docs/" -documentation = "https://docs.rs/google-dlp2/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-dlp2/5.0.2+20230121" license = "MIT" keywords = ["dlp", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dlp2/README.md b/gen/dlp2/README.md index 5e5baed123..dc555154b0 100644 --- a/gen/dlp2/README.md +++ b/gen/dlp2/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-dlp2` library allows access to all features of the *Google DLP* service. -This documentation was generated from *DLP* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *dlp:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *DLP* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *dlp:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *DLP* *v2* API can be found at the [official documentation site](https://cloud.google.com/dlp/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/DLP) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/DLP) ... 
* info types - * [*list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::InfoTypeListCall) + * [*list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::InfoTypeListCall) * locations - * [*info types list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::LocationInfoTypeListCall) + * [*info types list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::LocationInfoTypeListCall) * organizations - * [*deidentify templates create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationDeidentifyTemplateCreateCall), [*deidentify templates delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationDeidentifyTemplateDeleteCall), [*deidentify templates get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationDeidentifyTemplateGetCall), [*deidentify templates list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationDeidentifyTemplateListCall), [*deidentify templates patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationDeidentifyTemplatePatchCall), [*inspect templates create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationInspectTemplateCreateCall), [*inspect templates delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationInspectTemplateDeleteCall), [*inspect templates get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationInspectTemplateGetCall), [*inspect templates list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationInspectTemplateListCall), [*inspect templates patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationInspectTemplatePatchCall), [*locations deidentify templates create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplateCreateCall), [*locations 
deidentify templates delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplateDeleteCall), [*locations deidentify templates get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplateGetCall), [*locations deidentify templates list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplateListCall), [*locations deidentify templates patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplatePatchCall), [*locations dlp jobs list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationDlpJobListCall), [*locations inspect templates create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationInspectTemplateCreateCall), [*locations inspect templates delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationInspectTemplateDeleteCall), [*locations inspect templates get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationInspectTemplateGetCall), [*locations inspect templates list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationInspectTemplateListCall), [*locations inspect templates patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationInspectTemplatePatchCall), [*locations job triggers create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationJobTriggerCreateCall), [*locations job triggers delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationJobTriggerDeleteCall), [*locations job triggers get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationJobTriggerGetCall), [*locations job triggers 
list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationJobTriggerListCall), [*locations job triggers patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationJobTriggerPatchCall), [*locations stored info types create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypeCreateCall), [*locations stored info types delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypeDeleteCall), [*locations stored info types get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypeGetCall), [*locations stored info types list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypeListCall), [*locations stored info types patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypePatchCall), [*stored info types create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationStoredInfoTypeCreateCall), [*stored info types delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationStoredInfoTypeDeleteCall), [*stored info types get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationStoredInfoTypeGetCall), [*stored info types list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationStoredInfoTypeListCall) and [*stored info types patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::OrganizationStoredInfoTypePatchCall) + * [*deidentify templates create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationDeidentifyTemplateCreateCall), [*deidentify templates delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationDeidentifyTemplateDeleteCall), [*deidentify templates 
get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationDeidentifyTemplateGetCall), [*deidentify templates list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationDeidentifyTemplateListCall), [*deidentify templates patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationDeidentifyTemplatePatchCall), [*inspect templates create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationInspectTemplateCreateCall), [*inspect templates delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationInspectTemplateDeleteCall), [*inspect templates get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationInspectTemplateGetCall), [*inspect templates list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationInspectTemplateListCall), [*inspect templates patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationInspectTemplatePatchCall), [*locations deidentify templates create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplateCreateCall), [*locations deidentify templates delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplateDeleteCall), [*locations deidentify templates get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplateGetCall), [*locations deidentify templates list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplateListCall), [*locations deidentify templates patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationDeidentifyTemplatePatchCall), [*locations dlp jobs list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationDlpJobListCall), [*locations inspect templates 
create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationInspectTemplateCreateCall), [*locations inspect templates delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationInspectTemplateDeleteCall), [*locations inspect templates get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationInspectTemplateGetCall), [*locations inspect templates list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationInspectTemplateListCall), [*locations inspect templates patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationInspectTemplatePatchCall), [*locations job triggers create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationJobTriggerCreateCall), [*locations job triggers delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationJobTriggerDeleteCall), [*locations job triggers get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationJobTriggerGetCall), [*locations job triggers list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationJobTriggerListCall), [*locations job triggers patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationJobTriggerPatchCall), [*locations stored info types create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypeCreateCall), [*locations stored info types delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypeDeleteCall), [*locations stored info types get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypeGetCall), [*locations stored info types list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypeListCall), [*locations stored info types 
patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationLocationStoredInfoTypePatchCall), [*stored info types create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationStoredInfoTypeCreateCall), [*stored info types delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationStoredInfoTypeDeleteCall), [*stored info types get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationStoredInfoTypeGetCall), [*stored info types list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationStoredInfoTypeListCall) and [*stored info types patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::OrganizationStoredInfoTypePatchCall) * projects - * [*content deidentify*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectContentDeidentifyCall), [*content inspect*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectContentInspectCall), [*content reidentify*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectContentReidentifyCall), [*deidentify templates create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDeidentifyTemplateCreateCall), [*deidentify templates delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDeidentifyTemplateDeleteCall), [*deidentify templates get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDeidentifyTemplateGetCall), [*deidentify templates list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDeidentifyTemplateListCall), [*deidentify templates patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDeidentifyTemplatePatchCall), [*dlp jobs cancel*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDlpJobCancelCall), [*dlp jobs 
create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDlpJobCreateCall), [*dlp jobs delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDlpJobDeleteCall), [*dlp jobs get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDlpJobGetCall), [*dlp jobs list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectDlpJobListCall), [*image redact*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectImageRedactCall), [*inspect templates create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectInspectTemplateCreateCall), [*inspect templates delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectInspectTemplateDeleteCall), [*inspect templates get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectInspectTemplateGetCall), [*inspect templates list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectInspectTemplateListCall), [*inspect templates patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectInspectTemplatePatchCall), [*job triggers activate*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectJobTriggerActivateCall), [*job triggers create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectJobTriggerCreateCall), [*job triggers delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectJobTriggerDeleteCall), [*job triggers get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectJobTriggerGetCall), [*job triggers list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectJobTriggerListCall), [*job triggers patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectJobTriggerPatchCall), [*locations content 
deidentify*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationContentDeidentifyCall), [*locations content inspect*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationContentInspectCall), [*locations content reidentify*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationContentReidentifyCall), [*locations deidentify templates create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplateCreateCall), [*locations deidentify templates delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplateDeleteCall), [*locations deidentify templates get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplateGetCall), [*locations deidentify templates list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplateListCall), [*locations deidentify templates patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplatePatchCall), [*locations dlp jobs cancel*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDlpJobCancelCall), [*locations dlp jobs create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDlpJobCreateCall), [*locations dlp jobs delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDlpJobDeleteCall), [*locations dlp jobs finish*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDlpJobFinishCall), [*locations dlp jobs get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDlpJobGetCall), [*locations dlp jobs hybrid inspect*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDlpJobHybridInspectCall), [*locations dlp jobs 
list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationDlpJobListCall), [*locations image redact*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationImageRedactCall), [*locations inspect templates create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationInspectTemplateCreateCall), [*locations inspect templates delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationInspectTemplateDeleteCall), [*locations inspect templates get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationInspectTemplateGetCall), [*locations inspect templates list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationInspectTemplateListCall), [*locations inspect templates patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationInspectTemplatePatchCall), [*locations job triggers activate*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationJobTriggerActivateCall), [*locations job triggers create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationJobTriggerCreateCall), [*locations job triggers delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationJobTriggerDeleteCall), [*locations job triggers get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationJobTriggerGetCall), [*locations job triggers hybrid inspect*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationJobTriggerHybridInspectCall), [*locations job triggers list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationJobTriggerListCall), [*locations job triggers patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationJobTriggerPatchCall), [*locations stored info types 
create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationStoredInfoTypeCreateCall), [*locations stored info types delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationStoredInfoTypeDeleteCall), [*locations stored info types get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationStoredInfoTypeGetCall), [*locations stored info types list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationStoredInfoTypeListCall), [*locations stored info types patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectLocationStoredInfoTypePatchCall), [*stored info types create*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectStoredInfoTypeCreateCall), [*stored info types delete*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectStoredInfoTypeDeleteCall), [*stored info types get*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectStoredInfoTypeGetCall), [*stored info types list*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectStoredInfoTypeListCall) and [*stored info types patch*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/api::ProjectStoredInfoTypePatchCall) + * [*content deidentify*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectContentDeidentifyCall), [*content inspect*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectContentInspectCall), [*content reidentify*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectContentReidentifyCall), [*deidentify templates create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDeidentifyTemplateCreateCall), [*deidentify templates delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDeidentifyTemplateDeleteCall), [*deidentify templates 
get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDeidentifyTemplateGetCall), [*deidentify templates list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDeidentifyTemplateListCall), [*deidentify templates patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDeidentifyTemplatePatchCall), [*dlp jobs cancel*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDlpJobCancelCall), [*dlp jobs create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDlpJobCreateCall), [*dlp jobs delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDlpJobDeleteCall), [*dlp jobs get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDlpJobGetCall), [*dlp jobs list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectDlpJobListCall), [*image redact*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectImageRedactCall), [*inspect templates create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectInspectTemplateCreateCall), [*inspect templates delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectInspectTemplateDeleteCall), [*inspect templates get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectInspectTemplateGetCall), [*inspect templates list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectInspectTemplateListCall), [*inspect templates patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectInspectTemplatePatchCall), [*job triggers activate*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectJobTriggerActivateCall), [*job triggers create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectJobTriggerCreateCall), [*job triggers delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectJobTriggerDeleteCall), [*job triggers 
get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectJobTriggerGetCall), [*job triggers list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectJobTriggerListCall), [*job triggers patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectJobTriggerPatchCall), [*locations content deidentify*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationContentDeidentifyCall), [*locations content inspect*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationContentInspectCall), [*locations content reidentify*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationContentReidentifyCall), [*locations deidentify templates create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplateCreateCall), [*locations deidentify templates delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplateDeleteCall), [*locations deidentify templates get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplateGetCall), [*locations deidentify templates list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplateListCall), [*locations deidentify templates patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDeidentifyTemplatePatchCall), [*locations dlp jobs cancel*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDlpJobCancelCall), [*locations dlp jobs create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDlpJobCreateCall), [*locations dlp jobs delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDlpJobDeleteCall), [*locations dlp jobs finish*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDlpJobFinishCall), [*locations dlp jobs 
get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDlpJobGetCall), [*locations dlp jobs hybrid inspect*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDlpJobHybridInspectCall), [*locations dlp jobs list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationDlpJobListCall), [*locations image redact*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationImageRedactCall), [*locations inspect templates create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationInspectTemplateCreateCall), [*locations inspect templates delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationInspectTemplateDeleteCall), [*locations inspect templates get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationInspectTemplateGetCall), [*locations inspect templates list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationInspectTemplateListCall), [*locations inspect templates patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationInspectTemplatePatchCall), [*locations job triggers activate*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationJobTriggerActivateCall), [*locations job triggers create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationJobTriggerCreateCall), [*locations job triggers delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationJobTriggerDeleteCall), [*locations job triggers get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationJobTriggerGetCall), [*locations job triggers hybrid inspect*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationJobTriggerHybridInspectCall), [*locations job triggers list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationJobTriggerListCall), [*locations job triggers 
patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationJobTriggerPatchCall), [*locations stored info types create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationStoredInfoTypeCreateCall), [*locations stored info types delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationStoredInfoTypeDeleteCall), [*locations stored info types get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationStoredInfoTypeGetCall), [*locations stored info types list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationStoredInfoTypeListCall), [*locations stored info types patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectLocationStoredInfoTypePatchCall), [*stored info types create*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectStoredInfoTypeCreateCall), [*stored info types delete*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectStoredInfoTypeDeleteCall), [*stored info types get*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectStoredInfoTypeGetCall), [*stored info types list*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectStoredInfoTypeListCall) and [*stored info types patch*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/api::ProjectStoredInfoTypePatchCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/DLP)** +* **[Hub](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/DLP)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::CallBuilder) -* **[Resources](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::CallBuilder) +* **[Resources](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::Part)** + * **[Parts](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -149,17 +149,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -169,29 +169,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::Delegate) to the -[Method Builder](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::Delegate) to the +[Method Builder](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::RequestValue) and -[decodable](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::RequestValue) and +[decodable](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dlp2/5.0.2-beta-1+20230121/google_dlp2/client::RequestValue) are moved +* [request values](https://docs.rs/google-dlp2/5.0.2+20230121/google_dlp2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/dlp2/src/api.rs b/gen/dlp2/src/api.rs index 5bb2980ce0..0b60c02f94 100644 --- a/gen/dlp2/src/api.rs +++ b/gen/dlp2/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> DLP { DLP { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dlp.googleapis.com/".to_string(), _root_url: "https://dlp.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> DLP { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dlp2/src/client.rs b/gen/dlp2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dlp2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dlp2/src/lib.rs b/gen/dlp2/src/lib.rs index 0073100fd4..885a9ce0b1 100644 --- a/gen/dlp2/src/lib.rs +++ b/gen/dlp2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *DLP* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *dlp:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *DLP* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *dlp:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *DLP* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/dlp/docs/). diff --git a/gen/dlp2_beta1-cli/Cargo.toml b/gen/dlp2_beta1-cli/Cargo.toml index cb93fca7a7..d63a0df790 100644 --- a/gen/dlp2_beta1-cli/Cargo.toml +++ b/gen/dlp2_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dlp2_beta1-cli" -version = "4.0.1+20171205" +version = "5.0.2+20171205" authors = ["Sebastian Thiel "] description = "A complete library to interact with DLP (protocol v2beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dlp2_beta1-cli" @@ -20,13 +20,13 @@ name = "dlp2-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dlp2_beta1] path = "../dlp2_beta1" -version = "4.0.1+20171205" +version = "5.0.2+20171205" + diff --git a/gen/dlp2_beta1-cli/README.md b/gen/dlp2_beta1-cli/README.md index dd59af683d..d6a143d06f 100644 --- a/gen/dlp2_beta1-cli/README.md +++ b/gen/dlp2_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the 
*DLP* API at revision *20171205*. The CLI is at version *4.0.1*. +This documentation was generated from the *DLP* API at revision *20171205*. The CLI is at version *5.0.2*. ```bash dlp2-beta1 [options] diff --git a/gen/dlp2_beta1-cli/mkdocs.yml b/gen/dlp2_beta1-cli/mkdocs.yml index 9746a3f325..1772e1ffaf 100644 --- a/gen/dlp2_beta1-cli/mkdocs.yml +++ b/gen/dlp2_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: DLP v4.0.1+20171205 +site_name: DLP v5.0.2+20171205 site_url: http://byron.github.io/google-apis-rs/google-dlp2_beta1-cli site_description: A complete library to interact with DLP (protocol v2beta1) @@ -7,24 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dlp2_beta1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['content_deidentify.md', 'Content', 'Deidentify'] -- ['content_inspect.md', 'Content', 'Inspect'] -- ['content_redact.md', 'Content', 'Redact'] -- ['data-source_analyze.md', 'Data Source', 'Analyze'] -- ['inspect_operations-cancel.md', 'Inspect', 'Operations Cancel'] -- ['inspect_operations-create.md', 'Inspect', 'Operations Create'] -- ['inspect_operations-delete.md', 'Inspect', 'Operations Delete'] -- ['inspect_operations-get.md', 'Inspect', 'Operations Get'] -- ['inspect_operations-list.md', 'Inspect', 'Operations List'] -- ['inspect_results-findings-list.md', 'Inspect', 'Results Findings List'] -- ['risk-analysis_operations-cancel.md', 'Risk Analysis', 'Operations Cancel'] -- ['risk-analysis_operations-delete.md', 'Risk Analysis', 'Operations Delete'] -- ['risk-analysis_operations-get.md', 'Risk Analysis', 'Operations Get'] -- ['risk-analysis_operations-list.md', 'Risk Analysis', 'Operations List'] -- ['root-categories_info-types-list.md', 'Root Categories', 'Info Types List'] -- ['root-categories_list.md', 'Root Categories', 'List'] +nav: +- Home: 'index.md' +- 'Content': + - 'Deidentify': 'content_deidentify.md' + - 'Inspect': 'content_inspect.md' + - 'Redact': 'content_redact.md' +- 'Data 
Source': + - 'Analyze': 'data-source_analyze.md' +- 'Inspect': + - 'Operations Cancel': 'inspect_operations-cancel.md' + - 'Operations Create': 'inspect_operations-create.md' + - 'Operations Delete': 'inspect_operations-delete.md' + - 'Operations Get': 'inspect_operations-get.md' + - 'Operations List': 'inspect_operations-list.md' + - 'Results Findings List': 'inspect_results-findings-list.md' +- 'Risk Analysis': + - 'Operations Cancel': 'risk-analysis_operations-cancel.md' + - 'Operations Delete': 'risk-analysis_operations-delete.md' + - 'Operations Get': 'risk-analysis_operations-get.md' + - 'Operations List': 'risk-analysis_operations-list.md' +- 'Root Categories': + - 'Info Types List': 'root-categories_info-types-list.md' + - 'List': 'root-categories_list.md' theme: readthedocs diff --git a/gen/dlp2_beta1-cli/src/client.rs b/gen/dlp2_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dlp2_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dlp2_beta1-cli/src/main.rs b/gen/dlp2_beta1-cli/src/main.rs index fb26aa090b..a51efa6993 100644 --- a/gen/dlp2_beta1-cli/src/main.rs +++ b/gen/dlp2_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dlp2_beta1::{api, Error, oauth2}; +use google_dlp2_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -705,7 +704,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -767,7 +766,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1017,7 +1016,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1736,7 +1735,7 @@ async fn main() { let mut app = App::new("dlp2-beta1") 
.author("Sebastian Thiel ") - .version("4.0.1+20171205") + .version("5.0.2+20171205") .about("The Google Data Loss Prevention API provides methods for detection of privacy-sensitive fragments in text, images, and Google Cloud Platform storage repositories.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dlp2_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/dlp2_beta1/Cargo.toml b/gen/dlp2_beta1/Cargo.toml index 5f07e4c8bc..c3790e941f 100644 --- a/gen/dlp2_beta1/Cargo.toml +++ b/gen/dlp2_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dlp2_beta1" -version = "5.0.2-beta-1+20171205" +version = "5.0.2+20171205" authors = ["Sebastian Thiel "] description = "A complete library to interact with DLP (protocol v2beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dlp2_beta1" homepage = "https://cloud.google.com/dlp/docs/" -documentation = "https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205" +documentation = "https://docs.rs/google-dlp2_beta1/5.0.2+20171205" license = "MIT" keywords = ["dlp", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dlp2_beta1/README.md b/gen/dlp2_beta1/README.md index dcc58b4564..a8636edd11 100644 --- a/gen/dlp2_beta1/README.md +++ b/gen/dlp2_beta1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-dlp2_beta1` library allows access to all features of the *Google DLP* service. -This documentation was generated from *DLP* crate version *5.0.2-beta-1+20171205*, where *20171205* is the exact revision of the *dlp:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *DLP* crate version *5.0.2+20171205*, where *20171205* is the exact revision of the *dlp:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *DLP* *v2_beta1* API can be found at the [official documentation site](https://cloud.google.com/dlp/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/DLP) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/DLP) ... * content - * [*deidentify*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::ContentDeidentifyCall), [*inspect*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::ContentInspectCall) and [*redact*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::ContentRedactCall) + * [*deidentify*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::ContentDeidentifyCall), [*inspect*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::ContentInspectCall) and [*redact*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::ContentRedactCall) * data source - * [*analyze*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::DataSourceAnalyzeCall) + * [*analyze*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::DataSourceAnalyzeCall) * inspect - * [*operations cancel*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::InspectOperationCancelCall), [*operations create*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::InspectOperationCreateCall), [*operations delete*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::InspectOperationDeleteCall), [*operations get*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::InspectOperationGetCall), [*operations 
list*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::InspectOperationListCall) and [*results findings list*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::InspectResultFindingListCall) + * [*operations cancel*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::InspectOperationCancelCall), [*operations create*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::InspectOperationCreateCall), [*operations delete*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::InspectOperationDeleteCall), [*operations get*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::InspectOperationGetCall), [*operations list*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::InspectOperationListCall) and [*results findings list*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::InspectResultFindingListCall) * risk analysis - * [*operations cancel*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::RiskAnalysiOperationCancelCall), [*operations delete*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::RiskAnalysiOperationDeleteCall), [*operations get*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::RiskAnalysiOperationGetCall) and [*operations list*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::RiskAnalysiOperationListCall) + * [*operations cancel*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::RiskAnalysiOperationCancelCall), [*operations delete*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::RiskAnalysiOperationDeleteCall), [*operations get*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::RiskAnalysiOperationGetCall) and [*operations 
list*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::RiskAnalysiOperationListCall) * root categories - * [*info types list*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::RootCategoryInfoTypeListCall) and [*list*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/api::RootCategoryListCall) + * [*info types list*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::RootCategoryInfoTypeListCall) and [*list*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/api::RootCategoryListCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/DLP)** +* **[Hub](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/DLP)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::Part)** + * 
**[Parts](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dlp2_beta1/5.0.2-beta-1+20171205/google_dlp2_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-dlp2_beta1/5.0.2+20171205/google_dlp2_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/dlp2_beta1/src/api.rs b/gen/dlp2_beta1/src/api.rs index f5e58f7e05..5e58fcd559 100644 --- a/gen/dlp2_beta1/src/api.rs +++ b/gen/dlp2_beta1/src/api.rs @@ -119,7 +119,7 @@ impl<'a, S> DLP { DLP { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dlp.googleapis.com/".to_string(), _root_url: "https://dlp.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> DLP { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dlp2_beta1/src/client.rs b/gen/dlp2_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dlp2_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dlp2_beta1/src/lib.rs b/gen/dlp2_beta1/src/lib.rs index d2649b86cc..713c618bb2 100644 --- a/gen/dlp2_beta1/src/lib.rs +++ b/gen/dlp2_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *DLP* crate version *5.0.2-beta-1+20171205*, where *20171205* is the exact revision of the *dlp:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *DLP* crate version *5.0.2+20171205*, where *20171205* is the exact revision of the *dlp:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *DLP* *v2_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/dlp/docs/). diff --git a/gen/dns1-cli/Cargo.toml b/gen/dns1-cli/Cargo.toml index a10f4fdb00..2c56b5cee7 100644 --- a/gen/dns1-cli/Cargo.toml +++ b/gen/dns1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dns1-cli" -version = "4.0.1+20220217" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dns (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dns1-cli" @@ -20,13 +20,13 @@ name = "dns1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dns1] path = "../dns1" -version = "4.0.1+20220217" +version = "5.0.2+20230119" + diff --git a/gen/dns1-cli/README.md b/gen/dns1-cli/README.md index 5e8139bd59..8a430d68d9 100644 --- a/gen/dns1-cli/README.md +++ b/gen/dns1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dns* API at revision *20220217*. The CLI is at version *4.0.1*. 
+This documentation was generated from the *Dns* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash dns1 [options] @@ -43,8 +43,11 @@ dns1 [options] create (-r )... [-p ]... [-o ] delete [-p ]... get [-p ]... [-o ] + get-iam-policy (-r )... [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] + set-iam-policy (-r )... [-p ]... [-o ] + test-iam-permissions (-r )... [-p ]... [-o ] update (-r )... [-p ]... [-o ] policies create (-r )... [-p ]... [-o ] diff --git a/gen/dns1-cli/mkdocs.yml b/gen/dns1-cli/mkdocs.yml index fb794394ea..25c7144b39 100644 --- a/gen/dns1-cli/mkdocs.yml +++ b/gen/dns1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dns v4.0.1+20220217 +site_name: Dns v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-dns1-cli site_description: A complete library to interact with Dns (protocol v1) @@ -7,45 +7,57 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dns1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['changes_create.md', 'Changes', 'Create'] -- ['changes_get.md', 'Changes', 'Get'] -- ['changes_list.md', 'Changes', 'List'] -- ['dns-keys_get.md', 'Dns Keys', 'Get'] -- ['dns-keys_list.md', 'Dns Keys', 'List'] -- ['managed-zone-operations_get.md', 'Managed Zone Operations', 'Get'] -- ['managed-zone-operations_list.md', 'Managed Zone Operations', 'List'] -- ['managed-zones_create.md', 'Managed Zones', 'Create'] -- ['managed-zones_delete.md', 'Managed Zones', 'Delete'] -- ['managed-zones_get.md', 'Managed Zones', 'Get'] -- ['managed-zones_list.md', 'Managed Zones', 'List'] -- ['managed-zones_patch.md', 'Managed Zones', 'Patch'] -- ['managed-zones_update.md', 'Managed Zones', 'Update'] -- ['policies_create.md', 'Policies', 'Create'] -- ['policies_delete.md', 'Policies', 'Delete'] -- ['policies_get.md', 'Policies', 'Get'] -- ['policies_list.md', 'Policies', 'List'] -- ['policies_patch.md', 'Policies', 'Patch'] -- ['policies_update.md', 'Policies', 'Update'] -- 
['projects_get.md', 'Projects', 'Get'] -- ['resource-record-sets_create.md', 'Resource Record Sets', 'Create'] -- ['resource-record-sets_delete.md', 'Resource Record Sets', 'Delete'] -- ['resource-record-sets_get.md', 'Resource Record Sets', 'Get'] -- ['resource-record-sets_list.md', 'Resource Record Sets', 'List'] -- ['resource-record-sets_patch.md', 'Resource Record Sets', 'Patch'] -- ['response-policies_create.md', 'Response Policies', 'Create'] -- ['response-policies_delete.md', 'Response Policies', 'Delete'] -- ['response-policies_get.md', 'Response Policies', 'Get'] -- ['response-policies_list.md', 'Response Policies', 'List'] -- ['response-policies_patch.md', 'Response Policies', 'Patch'] -- ['response-policies_update.md', 'Response Policies', 'Update'] -- ['response-policy-rules_create.md', 'Response Policy Rules', 'Create'] -- ['response-policy-rules_delete.md', 'Response Policy Rules', 'Delete'] -- ['response-policy-rules_get.md', 'Response Policy Rules', 'Get'] -- ['response-policy-rules_list.md', 'Response Policy Rules', 'List'] -- ['response-policy-rules_patch.md', 'Response Policy Rules', 'Patch'] -- ['response-policy-rules_update.md', 'Response Policy Rules', 'Update'] +nav: +- Home: 'index.md' +- 'Changes': + - 'Create': 'changes_create.md' + - 'Get': 'changes_get.md' + - 'List': 'changes_list.md' +- 'Dns Keys': + - 'Get': 'dns-keys_get.md' + - 'List': 'dns-keys_list.md' +- 'Managed Zone Operations': + - 'Get': 'managed-zone-operations_get.md' + - 'List': 'managed-zone-operations_list.md' +- 'Managed Zones': + - 'Create': 'managed-zones_create.md' + - 'Delete': 'managed-zones_delete.md' + - 'Get': 'managed-zones_get.md' + - 'Get Iam Policy': 'managed-zones_get-iam-policy.md' + - 'List': 'managed-zones_list.md' + - 'Patch': 'managed-zones_patch.md' + - 'Set Iam Policy': 'managed-zones_set-iam-policy.md' + - 'Test Iam Permissions': 'managed-zones_test-iam-permissions.md' + - 'Update': 'managed-zones_update.md' +- 'Policies': + - 'Create': 
'policies_create.md' + - 'Delete': 'policies_delete.md' + - 'Get': 'policies_get.md' + - 'List': 'policies_list.md' + - 'Patch': 'policies_patch.md' + - 'Update': 'policies_update.md' +- 'Projects': + - 'Get': 'projects_get.md' +- 'Resource Record Sets': + - 'Create': 'resource-record-sets_create.md' + - 'Delete': 'resource-record-sets_delete.md' + - 'Get': 'resource-record-sets_get.md' + - 'List': 'resource-record-sets_list.md' + - 'Patch': 'resource-record-sets_patch.md' +- 'Response Policies': + - 'Create': 'response-policies_create.md' + - 'Delete': 'response-policies_delete.md' + - 'Get': 'response-policies_get.md' + - 'List': 'response-policies_list.md' + - 'Patch': 'response-policies_patch.md' + - 'Update': 'response-policies_update.md' +- 'Response Policy Rules': + - 'Create': 'response-policy-rules_create.md' + - 'Delete': 'response-policy-rules_delete.md' + - 'Get': 'response-policy-rules_get.md' + - 'List': 'response-policy-rules_list.md' + - 'Patch': 'response-policy-rules_patch.md' + - 'Update': 'response-policy-rules_update.md' theme: readthedocs diff --git a/gen/dns1-cli/src/client.rs b/gen/dns1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dns1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct 
JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dns1-cli/src/main.rs b/gen/dns1-cli/src/main.rs index dfccb03653..d505387869 100644 --- a/gen/dns1-cli/src/main.rs +++ b/gen/dns1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dns1::{api, Error, oauth2}; +use google_dns1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -216,7 +215,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -334,7 +333,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "digest-type" => { call = call.digest_type(value.unwrap_or("")); @@ -455,7 +454,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -722,6 +721,91 @@ where } } + async fn _managed_zones_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "options.requested-policy-version" => Some(("options.requestedPolicyVersion", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["options", "requested-policy-version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1GetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.managed_zones().get_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _managed_zones_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.managed_zones().list(opt.value_of("project").unwrap_or("")); @@ -732,7 +816,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "dns-name" => { call = call.dns_name(value.unwrap_or("")); @@ -898,6 +982,178 @@ where } } + async fn _managed_zones_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = 
field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.managed_zones().set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _managed_zones_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.managed_zones().test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _managed_zones_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1221,7 +1477,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1541,15 +1797,20 @@ where match &temp_cursor.to_string()[..] { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.geo.enable-fencing" => Some(("routingPolicy.geo.enableFencing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "routing-policy.geo.kind" => Some(("routingPolicy.geo.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routing-policy.kind" => Some(("routingPolicy.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.backup-geo-targets.enable-fencing" => Some(("routingPolicy.primaryBackup.backupGeoTargets.enableFencing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.backup-geo-targets.kind" => Some(("routingPolicy.primaryBackup.backupGeoTargets.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.kind" => Some(("routingPolicy.primaryBackup.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.trickle-traffic" => Some(("routingPolicy.primaryBackup.trickleTraffic", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), 
"routing-policy.wrr.kind" => Some(("routingPolicy.wrr.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rrdatas" => Some(("rrdatas", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "signature-rrdatas" => Some(("signatureRrdatas", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "ttl" => Some(("ttl", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["geo", "kind", "name", "routing-policy", "rrdatas", "signature-rrdatas", "ttl", "type", "wrr"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-geo-targets", "enable-fencing", "geo", "kind", "name", "primary-backup", "routing-policy", "rrdatas", "signature-rrdatas", "trickle-traffic", "ttl", "type", "wrr"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1741,7 +2002,7 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1815,15 +2076,20 @@ where match &temp_cursor.to_string()[..] 
{ "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.geo.enable-fencing" => Some(("routingPolicy.geo.enableFencing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "routing-policy.geo.kind" => Some(("routingPolicy.geo.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routing-policy.kind" => Some(("routingPolicy.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.backup-geo-targets.enable-fencing" => Some(("routingPolicy.primaryBackup.backupGeoTargets.enableFencing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.backup-geo-targets.kind" => Some(("routingPolicy.primaryBackup.backupGeoTargets.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.kind" => Some(("routingPolicy.primaryBackup.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.trickle-traffic" => Some(("routingPolicy.primaryBackup.trickleTraffic", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "routing-policy.wrr.kind" => Some(("routingPolicy.wrr.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rrdatas" => Some(("rrdatas", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "signature-rrdatas" => Some(("signatureRrdatas", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "ttl" => Some(("ttl", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["geo", "kind", "name", "routing-policy", "rrdatas", "signature-rrdatas", "ttl", "type", 
"wrr"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-geo-targets", "enable-fencing", "geo", "kind", "name", "primary-backup", "routing-policy", "rrdatas", "signature-rrdatas", "trickle-traffic", "ttl", "type", "wrr"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1913,9 +2179,10 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "response-policy-name" => Some(("responsePolicyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "response-policy-name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "labels", "response-policy-name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2093,7 +2360,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2168,9 +2435,10 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Map })), "response-policy-name" => Some(("responsePolicyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "response-policy-name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "labels", "response-policy-name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2260,9 +2528,10 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "response-policy-name" => Some(("responsePolicyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "response-policy-name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "labels", "response-policy-name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2532,7 +2801,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2826,12 +3095,21 @@ where ("get", Some(opt)) => { call_result = self._managed_zones_get(opt, dry_run, &mut err).await; }, + ("get-iam-policy", Some(opt)) => { + call_result = self._managed_zones_get_iam_policy(opt, dry_run, &mut err).await; + }, 
("list", Some(opt)) => { call_result = self._managed_zones_list(opt, dry_run, &mut err).await; }, ("patch", Some(opt)) => { call_result = self._managed_zones_patch(opt, dry_run, &mut err).await; }, + ("set-iam-policy", Some(opt)) => { + call_result = self._managed_zones_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("test-iam-permissions", Some(opt)) => { + call_result = self._managed_zones_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("update", Some(opt)) => { call_result = self._managed_zones_update(opt, dry_run, &mut err).await; }, @@ -3255,7 +3533,7 @@ async fn main() { ]), ]), - ("managed-zones", "methods: 'create', 'delete', 'get', 'list', 'patch' and 'update'", vec![ + ("managed-zones", "methods: 'create', 'delete', 'get', 'get-iam-policy', 'list', 'patch', 'set-iam-policy', 'test-iam-permissions' and 'update'", vec![ ("create", Some(r##"Creates a new ManagedZone."##), "Details at http://byron.github.io/google-apis-rs/google_dns1_cli/managed-zones_create", @@ -3328,6 +3606,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_dns1_cli/managed-zones_get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3384,6 +3690,62 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_dns1_cli/managed-zones_set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this returns an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_dns1_cli/managed-zones_test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3926,7 +4288,7 @@ async fn main() { (Some(r##"response-policy"##), None, - Some(r##"User assigned name of the Respones Policy addressed by this request."##), + Some(r##"User assigned name of the response policy addressed by this request."##), Some(true), Some(false)), @@ -4195,7 +4557,7 @@ async fn main() { let mut app = App::new("dns1") .author("Sebastian Thiel ") - .version("4.0.1+20220217") + .version("5.0.2+20230119") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dns1_cli") .arg(Arg::with_name("url") diff --git a/gen/dns1/Cargo.toml b/gen/dns1/Cargo.toml index 5894b1fa3e..74ee2e32bc 100644 --- a/gen/dns1/Cargo.toml +++ b/gen/dns1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dns1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dns 
(protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dns1" homepage = "https://cloud.google.com/dns/docs" -documentation = "https://docs.rs/google-dns1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-dns1/5.0.2+20230119" license = "MIT" keywords = ["dns", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dns1/README.md b/gen/dns1/README.md index 755af2b242..67c935a473 100644 --- a/gen/dns1/README.md +++ b/gen/dns1/README.md @@ -5,32 +5,32 @@ DO NOT EDIT ! --> The `google-dns1` library allows access to all features of the *Google Dns* service. -This documentation was generated from *Dns* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *dns:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Dns* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *dns:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dns* *v1* API can be found at the [official documentation site](https://cloud.google.com/dns/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/Dns) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/Dns) ... 
-* [changes](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::Change) - * [*create*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ChangeCreateCall), [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ChangeGetCall) and [*list*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ChangeListCall) -* [dns keys](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::DnsKey) - * [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::DnsKeyGetCall) and [*list*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::DnsKeyListCall) +* [changes](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::Change) + * [*create*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ChangeCreateCall), [*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ChangeGetCall) and [*list*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ChangeListCall) +* [dns keys](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::DnsKey) + * [*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::DnsKeyGetCall) and [*list*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::DnsKeyListCall) * managed zone operations - * [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneOperationGetCall) and [*list*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneOperationListCall) -* [managed zones](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZone) - * [*create*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneGetCall), [*get iam 
policy*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneGetIamPolicyCall), [*list*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneListCall), [*patch*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZonePatchCall), [*set iam policy*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneTestIamPermissionCall) and [*update*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ManagedZoneUpdateCall) -* [policies](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::Policy) - * [*create*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::PolicyCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::PolicyDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::PolicyGetCall), [*list*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::PolicyListCall), [*patch*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::PolicyPatchCall) and [*update*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::PolicyUpdateCall) -* [projects](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::Project) - * [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ProjectGetCall) -* [resource record sets](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResourceRecordSet) - * [*create*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResourceRecordSetCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResourceRecordSetDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResourceRecordSetGetCall), 
[*list*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResourceRecordSetListCall) and [*patch*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResourceRecordSetPatchCall) -* [response policies](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicy) - * [*create*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyGetCall), [*list*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyListCall), [*patch*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyPatchCall) and [*update*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyUpdateCall) -* [response policy rules](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyRule) - * [*create*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyRuleCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyRuleDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyRuleGetCall), [*list*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyRuleListCall), [*patch*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyRulePatchCall) and [*update*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/api::ResponsePolicyRuleUpdateCall) + * [*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneOperationGetCall) and [*list*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneOperationListCall) +* [managed zones](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZone) 
+ * [*create*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneGetCall), [*get iam policy*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneGetIamPolicyCall), [*list*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneListCall), [*patch*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZonePatchCall), [*set iam policy*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneTestIamPermissionCall) and [*update*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ManagedZoneUpdateCall) +* [policies](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::Policy) + * [*create*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::PolicyCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::PolicyDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::PolicyGetCall), [*list*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::PolicyListCall), [*patch*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::PolicyPatchCall) and [*update*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::PolicyUpdateCall) +* [projects](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::Project) + * [*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ProjectGetCall) +* [resource record sets](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResourceRecordSet) + * [*create*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResourceRecordSetCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResourceRecordSetDeleteCall), 
[*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResourceRecordSetGetCall), [*list*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResourceRecordSetListCall) and [*patch*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResourceRecordSetPatchCall) +* [response policies](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicy) + * [*create*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyGetCall), [*list*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyListCall), [*patch*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyPatchCall) and [*update*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyUpdateCall) +* [response policy rules](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyRule) + * [*create*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyRuleCreateCall), [*delete*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyRuleDeleteCall), [*get*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyRuleGetCall), [*list*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyRuleListCall), [*patch*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyRulePatchCall) and [*update*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/api::ResponsePolicyRuleUpdateCall) @@ -39,17 +39,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/Dns)** +* **[Hub](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/Dns)** * 
a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::CallBuilder) -* **[Resources](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::CallBuilder) +* **[Resources](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::Part)** + * **[Parts](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -145,17 +145,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -165,29 +165,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::Delegate) to the -[Method Builder](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::Delegate) to the +[Method Builder](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::RequestValue) and -[decodable](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::RequestValue) and +[decodable](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dns1/5.0.2-beta-1+20230119/google_dns1/client::RequestValue) are moved +* [request values](https://docs.rs/google-dns1/5.0.2+20230119/google_dns1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/dns1/src/api.rs b/gen/dns1/src/api.rs index c5c3b619a2..54f4d5c17e 100644 --- a/gen/dns1/src/api.rs +++ b/gen/dns1/src/api.rs @@ -134,7 +134,7 @@ impl<'a, S> Dns { Dns { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dns.googleapis.com/".to_string(), _root_url: "https://dns.googleapis.com/".to_string(), } @@ -169,7 +169,7 @@ impl<'a, S> Dns { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. 
/// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dns1/src/client.rs b/gen/dns1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dns1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dns1/src/lib.rs b/gen/dns1/src/lib.rs index 60a9f917f3..09c8294b93 100644 --- a/gen/dns1/src/lib.rs +++ b/gen/dns1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dns* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *dns:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dns* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *dns:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dns* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/dns/docs). diff --git a/gen/dns2-cli/Cargo.toml b/gen/dns2-cli/Cargo.toml index e8c1a6cdbc..736ee16b55 100644 --- a/gen/dns2-cli/Cargo.toml +++ b/gen/dns2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-dns2-cli" -version = "4.0.1+20220217" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dns (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dns2-cli" @@ -20,13 +20,13 @@ name = "dns2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-dns2] path = "../dns2" -version = "4.0.1+20220217" +version = "5.0.2+20230119" + diff --git a/gen/dns2-cli/README.md b/gen/dns2-cli/README.md index 02b69cb9e1..67d5c29ae8 100644 --- a/gen/dns2-cli/README.md +++ b/gen/dns2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dns* API at revision *20220217*. The CLI is at version *4.0.1*. 
+This documentation was generated from the *Dns* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash dns2 [options] @@ -43,8 +43,11 @@ dns2 [options] create (-r )... [-p ]... [-o ] delete [-p ]... get [-p ]... [-o ] + get-iam-policy (-r )... [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] + set-iam-policy (-r )... [-p ]... [-o ] + test-iam-permissions (-r )... [-p ]... [-o ] update (-r )... [-p ]... [-o ] policies create (-r )... [-p ]... [-o ] diff --git a/gen/dns2-cli/mkdocs.yml b/gen/dns2-cli/mkdocs.yml index 6bfa60f504..f7563f52a2 100644 --- a/gen/dns2-cli/mkdocs.yml +++ b/gen/dns2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dns v4.0.1+20220217 +site_name: Dns v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-dns2-cli site_description: A complete library to interact with Dns (protocol v2) @@ -7,45 +7,57 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/dns2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['changes_create.md', 'Changes', 'Create'] -- ['changes_get.md', 'Changes', 'Get'] -- ['changes_list.md', 'Changes', 'List'] -- ['dns-keys_get.md', 'Dns Keys', 'Get'] -- ['dns-keys_list.md', 'Dns Keys', 'List'] -- ['managed-zone-operations_get.md', 'Managed Zone Operations', 'Get'] -- ['managed-zone-operations_list.md', 'Managed Zone Operations', 'List'] -- ['managed-zones_create.md', 'Managed Zones', 'Create'] -- ['managed-zones_delete.md', 'Managed Zones', 'Delete'] -- ['managed-zones_get.md', 'Managed Zones', 'Get'] -- ['managed-zones_list.md', 'Managed Zones', 'List'] -- ['managed-zones_patch.md', 'Managed Zones', 'Patch'] -- ['managed-zones_update.md', 'Managed Zones', 'Update'] -- ['policies_create.md', 'Policies', 'Create'] -- ['policies_delete.md', 'Policies', 'Delete'] -- ['policies_get.md', 'Policies', 'Get'] -- ['policies_list.md', 'Policies', 'List'] -- ['policies_patch.md', 'Policies', 'Patch'] -- ['policies_update.md', 'Policies', 'Update'] -- 
['projects_get.md', 'Projects', 'Get'] -- ['resource-record-sets_create.md', 'Resource Record Sets', 'Create'] -- ['resource-record-sets_delete.md', 'Resource Record Sets', 'Delete'] -- ['resource-record-sets_get.md', 'Resource Record Sets', 'Get'] -- ['resource-record-sets_list.md', 'Resource Record Sets', 'List'] -- ['resource-record-sets_patch.md', 'Resource Record Sets', 'Patch'] -- ['response-policies_create.md', 'Response Policies', 'Create'] -- ['response-policies_delete.md', 'Response Policies', 'Delete'] -- ['response-policies_get.md', 'Response Policies', 'Get'] -- ['response-policies_list.md', 'Response Policies', 'List'] -- ['response-policies_patch.md', 'Response Policies', 'Patch'] -- ['response-policies_update.md', 'Response Policies', 'Update'] -- ['response-policy-rules_create.md', 'Response Policy Rules', 'Create'] -- ['response-policy-rules_delete.md', 'Response Policy Rules', 'Delete'] -- ['response-policy-rules_get.md', 'Response Policy Rules', 'Get'] -- ['response-policy-rules_list.md', 'Response Policy Rules', 'List'] -- ['response-policy-rules_patch.md', 'Response Policy Rules', 'Patch'] -- ['response-policy-rules_update.md', 'Response Policy Rules', 'Update'] +nav: +- Home: 'index.md' +- 'Changes': + - 'Create': 'changes_create.md' + - 'Get': 'changes_get.md' + - 'List': 'changes_list.md' +- 'Dns Keys': + - 'Get': 'dns-keys_get.md' + - 'List': 'dns-keys_list.md' +- 'Managed Zone Operations': + - 'Get': 'managed-zone-operations_get.md' + - 'List': 'managed-zone-operations_list.md' +- 'Managed Zones': + - 'Create': 'managed-zones_create.md' + - 'Delete': 'managed-zones_delete.md' + - 'Get': 'managed-zones_get.md' + - 'Get Iam Policy': 'managed-zones_get-iam-policy.md' + - 'List': 'managed-zones_list.md' + - 'Patch': 'managed-zones_patch.md' + - 'Set Iam Policy': 'managed-zones_set-iam-policy.md' + - 'Test Iam Permissions': 'managed-zones_test-iam-permissions.md' + - 'Update': 'managed-zones_update.md' +- 'Policies': + - 'Create': 
'policies_create.md' + - 'Delete': 'policies_delete.md' + - 'Get': 'policies_get.md' + - 'List': 'policies_list.md' + - 'Patch': 'policies_patch.md' + - 'Update': 'policies_update.md' +- 'Projects': + - 'Get': 'projects_get.md' +- 'Resource Record Sets': + - 'Create': 'resource-record-sets_create.md' + - 'Delete': 'resource-record-sets_delete.md' + - 'Get': 'resource-record-sets_get.md' + - 'List': 'resource-record-sets_list.md' + - 'Patch': 'resource-record-sets_patch.md' +- 'Response Policies': + - 'Create': 'response-policies_create.md' + - 'Delete': 'response-policies_delete.md' + - 'Get': 'response-policies_get.md' + - 'List': 'response-policies_list.md' + - 'Patch': 'response-policies_patch.md' + - 'Update': 'response-policies_update.md' +- 'Response Policy Rules': + - 'Create': 'response-policy-rules_create.md' + - 'Delete': 'response-policy-rules_delete.md' + - 'Get': 'response-policy-rules_get.md' + - 'List': 'response-policy-rules_list.md' + - 'Patch': 'response-policy-rules_patch.md' + - 'Update': 'response-policy-rules_update.md' theme: readthedocs diff --git a/gen/dns2-cli/src/client.rs b/gen/dns2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/dns2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct 
JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/dns2-cli/src/main.rs b/gen/dns2-cli/src/main.rs index 8c1f138e8f..a92f8ce48a 100644 --- a/gen/dns2-cli/src/main.rs +++ b/gen/dns2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_dns2::{api, Error, oauth2}; +use google_dns2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -216,7 +215,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -334,7 +333,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "digest-type" => { call = call.digest_type(value.unwrap_or("")); @@ -455,7 +454,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -722,6 +721,91 @@ where } } + async fn _managed_zones_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "options.requested-policy-version" => Some(("options.requestedPolicyVersion", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["options", "requested-policy-version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1GetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.managed_zones().get_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _managed_zones_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.managed_zones().list(opt.value_of("project").unwrap_or(""), opt.value_of("location").unwrap_or("")); @@ -732,7 +816,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "dns-name" => { call = call.dns_name(value.unwrap_or("")); @@ -898,6 +982,178 @@ where } } + async fn _managed_zones_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + 
let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.managed_zones().set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for 
scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _managed_zones_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.managed_zones().test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _managed_zones_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1221,7 +1477,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1541,15 +1797,20 @@ where match &temp_cursor.to_string()[..] { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.geo.enable-fencing" => Some(("routingPolicy.geo.enableFencing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "routing-policy.geo.kind" => Some(("routingPolicy.geo.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routing-policy.kind" => Some(("routingPolicy.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.backup-geo-targets.enable-fencing" => Some(("routingPolicy.primaryBackup.backupGeoTargets.enableFencing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.backup-geo-targets.kind" => Some(("routingPolicy.primaryBackup.backupGeoTargets.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.kind" => Some(("routingPolicy.primaryBackup.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.trickle-traffic" => Some(("routingPolicy.primaryBackup.trickleTraffic", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), 
"routing-policy.wrr.kind" => Some(("routingPolicy.wrr.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rrdatas" => Some(("rrdatas", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "signature-rrdatas" => Some(("signatureRrdatas", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "ttl" => Some(("ttl", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["geo", "kind", "name", "routing-policy", "rrdatas", "signature-rrdatas", "ttl", "type", "wrr"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-geo-targets", "enable-fencing", "geo", "kind", "name", "primary-backup", "routing-policy", "rrdatas", "signature-rrdatas", "trickle-traffic", "ttl", "type", "wrr"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1733,7 +1994,7 @@ where call = call.name(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1807,15 +2068,20 @@ where match &temp_cursor.to_string()[..] 
{ "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.geo.enable-fencing" => Some(("routingPolicy.geo.enableFencing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "routing-policy.geo.kind" => Some(("routingPolicy.geo.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "routing-policy.kind" => Some(("routingPolicy.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.backup-geo-targets.enable-fencing" => Some(("routingPolicy.primaryBackup.backupGeoTargets.enableFencing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.backup-geo-targets.kind" => Some(("routingPolicy.primaryBackup.backupGeoTargets.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.kind" => Some(("routingPolicy.primaryBackup.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "routing-policy.primary-backup.trickle-traffic" => Some(("routingPolicy.primaryBackup.trickleTraffic", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "routing-policy.wrr.kind" => Some(("routingPolicy.wrr.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rrdatas" => Some(("rrdatas", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "signature-rrdatas" => Some(("signatureRrdatas", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "ttl" => Some(("ttl", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["geo", "kind", "name", "routing-policy", "rrdatas", "signature-rrdatas", "ttl", "type", 
"wrr"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-geo-targets", "enable-fencing", "geo", "kind", "name", "primary-backup", "routing-policy", "rrdatas", "signature-rrdatas", "trickle-traffic", "ttl", "type", "wrr"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1905,9 +2171,10 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "response-policy-name" => Some(("responsePolicyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "response-policy-name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "labels", "response-policy-name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2085,7 +2352,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2160,9 +2427,10 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Map })), "response-policy-name" => Some(("responsePolicyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "response-policy-name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "labels", "response-policy-name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2252,9 +2520,10 @@ where "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "response-policy-name" => Some(("responsePolicyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "response-policy-name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "id", "kind", "labels", "response-policy-name"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2524,7 +2793,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2818,12 +3087,21 @@ where ("get", Some(opt)) => { call_result = self._managed_zones_get(opt, dry_run, &mut err).await; }, + ("get-iam-policy", Some(opt)) => { + call_result = self._managed_zones_get_iam_policy(opt, dry_run, &mut err).await; + }, 
("list", Some(opt)) => { call_result = self._managed_zones_list(opt, dry_run, &mut err).await; }, ("patch", Some(opt)) => { call_result = self._managed_zones_patch(opt, dry_run, &mut err).await; }, + ("set-iam-policy", Some(opt)) => { + call_result = self._managed_zones_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("test-iam-permissions", Some(opt)) => { + call_result = self._managed_zones_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("update", Some(opt)) => { call_result = self._managed_zones_update(opt, dry_run, &mut err).await; }, @@ -3289,7 +3567,7 @@ async fn main() { ]), ]), - ("managed-zones", "methods: 'create', 'delete', 'get', 'list', 'patch' and 'update'", vec![ + ("managed-zones", "methods: 'create', 'delete', 'get', 'get-iam-policy', 'list', 'patch', 'set-iam-policy', 'test-iam-permissions' and 'update'", vec![ ("create", Some(r##"Creates a new ManagedZone."##), "Details at http://byron.github.io/google-apis-rs/google_dns2_cli/managed-zones_create", @@ -3380,6 +3658,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_dns2_cli/managed-zones_get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3448,6 +3754,62 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_dns2_cli/managed-zones_set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this returns an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_dns2_cli/managed-zones_test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -4092,7 +4454,7 @@ async fn main() { (Some(r##"response-policy"##), None, - Some(r##"User assigned name of the Respones Policy addressed by this request."##), + Some(r##"User assigned name of the response policy addressed by this request."##), Some(true), Some(false)), @@ -4403,7 +4765,7 @@ async fn main() { let mut app = App::new("dns2") .author("Sebastian Thiel ") - .version("4.0.1+20220217") + .version("5.0.2+20230119") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_dns2_cli") .arg(Arg::with_name("url") diff --git a/gen/dns2/Cargo.toml b/gen/dns2/Cargo.toml index de4a61fbad..75633250ba 100644 --- a/gen/dns2/Cargo.toml +++ b/gen/dns2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-dns2" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dns 
(protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/dns2" homepage = "https://cloud.google.com/dns/docs" -documentation = "https://docs.rs/google-dns2/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-dns2/5.0.2+20230119" license = "MIT" keywords = ["dns", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/dns2/README.md b/gen/dns2/README.md index 4040659898..dc1d342163 100644 --- a/gen/dns2/README.md +++ b/gen/dns2/README.md @@ -5,32 +5,32 @@ DO NOT EDIT ! --> The `google-dns2` library allows access to all features of the *Google Dns* service. -This documentation was generated from *Dns* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *dns:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Dns* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *dns:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dns* *v2* API can be found at the [official documentation site](https://cloud.google.com/dns/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/Dns) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/Dns) ... 
-* [changes](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::Change) - * [*create*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ChangeCreateCall), [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ChangeGetCall) and [*list*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ChangeListCall) -* [dns keys](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::DnsKey) - * [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::DnsKeyGetCall) and [*list*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::DnsKeyListCall) +* [changes](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::Change) + * [*create*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ChangeCreateCall), [*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ChangeGetCall) and [*list*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ChangeListCall) +* [dns keys](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::DnsKey) + * [*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::DnsKeyGetCall) and [*list*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::DnsKeyListCall) * managed zone operations - * [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneOperationGetCall) and [*list*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneOperationListCall) -* [managed zones](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZone) - * [*create*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneGetCall), [*get iam 
policy*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneGetIamPolicyCall), [*list*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneListCall), [*patch*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZonePatchCall), [*set iam policy*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneTestIamPermissionCall) and [*update*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ManagedZoneUpdateCall) -* [policies](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::Policy) - * [*create*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::PolicyCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::PolicyDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::PolicyGetCall), [*list*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::PolicyListCall), [*patch*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::PolicyPatchCall) and [*update*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::PolicyUpdateCall) -* [projects](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::Project) - * [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ProjectGetCall) -* [resource record sets](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResourceRecordSet) - * [*create*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResourceRecordSetCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResourceRecordSetDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResourceRecordSetGetCall), 
[*list*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResourceRecordSetListCall) and [*patch*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResourceRecordSetPatchCall) -* [response policies](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicy) - * [*create*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyGetCall), [*list*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyListCall), [*patch*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyPatchCall) and [*update*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyUpdateCall) -* [response policy rules](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyRule) - * [*create*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyRuleCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyRuleDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyRuleGetCall), [*list*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyRuleListCall), [*patch*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyRulePatchCall) and [*update*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/api::ResponsePolicyRuleUpdateCall) + * [*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneOperationGetCall) and [*list*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneOperationListCall) +* [managed zones](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZone) 
+ * [*create*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneGetCall), [*get iam policy*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneGetIamPolicyCall), [*list*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneListCall), [*patch*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZonePatchCall), [*set iam policy*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneTestIamPermissionCall) and [*update*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ManagedZoneUpdateCall) +* [policies](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::Policy) + * [*create*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::PolicyCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::PolicyDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::PolicyGetCall), [*list*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::PolicyListCall), [*patch*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::PolicyPatchCall) and [*update*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::PolicyUpdateCall) +* [projects](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::Project) + * [*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ProjectGetCall) +* [resource record sets](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResourceRecordSet) + * [*create*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResourceRecordSetCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResourceRecordSetDeleteCall), 
[*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResourceRecordSetGetCall), [*list*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResourceRecordSetListCall) and [*patch*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResourceRecordSetPatchCall) +* [response policies](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicy) + * [*create*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyGetCall), [*list*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyListCall), [*patch*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyPatchCall) and [*update*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyUpdateCall) +* [response policy rules](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyRule) + * [*create*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyRuleCreateCall), [*delete*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyRuleDeleteCall), [*get*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyRuleGetCall), [*list*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyRuleListCall), [*patch*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyRulePatchCall) and [*update*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/api::ResponsePolicyRuleUpdateCall) @@ -39,17 +39,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/Dns)** +* **[Hub](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/Dns)** * 
a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::CallBuilder) -* **[Resources](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::CallBuilder) +* **[Resources](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::Part)** + * **[Parts](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -145,17 +145,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -165,29 +165,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::Delegate) to the -[Method Builder](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::Delegate) to the +[Method Builder](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::RequestValue) and -[decodable](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::RequestValue) and +[decodable](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-dns2/5.0.2-beta-1+20230119/google_dns2/client::RequestValue) are moved +* [request values](https://docs.rs/google-dns2/5.0.2+20230119/google_dns2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/dns2/src/api.rs b/gen/dns2/src/api.rs index ea3fdde944..e5fe0c372c 100644 --- a/gen/dns2/src/api.rs +++ b/gen/dns2/src/api.rs @@ -134,7 +134,7 @@ impl<'a, S> Dns { Dns { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://dns.googleapis.com/".to_string(), _root_url: "https://dns.googleapis.com/".to_string(), } @@ -169,7 +169,7 @@ impl<'a, S> Dns { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. 
/// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/dns2/src/client.rs b/gen/dns2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/dns2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/dns2/src/lib.rs b/gen/dns2/src/lib.rs index cdf712d84c..018af7e336 100644 --- a/gen/dns2/src/lib.rs +++ b/gen/dns2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dns* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *dns:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dns* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *dns:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dns* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/dns/docs). diff --git a/gen/docs1-cli/Cargo.toml b/gen/docs1-cli/Cargo.toml index 60460f3879..7672088874 100644 --- a/gen/docs1-cli/Cargo.toml +++ b/gen/docs1-cli/Cargo.toml @@ -1,10 +1,10 @@ # DO NOT EDIT ! -# This file was generated automatically from 'src/mako/Cargo.toml.mako' +# This file was generated automatically from 'src/generator/templates/Cargo.toml.mako' # DO NOT EDIT ! [package] name = "google-docs1-cli" -version = "3.1.0+20220301" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Docs (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/docs1-cli" @@ -20,21 +20,23 @@ name = "docs1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 6.6" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" +http = "^0.2" hyper = { version = "0.14", features = ["full"] } tokio = { version = "^ 1.0", features = ["full"] } - +tower-service = "^0.3.1" [dependencies.google-docs1] path = "../docs1" -version = "3.1.0+20220301" +version = "5.0.2+20230119" + diff --git a/gen/docs1-cli/LICENSE.md b/gen/docs1-cli/LICENSE.md index d2306e20eb..63756554ea 100644 
--- a/gen/docs1-cli/LICENSE.md +++ b/gen/docs1-cli/LICENSE.md @@ -1,6 +1,6 @@ The MIT License (MIT) diff --git a/gen/docs1-cli/README.md b/gen/docs1-cli/README.md index 1b47a4ecf1..470242a575 100644 --- a/gen/docs1-cli/README.md +++ b/gen/docs1-cli/README.md @@ -1,6 +1,6 @@ The `docs1` command-line interface *(CLI)* allows to use most features of the *Google Docs* service from the comfort of your terminal. @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Docs* API at revision *20220301*. The CLI is at version *3.1.0*. +This documentation was generated from the *Docs* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash docs1 [options] @@ -104,4 +104,4 @@ You may consider redirecting standard error into a file for ease of use, e.g. `d [scopes]: https://developers.google.com/+/api/oauth#scopes [revoke-access]: http://webapps.stackexchange.com/a/30849 [google-dev-console]: https://console.developers.google.com/ -[google-project-new]: https://developers.google.com/console/help/new/ \ No newline at end of file +[google-project-new]: https://developers.google.com/console/help/new/ diff --git a/gen/docs1-cli/mkdocs.yml b/gen/docs1-cli/mkdocs.yml index 92cfc99ac5..b622c11cf7 100644 --- a/gen/docs1-cli/mkdocs.yml +++ b/gen/docs1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Docs v3.1.0+20220301 +site_name: Docs v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-docs1-cli site_description: A complete library to interact with Docs (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/docs1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['documents_batch-update.md', 'Documents', 'Batch Update'] -- ['documents_create.md', 'Documents', 'Create'] -- ['documents_get.md', 'Documents', 'Get'] +nav: +- Home: 'index.md' +- 'Documents': + - 'Batch Update': 'documents_batch-update.md' + 
- 'Create': 'documents_create.md' + - 'Get': 'documents_get.md' theme: readthedocs diff --git a/gen/docs1-cli/src/client.rs b/gen/docs1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/docs1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/docs1-cli/src/main.rs b/gen/docs1-cli/src/main.rs index 1f0a1c89e6..878096c206 100644 --- a/gen/docs1-cli/src/main.rs +++ b/gen/docs1-cli/src/main.rs @@ -1,10 +1,8 @@ // DO NOT EDIT ! -// This file was generated automatically from 'src/mako/cli/main.rs.mako' +// This file was generated automatically from 'src/generator/templates/cli/main.rs.mako' // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,34 +10,46 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_docs1::{api, Error, oauth2}; +use google_docs1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo}; use std::default::Default; +use std::error::Error as StdError; use std::str::FromStr; use serde_json as json; use clap::ArgMatches; +use http::Uri; +use hyper::client::connect; +use tokio::io::{AsyncRead, AsyncWrite}; +use tower_service; enum DoitError { IoError(String, io::Error), ApiError(Error), } -struct Engine<'n> { +struct Engine<'n, S> { opt: ArgMatches<'n>, - hub: api::Docs, + hub: api::Docs, gp: Vec<&'static str>, gpm: Vec<(&'static str, &'static str)>, } -impl<'n> Engine<'n> { +impl<'n, S> Engine<'n, S> +where + S: tower_service::Service + Clone + Send + Sync + 'static, + S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, + S::Future: Send + Unpin + 'static, + S::Error: Into>, +{ async fn _documents_batch_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), 
DoitError> { @@ -338,7 +348,7 @@ impl<'n> Engine<'n> { } // Please note that this call will fail if any part of the opt can't be handled - async fn new(opt: ArgMatches<'n>) -> Result, InvalidOptionsError> { + async fn new(opt: ArgMatches<'n>, connector: S) -> Result, InvalidOptionsError> { let (config_dir, secret) = { let config_dir = match client::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) { Err(e) => return Err(InvalidOptionsError::single(e, 3)), @@ -352,18 +362,14 @@ impl<'n> Engine<'n> { } }; - let auth = oauth2::InstalledFlowAuthenticator::builder( + let client = hyper::Client::builder().build(connector); + + let auth = oauth2::InstalledFlowAuthenticator::with_client( secret, oauth2::InstalledFlowReturnMethod::HTTPRedirect, + client.clone(), ).persist_tokens_to_disk(format!("{}/docs1", config_dir)).build().await.unwrap(); - let client = hyper::Client::builder().build( - hyper_rustls::HttpsConnectorBuilder::new().with_native_roots() - .https_or_http() - .enable_http1() - .enable_http2() - .build() - ); let engine = Engine { opt: opt, hub: api::Docs::new(client, auth), @@ -477,7 +483,7 @@ async fn main() { let mut app = App::new("docs1") .author("Sebastian Thiel ") - .version("3.1.0+20220301") + .version("5.0.2+20230119") .about("Reads and writes Google Docs documents.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_docs1_cli") .arg(Arg::with_name("url") @@ -539,8 +545,14 @@ async fn main() { let matches = app.get_matches(); - let debug = matches.is_present("debug"); - match Engine::new(matches).await { + let debug = matches.is_present("adebug"); + let connector = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots() + .https_or_http() + .enable_http1() + .enable_http2() + .build(); + + match Engine::new(matches, connector).await { Err(err) => { exit_status = err.exit_code; writeln!(io::stderr(), "{}", err).ok(); diff --git a/gen/docs1/Cargo.toml 
b/gen/docs1/Cargo.toml index 4ce85d28d6..7ccca46c06 100644 --- a/gen/docs1/Cargo.toml +++ b/gen/docs1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-docs1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Docs (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/docs1" homepage = "https://developers.google.com/docs/" -documentation = "https://docs.rs/google-docs1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-docs1/5.0.2+20230119" license = "MIT" keywords = ["docs", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/docs1/README.md b/gen/docs1/README.md index 67aeb98016..6d1489dcd6 100644 --- a/gen/docs1/README.md +++ b/gen/docs1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-docs1` library allows access to all features of the *Google Docs* service. -This documentation was generated from *Docs* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *docs:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Docs* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *docs:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Docs* *v1* API can be found at the [official documentation site](https://developers.google.com/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/Docs) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/Docs) ... 
-* [documents](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/api::Document) - * [*batch update*](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/api::DocumentBatchUpdateCall), [*create*](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/api::DocumentCreateCall) and [*get*](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/api::DocumentGetCall) +* [documents](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/api::Document) + * [*batch update*](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/api::DocumentBatchUpdateCall), [*create*](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/api::DocumentCreateCall) and [*get*](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/api::DocumentGetCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/Docs)** +* **[Hub](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/Docs)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::CallBuilder) -* **[Resources](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::CallBuilder) +* **[Resources](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::Part)** + * **[Parts](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::Delegate) to the -[Method Builder](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::Delegate) to the +[Method Builder](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort.
+The [delegate trait](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::RequestValue) and -[decodable](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::RequestValue) and +[decodable](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid -Most optionals are are considered [Parts](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true.
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-docs1/5.0.2-beta-1+20230119/google_docs1/client::RequestValue) are moved +* [request values](https://docs.rs/google-docs1/5.0.2+20230119/google_docs1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/docs1/src/api.rs b/gen/docs1/src/api.rs index e46261f424..d9a81268d5 100644 --- a/gen/docs1/src/api.rs +++ b/gen/docs1/src/api.rs @@ -141,7 +141,7 @@ impl<'a, S> Docs { Docs { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://docs.googleapis.com/".to_string(), _root_url: "https://docs.googleapis.com/".to_string(), } @@ -152,7 +152,7 @@ impl<'a, S> Docs { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/docs1/src/client.rs b/gen/docs1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/docs1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/docs1/src/lib.rs b/gen/docs1/src/lib.rs index 41a7320bb4..2fd984dab1 100644 --- a/gen/docs1/src/lib.rs +++ b/gen/docs1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Docs* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *docs:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Docs* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *docs:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Docs* *v1* API can be found at the //! [official documentation site](https://developers.google.com/docs/). diff --git a/gen/documentai1-cli/Cargo.toml b/gen/documentai1-cli/Cargo.toml index 9f0a76e814..1dd42c0c8a 100644 --- a/gen/documentai1-cli/Cargo.toml +++ b/gen/documentai1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-documentai1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Document (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/documentai1-cli" @@ -20,13 +20,13 @@ name = "documentai1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-documentai1] path = "../documentai1" -version = "4.0.1+20220226" +version = "5.0.2+20230117" + diff --git a/gen/documentai1-cli/README.md b/gen/documentai1-cli/README.md index a7d081ddf2..503ce1d2ce 100644 --- a/gen/documentai1-cli/README.md +++ b/gen/documentai1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *Document* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *Document* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash documentai1 [options] @@ -38,6 +38,8 @@ documentai1 [options] locations-operations-cancel [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] + locations-processor-types-get [-p ]... [-o ] + locations-processor-types-list [-p ]... [-o ] locations-processors-batch-process (-r )... [-p ]... [-o ] locations-processors-create (-r )... [-p ]... [-o ] locations-processors-delete [-p ]... [-o ] @@ -50,18 +52,16 @@ documentai1 [options] locations-processors-processor-versions-batch-process (-r )... [-p ]... [-o ] locations-processors-processor-versions-delete [-p ]... [-o ] locations-processors-processor-versions-deploy (-r )... [-p ]... [-o ] + locations-processors-processor-versions-evaluate-processor-version (-r )... [-p ]... [-o ] + locations-processors-processor-versions-evaluations-get [-p ]... [-o ] + locations-processors-processor-versions-evaluations-list [-p ]... [-o ] locations-processors-processor-versions-get [-p ]... [-o ] locations-processors-processor-versions-list [-p ]... [-o ] locations-processors-processor-versions-process (-r )... [-p ]... [-o ] + locations-processors-processor-versions-train (-r )... [-p ]... [-o ] locations-processors-processor-versions-undeploy (-r )... [-p ]... [-o ] locations-processors-set-default-processor-version (-r )... [-p ]... [-o ] operations-get [-p ]... [-o ] - uiv1beta3 - projects-locations-get [-p ]... [-o ] - projects-locations-list [-p ]... [-o ] - projects-locations-operations-cancel [-p ]... [-o ] - projects-locations-operations-get [-p ]... [-o ] - projects-locations-operations-list [-p ]... 
[-o ] documentai1 --help Configuration: diff --git a/gen/documentai1-cli/mkdocs.yml b/gen/documentai1-cli/mkdocs.yml index 39fce96ac9..14b959094e 100644 --- a/gen/documentai1-cli/mkdocs.yml +++ b/gen/documentai1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Document v4.0.1+20220226 +site_name: Document v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-documentai1-cli site_description: A complete library to interact with Document (protocol v1) @@ -7,38 +7,41 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/documentai1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['projects_locations-fetch-processor-types.md', 'Projects', 'Locations Fetch Processor Types'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-processors-batch-process.md', 'Projects', 'Locations Processors Batch Process'] -- ['projects_locations-processors-create.md', 'Projects', 'Locations Processors Create'] -- ['projects_locations-processors-delete.md', 'Projects', 'Locations Processors Delete'] -- ['projects_locations-processors-disable.md', 'Projects', 'Locations Processors Disable'] -- ['projects_locations-processors-enable.md', 'Projects', 'Locations Processors Enable'] -- ['projects_locations-processors-get.md', 'Projects', 'Locations Processors Get'] -- ['projects_locations-processors-human-review-config-review-document.md', 'Projects', 'Locations Processors Human Review Config Review Document'] -- ['projects_locations-processors-list.md', 'Projects', 'Locations Processors List'] -- ['projects_locations-processors-process.md', 
'Projects', 'Locations Processors Process'] -- ['projects_locations-processors-processor-versions-batch-process.md', 'Projects', 'Locations Processors Processor Versions Batch Process'] -- ['projects_locations-processors-processor-versions-delete.md', 'Projects', 'Locations Processors Processor Versions Delete'] -- ['projects_locations-processors-processor-versions-deploy.md', 'Projects', 'Locations Processors Processor Versions Deploy'] -- ['projects_locations-processors-processor-versions-get.md', 'Projects', 'Locations Processors Processor Versions Get'] -- ['projects_locations-processors-processor-versions-list.md', 'Projects', 'Locations Processors Processor Versions List'] -- ['projects_locations-processors-processor-versions-process.md', 'Projects', 'Locations Processors Processor Versions Process'] -- ['projects_locations-processors-processor-versions-undeploy.md', 'Projects', 'Locations Processors Processor Versions Undeploy'] -- ['projects_locations-processors-set-default-processor-version.md', 'Projects', 'Locations Processors Set Default Processor Version'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['uiv1beta3_projects-locations-get.md', 'Uiv1beta3', 'Projects Locations Get'] -- ['uiv1beta3_projects-locations-list.md', 'Uiv1beta3', 'Projects Locations List'] -- ['uiv1beta3_projects-locations-operations-cancel.md', 'Uiv1beta3', 'Projects Locations Operations Cancel'] -- ['uiv1beta3_projects-locations-operations-get.md', 'Uiv1beta3', 'Projects Locations Operations Get'] -- ['uiv1beta3_projects-locations-operations-list.md', 'Uiv1beta3', 'Projects Locations Operations List'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Delete': 'operations_delete.md' +- 'Projects': + - 'Locations Fetch Processor Types': 'projects_locations-fetch-processor-types.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 
'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Processor Types Get': 'projects_locations-processor-types-get.md' + - 'Locations Processor Types List': 'projects_locations-processor-types-list.md' + - 'Locations Processors Batch Process': 'projects_locations-processors-batch-process.md' + - 'Locations Processors Create': 'projects_locations-processors-create.md' + - 'Locations Processors Delete': 'projects_locations-processors-delete.md' + - 'Locations Processors Disable': 'projects_locations-processors-disable.md' + - 'Locations Processors Enable': 'projects_locations-processors-enable.md' + - 'Locations Processors Get': 'projects_locations-processors-get.md' + - 'Locations Processors Human Review Config Review Document': 'projects_locations-processors-human-review-config-review-document.md' + - 'Locations Processors List': 'projects_locations-processors-list.md' + - 'Locations Processors Process': 'projects_locations-processors-process.md' + - 'Locations Processors Processor Versions Batch Process': 'projects_locations-processors-processor-versions-batch-process.md' + - 'Locations Processors Processor Versions Delete': 'projects_locations-processors-processor-versions-delete.md' + - 'Locations Processors Processor Versions Deploy': 'projects_locations-processors-processor-versions-deploy.md' + - 'Locations Processors Processor Versions Evaluate Processor Version': 'projects_locations-processors-processor-versions-evaluate-processor-version.md' + - 'Locations Processors Processor Versions Evaluations Get': 'projects_locations-processors-processor-versions-evaluations-get.md' + - 'Locations Processors Processor Versions Evaluations List': 'projects_locations-processors-processor-versions-evaluations-list.md' + - 'Locations Processors Processor Versions Get': 'projects_locations-processors-processor-versions-get.md' + - 'Locations Processors Processor Versions 
List': 'projects_locations-processors-processor-versions-list.md' + - 'Locations Processors Processor Versions Process': 'projects_locations-processors-processor-versions-process.md' + - 'Locations Processors Processor Versions Train': 'projects_locations-processors-processor-versions-train.md' + - 'Locations Processors Processor Versions Undeploy': 'projects_locations-processors-processor-versions-undeploy.md' + - 'Locations Processors Set Default Processor Version': 'projects_locations-processors-set-default-processor-version.md' + - 'Operations Get': 'projects_operations-get.md' theme: readthedocs diff --git a/gen/documentai1-cli/src/client.rs b/gen/documentai1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/documentai1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/documentai1-cli/src/main.rs b/gen/documentai1-cli/src/main.rs index c84174b9f2..f640ea5e87 100644 --- a/gen/documentai1-cli/src/main.rs +++ b/gen/documentai1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_documentai1::{api, Error, oauth2}; +use google_documentai1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -217,7 +216,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -383,7 +382,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -435,6 +434,117 @@ where } } + async fn _projects_locations_processor_types_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_processor_types_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, 
false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_processor_types_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_processor_types_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_processors_batch_process(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -458,11 +568,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "document-output-config.gcs-output-config.field-mask" => Some(("documentOutputConfig.gcsOutputConfig.fieldMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document-output-config.gcs-output-config.gcs-uri" => Some(("documentOutputConfig.gcsOutputConfig.gcsUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "document-output-config.gcs-output-config.sharding-config.pages-overlap" => Some(("documentOutputConfig.gcsOutputConfig.shardingConfig.pagesOverlap", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "document-output-config.gcs-output-config.sharding-config.pages-per-shard" => Some(("documentOutputConfig.gcsOutputConfig.shardingConfig.pagesPerShard", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "input-documents.gcs-prefix.gcs-uri-prefix" => Some(("inputDocuments.gcsPrefix.gcsUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "skip-human-review" => Some(("skipHumanReview", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["document-output-config", "gcs-output-config", "gcs-prefix", "gcs-uri", "gcs-uri-prefix", "input-documents", "skip-human-review"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["document-output-config", "field-mask", "gcs-output-config", "gcs-prefix", "gcs-uri", "gcs-uri-prefix", "input-documents", "pages-overlap", "pages-per-shard", "sharding-config", "skip-human-review"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -909,6 +1022,12 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "document-schema.description" => Some(("documentSchema.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "document-schema.display-name" => Some(("documentSchema.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "document-schema.metadata.document-allow-multiple-labels" => Some(("documentSchema.metadata.documentAllowMultipleLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "document-schema.metadata.document-splitter" => Some(("documentSchema.metadata.documentSplitter", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "document-schema.metadata.prefixed-naming-on-properties" => Some(("documentSchema.metadata.prefixedNamingOnProperties", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "document-schema.metadata.skip-naming-validation" => Some(("documentSchema.metadata.skipNamingValidation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "enable-schema-validation" => Some(("enableSchemaValidation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "inline-document.content" => Some(("inlineDocument.content", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "inline-document.error.code" => Some(("inlineDocument.error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -921,7 +1040,7 @@ where "inline-document.uri" => Some(("inlineDocument.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "priority" => Some(("priority", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "content", "enable-schema-validation", "error", "inline-document", "message", "mime-type", "priority", "shard-count", "shard-index", "shard-info", "text", "text-offset", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "content", 
"description", "display-name", "document-allow-multiple-labels", "document-schema", "document-splitter", "enable-schema-validation", "error", "inline-document", "message", "metadata", "mime-type", "prefixed-naming-on-properties", "priority", "shard-count", "shard-index", "shard-info", "skip-naming-validation", "text", "text-offset", "uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -991,7 +1110,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1063,6 +1182,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "field-mask" => Some(("fieldMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "inline-document.content" => Some(("inlineDocument.content", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "inline-document.error.code" => Some(("inlineDocument.error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "inline-document.error.message" => Some(("inlineDocument.error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1076,7 +1196,7 @@ where "raw-document.mime-type" => Some(("rawDocument.mimeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "skip-human-review" => Some(("skipHumanReview", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "content", "error", "inline-document", "message", "mime-type", "raw-document", "shard-count", "shard-index", "shard-info", "skip-human-review", "text", "text-offset", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, 
&vec!["code", "content", "error", "field-mask", "inline-document", "message", "mime-type", "raw-document", "shard-count", "shard-index", "shard-info", "skip-human-review", "text", "text-offset", "uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1159,11 +1279,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "document-output-config.gcs-output-config.field-mask" => Some(("documentOutputConfig.gcsOutputConfig.fieldMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document-output-config.gcs-output-config.gcs-uri" => Some(("documentOutputConfig.gcsOutputConfig.gcsUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "document-output-config.gcs-output-config.sharding-config.pages-overlap" => Some(("documentOutputConfig.gcsOutputConfig.shardingConfig.pagesOverlap", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "document-output-config.gcs-output-config.sharding-config.pages-per-shard" => Some(("documentOutputConfig.gcsOutputConfig.shardingConfig.pagesPerShard", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "input-documents.gcs-prefix.gcs-uri-prefix" => Some(("inputDocuments.gcsPrefix.gcsUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "skip-human-review" => Some(("skipHumanReview", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["document-output-config", "gcs-output-config", "gcs-prefix", "gcs-uri", "gcs-uri-prefix", "input-documents", "skip-human-review"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["document-output-config", "field-mask", "gcs-output-config", "gcs-prefix", "gcs-uri", "gcs-uri-prefix", "input-documents", "pages-overlap", "pages-per-shard", "sharding-config", "skip-human-review"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1359,6 +1482,202 @@ where } } + async fn _projects_locations_processors_processor_versions_evaluate_processor_version(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "evaluation-documents.gcs-prefix.gcs-uri-prefix" => Some(("evaluationDocuments.gcsPrefix.gcsUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["evaluation-documents", "gcs-prefix", "gcs-uri-prefix"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDocumentaiV1EvaluateProcessorVersionRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_processors_processor_versions_evaluate_processor_version(request, opt.value_of("processor-version").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_processors_processor_versions_evaluations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_processors_processor_versions_evaluations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } 
+ } + } + } + + async fn _projects_locations_processors_processor_versions_evaluations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_processors_processor_versions_evaluations_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn 
_projects_locations_processors_processor_versions_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_processors_processor_versions_get(opt.value_of("name").unwrap_or("")); @@ -1421,7 +1740,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1493,6 +1812,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "field-mask" => Some(("fieldMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "inline-document.content" => Some(("inlineDocument.content", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "inline-document.error.code" => Some(("inlineDocument.error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "inline-document.error.message" => Some(("inlineDocument.error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1506,7 +1826,7 @@ where "raw-document.mime-type" => Some(("rawDocument.mimeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "skip-human-review" => Some(("skipHumanReview", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "content", "error", "inline-document", "message", "mime-type", "raw-document", "shard-count", "shard-index", "shard-info", "skip-human-review", "text", "text-offset", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "content", "error", "field-mask", "inline-document", "message", "mime-type", "raw-document", "shard-count", "shard-index", "shard-info", "skip-human-review", "text", "text-offset", "uri"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1566,6 +1886,138 @@ where } } + async fn _projects_locations_processors_processor_versions_train(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "base-processor-version" => Some(("baseProcessorVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "document-schema.description" => Some(("documentSchema.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "document-schema.display-name" => Some(("documentSchema.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "document-schema.metadata.document-allow-multiple-labels" => Some(("documentSchema.metadata.documentAllowMultipleLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "document-schema.metadata.document-splitter" => Some(("documentSchema.metadata.documentSplitter", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "document-schema.metadata.prefixed-naming-on-properties" => Some(("documentSchema.metadata.prefixedNamingOnProperties", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "document-schema.metadata.skip-naming-validation" => Some(("documentSchema.metadata.skipNamingValidation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "input-data.test-documents.gcs-prefix.gcs-uri-prefix" => Some(("inputData.testDocuments.gcsPrefix.gcsUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "input-data.training-documents.gcs-prefix.gcs-uri-prefix" => Some(("inputData.trainingDocuments.gcsPrefix.gcsUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.create-time" => Some(("processorVersion.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.deprecation-info.deprecation-time" => Some(("processorVersion.deprecationInfo.deprecationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.deprecation-info.replacement-processor-version" => Some(("processorVersion.deprecationInfo.replacementProcessorVersion", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.display-name" => Some(("processorVersion.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.document-schema.description" => Some(("processorVersion.documentSchema.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.document-schema.display-name" => Some(("processorVersion.documentSchema.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.document-schema.metadata.document-allow-multiple-labels" => Some(("processorVersion.documentSchema.metadata.documentAllowMultipleLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "processor-version.document-schema.metadata.document-splitter" => Some(("processorVersion.documentSchema.metadata.documentSplitter", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "processor-version.document-schema.metadata.prefixed-naming-on-properties" => Some(("processorVersion.documentSchema.metadata.prefixedNamingOnProperties", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "processor-version.document-schema.metadata.skip-naming-validation" => Some(("processorVersion.documentSchema.metadata.skipNamingValidation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "processor-version.google-managed" => Some(("processorVersion.googleManaged", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "processor-version.kms-key-name" => Some(("processorVersion.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.kms-key-version-name" => Some(("processorVersion.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.f1-score" => 
Some(("processorVersion.latestEvaluation.aggregateMetrics.f1Score", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.false-negatives-count" => Some(("processorVersion.latestEvaluation.aggregateMetrics.falseNegativesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.false-positives-count" => Some(("processorVersion.latestEvaluation.aggregateMetrics.falsePositivesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.ground-truth-document-count" => Some(("processorVersion.latestEvaluation.aggregateMetrics.groundTruthDocumentCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.ground-truth-occurrences-count" => Some(("processorVersion.latestEvaluation.aggregateMetrics.groundTruthOccurrencesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.precision" => Some(("processorVersion.latestEvaluation.aggregateMetrics.precision", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.predicted-document-count" => Some(("processorVersion.latestEvaluation.aggregateMetrics.predictedDocumentCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.predicted-occurrences-count" => Some(("processorVersion.latestEvaluation.aggregateMetrics.predictedOccurrencesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.recall" => Some(("processorVersion.latestEvaluation.aggregateMetrics.recall", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + 
"processor-version.latest-evaluation.aggregate-metrics.total-documents-count" => Some(("processorVersion.latestEvaluation.aggregateMetrics.totalDocumentsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics.true-positives-count" => Some(("processorVersion.latestEvaluation.aggregateMetrics.truePositivesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.f1-score" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.f1Score", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.false-negatives-count" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.falseNegativesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.false-positives-count" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.falsePositivesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.ground-truth-document-count" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.groundTruthDocumentCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.ground-truth-occurrences-count" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.groundTruthOccurrencesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.precision" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.precision", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.predicted-document-count" => 
Some(("processorVersion.latestEvaluation.aggregateMetricsExact.predictedDocumentCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.predicted-occurrences-count" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.predictedOccurrencesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.recall" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.recall", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.total-documents-count" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.totalDocumentsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.aggregate-metrics-exact.true-positives-count" => Some(("processorVersion.latestEvaluation.aggregateMetricsExact.truePositivesCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.evaluation" => Some(("processorVersion.latestEvaluation.evaluation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.latest-evaluation.operation" => Some(("processorVersion.latestEvaluation.operation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.name" => Some(("processorVersion.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "processor-version.state" => Some(("processorVersion.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["aggregate-metrics", "aggregate-metrics-exact", "base-processor-version", "create-time", "deprecation-info", "deprecation-time", "description", "display-name", "document-allow-multiple-labels", "document-schema", 
"document-splitter", "evaluation", "f1-score", "false-negatives-count", "false-positives-count", "gcs-prefix", "gcs-uri-prefix", "google-managed", "ground-truth-document-count", "ground-truth-occurrences-count", "input-data", "kms-key-name", "kms-key-version-name", "latest-evaluation", "metadata", "name", "operation", "precision", "predicted-document-count", "predicted-occurrences-count", "prefixed-naming-on-properties", "processor-version", "recall", "replacement-processor-version", "skip-naming-validation", "state", "test-documents", "total-documents-count", "training-documents", "true-positives-count"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudDocumentaiV1TrainProcessorVersionRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_processors_processor_versions_train(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_processors_processor_versions_undeploy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1787,286 +2239,6 @@ where } } - async fn _uiv1beta3_projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.uiv1beta3().projects_locations_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), 
io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _uiv1beta3_projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.uiv1beta3().projects_locations_list(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match 
protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _uiv1beta3_projects_locations_operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.uiv1beta3().projects_locations_operations_cancel(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut 
ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _uiv1beta3_projects_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.uiv1beta3().projects_locations_operations_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _uiv1beta3_projects_locations_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = 
self.hub.uiv1beta3().projects_locations_operations_list(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -2103,6 +2275,12 @@ where 
("locations-operations-list", Some(opt)) => { call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; }, + ("locations-processor-types-get", Some(opt)) => { + call_result = self._projects_locations_processor_types_get(opt, dry_run, &mut err).await; + }, + ("locations-processor-types-list", Some(opt)) => { + call_result = self._projects_locations_processor_types_list(opt, dry_run, &mut err).await; + }, ("locations-processors-batch-process", Some(opt)) => { call_result = self._projects_locations_processors_batch_process(opt, dry_run, &mut err).await; }, @@ -2139,6 +2317,15 @@ where ("locations-processors-processor-versions-deploy", Some(opt)) => { call_result = self._projects_locations_processors_processor_versions_deploy(opt, dry_run, &mut err).await; }, + ("locations-processors-processor-versions-evaluate-processor-version", Some(opt)) => { + call_result = self._projects_locations_processors_processor_versions_evaluate_processor_version(opt, dry_run, &mut err).await; + }, + ("locations-processors-processor-versions-evaluations-get", Some(opt)) => { + call_result = self._projects_locations_processors_processor_versions_evaluations_get(opt, dry_run, &mut err).await; + }, + ("locations-processors-processor-versions-evaluations-list", Some(opt)) => { + call_result = self._projects_locations_processors_processor_versions_evaluations_list(opt, dry_run, &mut err).await; + }, ("locations-processors-processor-versions-get", Some(opt)) => { call_result = self._projects_locations_processors_processor_versions_get(opt, dry_run, &mut err).await; }, @@ -2148,6 +2335,9 @@ where ("locations-processors-processor-versions-process", Some(opt)) => { call_result = self._projects_locations_processors_processor_versions_process(opt, dry_run, &mut err).await; }, + ("locations-processors-processor-versions-train", Some(opt)) => { + call_result = self._projects_locations_processors_processor_versions_train(opt, dry_run, &mut err).await; + }, 
("locations-processors-processor-versions-undeploy", Some(opt)) => { call_result = self._projects_locations_processors_processor_versions_undeploy(opt, dry_run, &mut err).await; }, @@ -2163,29 +2353,6 @@ where } } }, - ("uiv1beta3", Some(opt)) => { - match opt.subcommand() { - ("projects-locations-get", Some(opt)) => { - call_result = self._uiv1beta3_projects_locations_get(opt, dry_run, &mut err).await; - }, - ("projects-locations-list", Some(opt)) => { - call_result = self._uiv1beta3_projects_locations_list(opt, dry_run, &mut err).await; - }, - ("projects-locations-operations-cancel", Some(opt)) => { - call_result = self._uiv1beta3_projects_locations_operations_cancel(opt, dry_run, &mut err).await; - }, - ("projects-locations-operations-get", Some(opt)) => { - call_result = self._uiv1beta3_projects_locations_operations_get(opt, dry_run, &mut err).await; - }, - ("projects-locations-operations-list", Some(opt)) => { - call_result = self._uiv1beta3_projects_locations_operations_list(opt, dry_run, &mut err).await; - }, - _ => { - err.issues.push(CLIError::MissingMethodError("uiv1beta3".to_string())); - writeln!(io::stderr(), "{}\n", opt.usage()).ok(); - } - } - }, _ => { err.issues.push(CLIError::MissingCommandError); writeln!(io::stderr(), "{}\n", self.opt.usage()).ok(); @@ -2284,7 +2451,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'locations-fetch-processor-types', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'locations-processors-batch-process', 'locations-processors-create', 'locations-processors-delete', 'locations-processors-disable', 'locations-processors-enable', 'locations-processors-get', 'locations-processors-human-review-config-review-document', 'locations-processors-list', 'locations-processors-process', 'locations-processors-processor-versions-batch-process', 'locations-processors-processor-versions-delete', 'locations-processors-processor-versions-deploy', 
'locations-processors-processor-versions-get', 'locations-processors-processor-versions-list', 'locations-processors-processor-versions-process', 'locations-processors-processor-versions-undeploy', 'locations-processors-set-default-processor-version' and 'operations-get'", vec![ + ("projects", "methods: 'locations-fetch-processor-types', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'locations-processor-types-get', 'locations-processor-types-list', 'locations-processors-batch-process', 'locations-processors-create', 'locations-processors-delete', 'locations-processors-disable', 'locations-processors-enable', 'locations-processors-get', 'locations-processors-human-review-config-review-document', 'locations-processors-list', 'locations-processors-process', 'locations-processors-processor-versions-batch-process', 'locations-processors-processor-versions-delete', 'locations-processors-processor-versions-deploy', 'locations-processors-processor-versions-evaluate-processor-version', 'locations-processors-processor-versions-evaluations-get', 'locations-processors-processor-versions-evaluations-list', 'locations-processors-processor-versions-get', 'locations-processors-processor-versions-list', 'locations-processors-processor-versions-process', 'locations-processors-processor-versions-train', 'locations-processors-processor-versions-undeploy', 'locations-processors-set-default-processor-version' and 'operations-get'", vec![ ("locations-fetch-processor-types", Some(r##"Fetches processor types. 
Note that we do not use ListProcessorTypes here because it is not paginated."##), "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/projects_locations-fetch-processor-types", @@ -2411,6 +2578,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-processor-types-get", + Some(r##"Gets a processor type detail."##), + "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/projects_locations-processor-types-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The processor type resource name."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-processor-types-list", + Some(r##"Lists the processor types that exist."##), + "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/projects_locations-processor-types-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The location of processor type to list. The available processor types may depend on the allow-listing on projects. 
Format: `projects/{project}/locations/{location}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2723,6 +2934,78 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-processors-processor-versions-evaluate-processor-version", + Some(r##"Evaluates a ProcessorVersion against annotated documents, producing an Evaluation."##), + "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/projects_locations-processors-processor-versions-evaluate-processor-version", + vec![ + (Some(r##"processor-version"##), + None, + Some(r##"Required. The resource name of the ProcessorVersion to evaluate. `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-processors-processor-versions-evaluations-get", + Some(r##"Retrieves a specific evaluation."##), + "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/projects_locations-processors-processor-versions-evaluations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the Evaluation to get. 
`projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}/evaluations/{evaluation}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-processors-processor-versions-evaluations-list", + Some(r##"Retrieves a set of evaluations for a given processor version."##), + "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/projects_locations-processors-processor-versions-evaluations-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the ProcessorVersion to list evaluations for. `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2795,6 +3078,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-processors-processor-versions-train", + Some(r##"Trains a new processor version. Operation metadata is returned as cloud_documentai_core.TrainProcessorVersionMetadata."##), + "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/projects_locations-processors-processor-versions-train", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent (project, location and processor) to create the new version for. 
Format: `projects/{project}/locations/{location}/processors/{processor}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2881,124 +3192,11 @@ async fn main() { ]), ]), - ("uiv1beta3", "methods: 'projects-locations-get', 'projects-locations-list', 'projects-locations-operations-cancel', 'projects-locations-operations-get' and 'projects-locations-operations-list'", vec![ - ("projects-locations-get", - Some(r##"Gets information about a location."##), - "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/uiv1beta3_projects-locations-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Resource name for the location."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("projects-locations-list", - Some(r##"Lists information about the supported locations for this service."##), - "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/uiv1beta3_projects-locations-list", - vec![ - (Some(r##"name"##), - None, - Some(r##"The resource that owns the locations collection, if applicable."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the 
program's output"##), - Some(false), - Some(false)), - ]), - ("projects-locations-operations-cancel", - Some(r##"Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`."##), - "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/uiv1beta3_projects-locations-operations-cancel", - vec![ - (Some(r##"name"##), - None, - Some(r##"The name of the operation resource to be cancelled."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("projects-locations-operations-get", - Some(r##"Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service."##), - "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/uiv1beta3_projects-locations-operations-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"The name of the operation resource."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("projects-locations-operations-list", - Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. 
For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##), - "Details at http://byron.github.io/google-apis-rs/google_documentai1_cli/uiv1beta3_projects-locations-operations-list", - vec![ - (Some(r##"name"##), - None, - Some(r##"The name of the operation's parent resource."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ]), - ]; let mut app = App::new("documentai1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230117") .about("Service to parse structured information from unstructured or semi-structured documents using state-of-the-art Google AI such as natural language, computer vision, translation, and AutoML.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_documentai1_cli") .arg(Arg::with_name("url") diff --git a/gen/documentai1/Cargo.toml b/gen/documentai1/Cargo.toml index 11d8a62948..1717270907 100644 --- a/gen/documentai1/Cargo.toml +++ b/gen/documentai1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-documentai1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Document (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/documentai1" homepage = "https://cloud.google.com/document-ai/docs/" -documentation = "https://docs.rs/google-documentai1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-documentai1/5.0.2+20230117" license = "MIT" keywords = ["documentai", "google", "protocol", "web", "api"] autobins = 
false diff --git a/gen/documentai1/README.md b/gen/documentai1/README.md index 55fa6d23f4..41f7d1b621 100644 --- a/gen/documentai1/README.md +++ b/gen/documentai1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-documentai1` library allows access to all features of the *Google Document* service. -This documentation was generated from *Document* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *documentai:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Document* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *documentai:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Document* *v1* API can be found at the [official documentation site](https://cloud.google.com/document-ai/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/Document) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/Document) ... 
* operations - * [*delete*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::OperationDeleteCall) + * [*delete*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::OperationDeleteCall) * projects - * [*locations fetch processor types*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationFetchProcessorTypeCall), [*locations get*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationOperationListCall), [*locations processor types get*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorTypeGetCall), [*locations processor types list*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorTypeListCall), [*locations processors batch process*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorBatchProcesCall), [*locations processors create*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorCreateCall), [*locations processors delete*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorDeleteCall), [*locations processors 
disable*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorDisableCall), [*locations processors enable*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorEnableCall), [*locations processors get*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorGetCall), [*locations processors human review config review document*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorHumanReviewConfigReviewDocumentCall), [*locations processors list*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorListCall), [*locations processors process*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcesCall), [*locations processors processor versions batch process*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionBatchProcesCall), [*locations processors processor versions delete*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionDeleteCall), [*locations processors processor versions deploy*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionDeployCall), [*locations processors processor versions evaluate processor version*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionEvaluateProcessorVersionCall), [*locations processors processor versions evaluations get*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionEvaluationGetCall), [*locations processors processor versions evaluations 
list*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionEvaluationListCall), [*locations processors processor versions get*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionGetCall), [*locations processors processor versions list*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionListCall), [*locations processors processor versions process*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionProcesCall), [*locations processors processor versions train*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionTrainCall), [*locations processors processor versions undeploy*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionUndeployCall), [*locations processors set default processor version*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectLocationProcessorSetDefaultProcessorVersionCall) and [*operations get*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/api::ProjectOperationGetCall) + * [*locations fetch processor types*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationFetchProcessorTypeCall), [*locations get*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationOperationCancelCall), [*locations operations 
get*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationOperationListCall), [*locations processor types get*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorTypeGetCall), [*locations processor types list*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorTypeListCall), [*locations processors batch process*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorBatchProcesCall), [*locations processors create*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorCreateCall), [*locations processors delete*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorDeleteCall), [*locations processors disable*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorDisableCall), [*locations processors enable*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorEnableCall), [*locations processors get*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorGetCall), [*locations processors human review config review document*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorHumanReviewConfigReviewDocumentCall), [*locations processors list*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorListCall), [*locations processors process*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcesCall), [*locations processors processor versions batch 
process*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionBatchProcesCall), [*locations processors processor versions delete*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionDeleteCall), [*locations processors processor versions deploy*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionDeployCall), [*locations processors processor versions evaluate processor version*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionEvaluateProcessorVersionCall), [*locations processors processor versions evaluations get*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionEvaluationGetCall), [*locations processors processor versions evaluations list*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionEvaluationListCall), [*locations processors processor versions get*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionGetCall), [*locations processors processor versions list*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionListCall), [*locations processors processor versions process*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionProcesCall), [*locations processors processor versions train*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionTrainCall), [*locations processors processor versions undeploy*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorProcessorVersionUndeployCall), [*locations 
processors set default processor version*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectLocationProcessorSetDefaultProcessorVersionCall) and [*operations get*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/api::ProjectOperationGetCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/Document)** +* **[Hub](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/Document)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::CallBuilder) -* **[Resources](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::CallBuilder) +* **[Resources](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::Part)** + * **[Parts](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -139,17 +139,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -159,29 +159,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::Delegate) to the -[Method Builder](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::Delegate) to the +[Method Builder](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::RequestValue) and -[decodable](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::RequestValue) and +[decodable](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-documentai1/5.0.2-beta-1+20230117/google_documentai1/client::RequestValue) are moved +* [request values](https://docs.rs/google-documentai1/5.0.2+20230117/google_documentai1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/documentai1/src/api.rs b/gen/documentai1/src/api.rs index e03983bb33..70ac6de3d4 100644 --- a/gen/documentai1/src/api.rs +++ b/gen/documentai1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Document { Document { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://documentai.googleapis.com/".to_string(), _root_url: "https://documentai.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> Document { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/documentai1/src/client.rs b/gen/documentai1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/documentai1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/documentai1/src/lib.rs b/gen/documentai1/src/lib.rs index 890de5792d..c36507f77a 100644 --- a/gen/documentai1/src/lib.rs +++ b/gen/documentai1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Document* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *documentai:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Document* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *documentai:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Document* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/document-ai/docs/). diff --git a/gen/documentai1_beta2-cli/Cargo.toml b/gen/documentai1_beta2-cli/Cargo.toml index dcb8c4daf2..c7e60f5787 100644 --- a/gen/documentai1_beta2-cli/Cargo.toml +++ b/gen/documentai1_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-documentai1_beta2-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Document (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/documentai1_beta2-cli" @@ -20,13 +20,13 @@ name = "documentai1-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-documentai1_beta2] path = "../documentai1_beta2" -version = "4.0.1+20220226" +version = "5.0.2+20230117" + diff --git a/gen/documentai1_beta2-cli/README.md b/gen/documentai1_beta2-cli/README.md index fd363644f4..11306a31ac 100644 --- a/gen/documentai1_beta2-cli/README.md +++ b/gen/documentai1_beta2-cli/README.md @@ -25,7 +25,7 @@ 
Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Document* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *Document* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash documentai1-beta2 [options] diff --git a/gen/documentai1_beta2-cli/mkdocs.yml b/gen/documentai1_beta2-cli/mkdocs.yml index 560cfb2a7e..0ad527eed8 100644 --- a/gen/documentai1_beta2-cli/mkdocs.yml +++ b/gen/documentai1_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Document v4.0.1+20220226 +site_name: Document v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-documentai1_beta2-cli site_description: A complete library to interact with Document (protocol v1beta2) @@ -7,14 +7,15 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/documentai1_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_documents-batch-process.md', 'Projects', 'Documents Batch Process'] -- ['projects_documents-process.md', 'Projects', 'Documents Process'] -- ['projects_locations-documents-batch-process.md', 'Projects', 'Locations Documents Batch Process'] -- ['projects_locations-documents-process.md', 'Projects', 'Locations Documents Process'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Documents Batch Process': 'projects_documents-batch-process.md' + - 'Documents Process': 'projects_documents-process.md' + - 'Locations Documents Batch Process': 'projects_locations-documents-batch-process.md' + - 'Locations Documents Process': 'projects_locations-documents-process.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Operations Get': 'projects_operations-get.md' theme: readthedocs diff --git a/gen/documentai1_beta2-cli/src/client.rs 
b/gen/documentai1_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/documentai1_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/documentai1_beta2-cli/src/main.rs b/gen/documentai1_beta2-cli/src/main.rs index d690863513..86aa90a97e 100644 --- a/gen/documentai1_beta2-cli/src/main.rs +++ b/gen/documentai1_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_documentai1_beta2::{api, Error, oauth2}; +use google_documentai1_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -790,7 +789,7 @@ async fn main() { let mut app = App::new("documentai1-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230117") .about("Service to parse structured information from unstructured or semi-structured documents using state-of-the-art Google AI such as natural language, computer vision, translation, and AutoML.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_documentai1_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/documentai1_beta2/Cargo.toml b/gen/documentai1_beta2/Cargo.toml index e879a0ca59..d79f2d677c 100644 --- a/gen/documentai1_beta2/Cargo.toml +++ b/gen/documentai1_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-documentai1_beta2" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Document (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/documentai1_beta2" homepage = 
"https://cloud.google.com/document-ai/docs/" -documentation = "https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-documentai1_beta2/5.0.2+20230117" license = "MIT" keywords = ["documentai", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/documentai1_beta2/README.md b/gen/documentai1_beta2/README.md index de54b03279..4b96f61d76 100644 --- a/gen/documentai1_beta2/README.md +++ b/gen/documentai1_beta2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-documentai1_beta2` library allows access to all features of the *Google Document* service. -This documentation was generated from *Document* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *documentai:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Document* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *documentai:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Document* *v1_beta2* API can be found at the [official documentation site](https://cloud.google.com/document-ai/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/Document) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/Document) ... 
* projects - * [*documents batch process*](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/api::ProjectDocumentBatchProcesCall), [*documents process*](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/api::ProjectDocumentProcesCall), [*locations documents batch process*](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/api::ProjectLocationDocumentBatchProcesCall), [*locations documents process*](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/api::ProjectLocationDocumentProcesCall), [*locations operations get*](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/api::ProjectLocationOperationGetCall) and [*operations get*](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/api::ProjectOperationGetCall) + * [*documents batch process*](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/api::ProjectDocumentBatchProcesCall), [*documents process*](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/api::ProjectDocumentProcesCall), [*locations documents batch process*](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/api::ProjectLocationDocumentBatchProcesCall), [*locations documents process*](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/api::ProjectLocationDocumentProcesCall), [*locations operations get*](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/api::ProjectLocationOperationGetCall) and [*operations get*](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/api::ProjectOperationGetCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/Document)** +* **[Hub](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/Document)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::Part)** + * **[Parts](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-documentai1_beta2/5.0.2-beta-1+20230117/google_documentai1_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-documentai1_beta2/5.0.2+20230117/google_documentai1_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/documentai1_beta2/src/api.rs b/gen/documentai1_beta2/src/api.rs index 54750d7db3..0d14e15921 100644 --- a/gen/documentai1_beta2/src/api.rs +++ b/gen/documentai1_beta2/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Document { Document { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://documentai.googleapis.com/".to_string(), _root_url: "https://documentai.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Document { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/documentai1_beta2/src/client.rs b/gen/documentai1_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/documentai1_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/documentai1_beta2/src/lib.rs b/gen/documentai1_beta2/src/lib.rs index 5e22ac8ee2..ee2d142ed2 100644 --- a/gen/documentai1_beta2/src/lib.rs +++ b/gen/documentai1_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Document* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *documentai:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Document* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *documentai:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Document* *v1_beta2* API can be found at the //! [official documentation site](https://cloud.google.com/document-ai/docs/). diff --git a/gen/domains1-cli/Cargo.toml b/gen/domains1-cli/Cargo.toml index 7afe41db7b..055cbe12f4 100644 --- a/gen/domains1-cli/Cargo.toml +++ b/gen/domains1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-domains1-cli" -version = "4.0.1+20220128" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Domains (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/domains1-cli" @@ -20,13 +20,13 @@ name = "domains1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-domains1] path = "../domains1" -version = "4.0.1+20220128" +version = "5.0.2+20230105" + diff --git a/gen/domains1-cli/README.md b/gen/domains1-cli/README.md index 8ee4fda918..c170fd8344 100644 --- a/gen/domains1-cli/README.md +++ b/gen/domains1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Cloud Domains* API at revision *20220128*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Domains* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash domains1 [options] @@ -41,11 +41,13 @@ domains1 [options] locations-registrations-export (-r )... [-p ]... [-o ] locations-registrations-get [-p ]... [-o ] locations-registrations-get-iam-policy [-p ]... [-o ] + locations-registrations-import (-r )... [-p ]... [-o ] locations-registrations-list [-p ]... [-o ] locations-registrations-patch (-r )... [-p ]... [-o ] locations-registrations-register (-r )... [-p ]... [-o ] locations-registrations-reset-authorization-code (-r )... [-p ]... [-o ] locations-registrations-retrieve-authorization-code [-p ]... [-o ] + locations-registrations-retrieve-importable-domains [-p ]... [-o ] locations-registrations-retrieve-register-parameters [-p ]... [-o ] locations-registrations-retrieve-transfer-parameters [-p ]... [-o ] locations-registrations-search-domains [-p ]... 
[-o ] diff --git a/gen/domains1-cli/mkdocs.yml b/gen/domains1-cli/mkdocs.yml index d4a3ba7319..de022bd749 100644 --- a/gen/domains1-cli/mkdocs.yml +++ b/gen/domains1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Domains v4.0.1+20220128 +site_name: Cloud Domains v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-domains1-cli site_description: A complete library to interact with Cloud Domains (protocol v1) @@ -7,30 +7,33 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/domains1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-registrations-configure-contact-settings.md', 'Projects', 'Locations Registrations Configure Contact Settings'] -- ['projects_locations-registrations-configure-dns-settings.md', 'Projects', 'Locations Registrations Configure Dns Settings'] -- ['projects_locations-registrations-configure-management-settings.md', 'Projects', 'Locations Registrations Configure Management Settings'] -- ['projects_locations-registrations-delete.md', 'Projects', 'Locations Registrations Delete'] -- ['projects_locations-registrations-export.md', 'Projects', 'Locations Registrations Export'] -- ['projects_locations-registrations-get.md', 'Projects', 'Locations Registrations Get'] -- ['projects_locations-registrations-get-iam-policy.md', 'Projects', 'Locations Registrations Get Iam Policy'] -- ['projects_locations-registrations-list.md', 'Projects', 'Locations Registrations List'] -- ['projects_locations-registrations-patch.md', 'Projects', 'Locations Registrations Patch'] -- ['projects_locations-registrations-register.md', 'Projects', 'Locations Registrations Register'] -- 
['projects_locations-registrations-reset-authorization-code.md', 'Projects', 'Locations Registrations Reset Authorization Code'] -- ['projects_locations-registrations-retrieve-authorization-code.md', 'Projects', 'Locations Registrations Retrieve Authorization Code'] -- ['projects_locations-registrations-retrieve-register-parameters.md', 'Projects', 'Locations Registrations Retrieve Register Parameters'] -- ['projects_locations-registrations-retrieve-transfer-parameters.md', 'Projects', 'Locations Registrations Retrieve Transfer Parameters'] -- ['projects_locations-registrations-search-domains.md', 'Projects', 'Locations Registrations Search Domains'] -- ['projects_locations-registrations-set-iam-policy.md', 'Projects', 'Locations Registrations Set Iam Policy'] -- ['projects_locations-registrations-test-iam-permissions.md', 'Projects', 'Locations Registrations Test Iam Permissions'] -- ['projects_locations-registrations-transfer.md', 'Projects', 'Locations Registrations Transfer'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Registrations Configure Contact Settings': 'projects_locations-registrations-configure-contact-settings.md' + - 'Locations Registrations Configure Dns Settings': 'projects_locations-registrations-configure-dns-settings.md' + - 'Locations Registrations Configure Management Settings': 'projects_locations-registrations-configure-management-settings.md' + - 'Locations Registrations Delete': 'projects_locations-registrations-delete.md' + - 'Locations Registrations Export': 'projects_locations-registrations-export.md' + - 'Locations Registrations Get': 'projects_locations-registrations-get.md' + - 'Locations Registrations Get Iam Policy': 'projects_locations-registrations-get-iam-policy.md' + - 
'Locations Registrations Import': 'projects_locations-registrations-import.md' + - 'Locations Registrations List': 'projects_locations-registrations-list.md' + - 'Locations Registrations Patch': 'projects_locations-registrations-patch.md' + - 'Locations Registrations Register': 'projects_locations-registrations-register.md' + - 'Locations Registrations Reset Authorization Code': 'projects_locations-registrations-reset-authorization-code.md' + - 'Locations Registrations Retrieve Authorization Code': 'projects_locations-registrations-retrieve-authorization-code.md' + - 'Locations Registrations Retrieve Importable Domains': 'projects_locations-registrations-retrieve-importable-domains.md' + - 'Locations Registrations Retrieve Register Parameters': 'projects_locations-registrations-retrieve-register-parameters.md' + - 'Locations Registrations Retrieve Transfer Parameters': 'projects_locations-registrations-retrieve-transfer-parameters.md' + - 'Locations Registrations Search Domains': 'projects_locations-registrations-search-domains.md' + - 'Locations Registrations Set Iam Policy': 'projects_locations-registrations-set-iam-policy.md' + - 'Locations Registrations Test Iam Permissions': 'projects_locations-registrations-test-iam-permissions.md' + - 'Locations Registrations Transfer': 'projects_locations-registrations-transfer.md' theme: readthedocs diff --git a/gen/domains1-cli/src/client.rs b/gen/domains1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/domains1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use 
std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/domains1-cli/src/main.rs b/gen/domains1-cli/src/main.rs index 0eac348f4b..f88d0be849 100644 --- a/gen/domains1-cli/src/main.rs +++ b/gen/domains1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_domains1::{api, Error, oauth2}; +use google_domains1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -227,7 +226,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -780,7 +779,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = 
false; @@ -829,6 +828,92 @@ where } } + async fn _projects_locations_registrations_import(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "domain-name" => Some(("domainName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["domain-name", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ImportDomainRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_registrations_import(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_registrations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_registrations_list(opt.value_of("parent").unwrap_or("")); @@ -839,7 +924,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1011,10 +1096,12 @@ where "pending-contact-settings.technical-contact.postal-address.revision" => Some(("pendingContactSettings.technicalContact.postalAddress.revision", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "pending-contact-settings.technical-contact.postal-address.sorting-code" => 
Some(("pendingContactSettings.technicalContact.postalAddress.sortingCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pending-contact-settings.technical-contact.postal-address.sublocality" => Some(("pendingContactSettings.technicalContact.postalAddress.sublocality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "register-failure-reason" => Some(("registerFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "supported-privacy" => Some(("supportedPrivacy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "transfer-failure-reason" => Some(("transferFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "contact-settings", "create-time", "custom-dns", "dns-settings", "domain-name", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "registrant-contact", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-lock-state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "contact-settings", "create-time", "custom-dns", "dns-settings", "domain-name", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", 
"register-failure-reason", "registrant-contact", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-failure-reason", "transfer-lock-state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1029,7 +1116,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1200,14 +1287,16 @@ where "registration.pending-contact-settings.technical-contact.postal-address.revision" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.revision", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "registration.pending-contact-settings.technical-contact.postal-address.sorting-code" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.sortingCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.pending-contact-settings.technical-contact.postal-address.sublocality" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.sublocality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "registration.register-failure-reason" => Some(("registration.registerFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.state" => Some(("registration.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.supported-privacy" => Some(("registration.supportedPrivacy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "registration.transfer-failure-reason" => Some(("registration.transferFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "yearly-price.currency-code" => Some(("yearlyPrice.currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "yearly-price.nanos" => Some(("yearlyPrice.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "yearly-price.units" => Some(("yearlyPrice.units", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "contact-notices", "contact-settings", "create-time", "currency-code", "custom-dns", "dns-settings", "domain-name", "domain-notices", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "nanos", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "registrant-contact", "registration", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-lock-state", "units", "validate-only", "yearly-price"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "contact-notices", "contact-settings", "create-time", "currency-code", "custom-dns", "dns-settings", "domain-name", "domain-notices", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "nanos", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "register-failure-reason", "registrant-contact", "registration", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", 
"technical-contact", "transfer-failure-reason", "transfer-lock-state", "units", "validate-only", "yearly-price"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1403,6 +1492,65 @@ where } } + async fn _projects_locations_registrations_retrieve_importable_domains(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_registrations_retrieve_importable_domains(opt.value_of("location").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { 
+ let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_registrations_retrieve_register_parameters(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_registrations_retrieve_register_parameters(opt.value_of("location").unwrap_or("")); @@ -1865,14 +2013,16 @@ where "registration.pending-contact-settings.technical-contact.postal-address.revision" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.revision", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "registration.pending-contact-settings.technical-contact.postal-address.sorting-code" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.sortingCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.pending-contact-settings.technical-contact.postal-address.sublocality" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.sublocality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "registration.register-failure-reason" => Some(("registration.registerFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.state" => Some(("registration.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.supported-privacy" => Some(("registration.supportedPrivacy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "registration.transfer-failure-reason" => Some(("registration.transferFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"yearly-price.currency-code" => Some(("yearlyPrice.currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "yearly-price.nanos" => Some(("yearlyPrice.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "yearly-price.units" => Some(("yearlyPrice.units", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "authorization-code", "code", "contact-notices", "contact-settings", "create-time", "currency-code", "custom-dns", "dns-settings", "domain-name", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "nanos", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "registrant-contact", "registration", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-lock-state", "units", "validate-only", "yearly-price"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "authorization-code", "code", "contact-notices", "contact-settings", "create-time", "currency-code", "custom-dns", "dns-settings", "domain-name", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "nanos", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "register-failure-reason", "registrant-contact", "registration", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-failure-reason", "transfer-lock-state", "units", "validate-only", 
"yearly-price"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1972,6 +2122,9 @@ where ("locations-registrations-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_registrations_get_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-registrations-import", Some(opt)) => { + call_result = self._projects_locations_registrations_import(opt, dry_run, &mut err).await; + }, ("locations-registrations-list", Some(opt)) => { call_result = self._projects_locations_registrations_list(opt, dry_run, &mut err).await; }, @@ -1987,6 +2140,9 @@ where ("locations-registrations-retrieve-authorization-code", Some(opt)) => { call_result = self._projects_locations_registrations_retrieve_authorization_code(opt, dry_run, &mut err).await; }, + ("locations-registrations-retrieve-importable-domains", Some(opt)) => { + call_result = self._projects_locations_registrations_retrieve_importable_domains(opt, dry_run, &mut err).await; + }, ("locations-registrations-retrieve-register-parameters", Some(opt)) => { call_result = self._projects_locations_registrations_retrieve_register_parameters(opt, dry_run, &mut err).await; }, @@ -2084,7 +2240,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-list', 'locations-operations-get', 'locations-operations-list', 'locations-registrations-configure-contact-settings', 'locations-registrations-configure-dns-settings', 'locations-registrations-configure-management-settings', 'locations-registrations-delete', 'locations-registrations-export', 'locations-registrations-get', 'locations-registrations-get-iam-policy', 'locations-registrations-list', 'locations-registrations-patch', 'locations-registrations-register', 'locations-registrations-reset-authorization-code', 'locations-registrations-retrieve-authorization-code', 'locations-registrations-retrieve-register-parameters', 
'locations-registrations-retrieve-transfer-parameters', 'locations-registrations-search-domains', 'locations-registrations-set-iam-policy', 'locations-registrations-test-iam-permissions' and 'locations-registrations-transfer'", vec![ + ("projects", "methods: 'locations-get', 'locations-list', 'locations-operations-get', 'locations-operations-list', 'locations-registrations-configure-contact-settings', 'locations-registrations-configure-dns-settings', 'locations-registrations-configure-management-settings', 'locations-registrations-delete', 'locations-registrations-export', 'locations-registrations-get', 'locations-registrations-get-iam-policy', 'locations-registrations-import', 'locations-registrations-list', 'locations-registrations-patch', 'locations-registrations-register', 'locations-registrations-reset-authorization-code', 'locations-registrations-retrieve-authorization-code', 'locations-registrations-retrieve-importable-domains', 'locations-registrations-retrieve-register-parameters', 'locations-registrations-retrieve-transfer-parameters', 'locations-registrations-search-domains', 'locations-registrations-set-iam-policy', 'locations-registrations-test-iam-permissions' and 'locations-registrations-transfer'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_domains1_cli/projects_locations-get", @@ -2335,7 +2491,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2345,6 +2501,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-registrations-import", + Some(r##"Imports a domain name from [Google Domains](https://domains.google/) for use in Cloud Domains. To transfer a domain from another registrar, use the `TransferDomain` method instead. Since individual users can own domains in Google Domains, the calling user must have ownership permission on the domain."##), + "Details at http://byron.github.io/google-apis-rs/google_domains1_cli/projects_locations-registrations-import", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the Registration. Must be in the format `projects/*/locations/*`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2473,6 +2657,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-registrations-retrieve-importable-domains", + Some(r##"Lists domain names from [Google Domains](https://domains.google/) that can be imported to Cloud Domains using the `ImportDomain` method. Since individual users can own domains in Google Domains, the list of domains returned depends on the individual user making the call. 
Domains already managed by Cloud Domains are not returned."##), + "Details at http://byron.github.io/google-apis-rs/google_domains1_cli/projects_locations-registrations-retrieve-importable-domains", + vec![ + (Some(r##"location"##), + None, + Some(r##"Required. The location. Must be in the format `projects/*/locations/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2502,7 +2708,7 @@ async fn main() { Some(false)), ]), ("locations-registrations-retrieve-transfer-parameters", - Some(r##"Gets parameters needed to transfer a domain name from another registrar to Cloud Domains. For domains managed by Google Domains, transferring to Cloud Domains is not supported. Use the returned values to call `TransferDomain`."##), + Some(r##"Gets parameters needed to transfer a domain name from another registrar to Cloud Domains. For domains already managed by [Google Domains](https://domains.google/), use `ImportDomain` instead. Use the returned values to call `TransferDomain`."##), "Details at http://byron.github.io/google-apis-rs/google_domains1_cli/projects_locations-registrations-retrieve-transfer-parameters", vec![ (Some(r##"location"##), @@ -2551,7 +2757,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2579,7 +2785,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2602,7 +2808,7 @@ async fn main() { Some(false)), ]), ("locations-registrations-transfer", - Some(r##"Transfers a domain name from another registrar to Cloud Domains. For domains managed by Google Domains, transferring to Cloud Domains is not supported. Before calling this method, go to the domain's current registrar to unlock the domain for transfer and retrieve the domain's transfer authorization code. Then call `RetrieveTransferParameters` to confirm that the domain is unlocked and to get values needed to build a call to this method. A successful call creates a `Registration` resource in state `TRANSFER_PENDING`. It can take several days to complete the transfer process. The registrant can often speed up this process by approving the transfer through the current registrar, either by clicking a link in an email from the registrar or by visiting the registrar's website. A few minutes after transfer approval, the resource transitions to state `ACTIVE`, indicating that the transfer was successful. If the transfer is rejected or the request expires without being approved, the resource can end up in state `TRANSFER_FAILED`. If transfer fails, you can safely delete the resource and retry the transfer."##), + Some(r##"Transfers a domain name from another registrar to Cloud Domains. 
For domains already managed by [Google Domains](https://domains.google/), use `ImportDomain` instead. Before calling this method, go to the domain's current registrar to unlock the domain for transfer and retrieve the domain's transfer authorization code. Then call `RetrieveTransferParameters` to confirm that the domain is unlocked and to get values needed to build a call to this method. A successful call creates a `Registration` resource in state `TRANSFER_PENDING`. It can take several days to complete the transfer process. The registrant can often speed up this process by approving the transfer through the current registrar, either by clicking a link in an email from the registrar or by visiting the registrar's website. A few minutes after transfer approval, the resource transitions to state `ACTIVE`, indicating that the transfer was successful. If the transfer is rejected or the request expires without being approved, the resource can end up in state `TRANSFER_FAILED`. If transfer fails, you can safely delete the resource and retry the transfer."##), "Details at http://byron.github.io/google-apis-rs/google_domains1_cli/projects_locations-registrations-transfer", vec![ (Some(r##"parent"##), @@ -2635,7 +2841,7 @@ async fn main() { let mut app = App::new("domains1") .author("Sebastian Thiel ") - .version("4.0.1+20220128") + .version("5.0.2+20230105") .about("Enables management and configuration of domain names.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_domains1_cli") .arg(Arg::with_name("url") diff --git a/gen/domains1/Cargo.toml b/gen/domains1/Cargo.toml index 64476ada32..7632d29401 100644 --- a/gen/domains1/Cargo.toml +++ b/gen/domains1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-domains1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Domains (protocol v1)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/domains1" homepage = "https://cloud.google.com/domains/" -documentation = "https://docs.rs/google-domains1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-domains1/5.0.2+20230105" license = "MIT" keywords = ["domains", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/domains1/README.md b/gen/domains1/README.md index 39c96aad84..105aac2f2b 100644 --- a/gen/domains1/README.md +++ b/gen/domains1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-domains1` library allows access to all features of the *Google Cloud Domains* service. -This documentation was generated from *Cloud Domains* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *domains:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Domains* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *domains:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Domains* *v1* API can be found at the [official documentation site](https://cloud.google.com/domains/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/CloudDomains) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/CloudDomains) ... 
* projects - * [*locations get*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationListCall), [*locations operations get*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationOperationListCall), [*locations registrations configure contact settings*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationConfigureContactSettingCall), [*locations registrations configure dns settings*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationConfigureDnsSettingCall), [*locations registrations configure management settings*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationConfigureManagementSettingCall), [*locations registrations delete*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationDeleteCall), [*locations registrations export*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationExportCall), [*locations registrations get*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationGetCall), [*locations registrations get iam policy*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationGetIamPolicyCall), [*locations registrations import*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationImportCall), [*locations registrations list*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationListCall), 
[*locations registrations patch*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationPatchCall), [*locations registrations register*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationRegisterCall), [*locations registrations reset authorization code*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationResetAuthorizationCodeCall), [*locations registrations retrieve authorization code*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationRetrieveAuthorizationCodeCall), [*locations registrations retrieve importable domains*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationRetrieveImportableDomainCall), [*locations registrations retrieve register parameters*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationRetrieveRegisterParameterCall), [*locations registrations retrieve transfer parameters*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationRetrieveTransferParameterCall), [*locations registrations search domains*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationSearchDomainCall), [*locations registrations set iam policy*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationSetIamPolicyCall), [*locations registrations test iam permissions*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationTestIamPermissionCall) and [*locations registrations transfer*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/api::ProjectLocationRegistrationTransferCall) + * [*locations 
get*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationListCall), [*locations operations get*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationOperationListCall), [*locations registrations configure contact settings*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationConfigureContactSettingCall), [*locations registrations configure dns settings*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationConfigureDnsSettingCall), [*locations registrations configure management settings*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationConfigureManagementSettingCall), [*locations registrations delete*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationDeleteCall), [*locations registrations export*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationExportCall), [*locations registrations get*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationGetCall), [*locations registrations get iam policy*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationGetIamPolicyCall), [*locations registrations import*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationImportCall), [*locations registrations list*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationListCall), [*locations registrations 
patch*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationPatchCall), [*locations registrations register*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationRegisterCall), [*locations registrations reset authorization code*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationResetAuthorizationCodeCall), [*locations registrations retrieve authorization code*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationRetrieveAuthorizationCodeCall), [*locations registrations retrieve importable domains*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationRetrieveImportableDomainCall), [*locations registrations retrieve register parameters*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationRetrieveRegisterParameterCall), [*locations registrations retrieve transfer parameters*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationRetrieveTransferParameterCall), [*locations registrations search domains*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationSearchDomainCall), [*locations registrations set iam policy*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationSetIamPolicyCall), [*locations registrations test iam permissions*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationTestIamPermissionCall) and [*locations registrations transfer*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/api::ProjectLocationRegistrationTransferCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/CloudDomains)** +* **[Hub](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/CloudDomains)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::CallBuilder) -* **[Resources](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::CallBuilder) +* **[Resources](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::Part)** + * **[Parts](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::Delegate) to the -[Method Builder](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::Delegate) to the +[Method Builder](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::RequestValue) and -[decodable](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::RequestValue) and +[decodable](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-domains1/5.0.2-beta-1+20230105/google_domains1/client::RequestValue) are moved +* [request values](https://docs.rs/google-domains1/5.0.2+20230105/google_domains1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/domains1/src/api.rs b/gen/domains1/src/api.rs index ca84e2ce81..3fef7c9376 100644 --- a/gen/domains1/src/api.rs +++ b/gen/domains1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudDomains { CloudDomains { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://domains.googleapis.com/".to_string(), _root_url: "https://domains.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudDomains { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/domains1/src/client.rs b/gen/domains1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/domains1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/domains1/src/lib.rs b/gen/domains1/src/lib.rs index be656816f3..05ad89886c 100644 --- a/gen/domains1/src/lib.rs +++ b/gen/domains1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Domains* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *domains:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Domains* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *domains:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Domains* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/domains/). diff --git a/gen/domains1_beta1-cli/Cargo.toml b/gen/domains1_beta1-cli/Cargo.toml index e1533d956d..36104a5230 100644 --- a/gen/domains1_beta1-cli/Cargo.toml +++ b/gen/domains1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-domains1_beta1-cli" -version = "4.0.1+20220128" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Domains (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/domains1_beta1-cli" @@ -20,13 +20,13 @@ name = "domains1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-domains1_beta1] path = "../domains1_beta1" -version = "4.0.1+20220128" +version = "5.0.2+20230105" + diff --git a/gen/domains1_beta1-cli/README.md b/gen/domains1_beta1-cli/README.md index 4b82540922..6a7f82a9c0 100644 --- a/gen/domains1_beta1-cli/README.md +++ b/gen/domains1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Domains* API at revision *20220128*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Domains* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash domains1-beta1 [options] @@ -41,11 +41,13 @@ domains1-beta1 [options] locations-registrations-export (-r )... [-p ]... [-o ] locations-registrations-get [-p ]... [-o ] locations-registrations-get-iam-policy [-p ]... [-o ] + locations-registrations-import (-r )... [-p ]... [-o ] locations-registrations-list [-p ]... [-o ] locations-registrations-patch (-r )... [-p ]... [-o ] locations-registrations-register (-r )... [-p ]... [-o ] locations-registrations-reset-authorization-code (-r )... [-p ]... [-o ] locations-registrations-retrieve-authorization-code [-p ]... [-o ] + locations-registrations-retrieve-importable-domains [-p ]... [-o ] locations-registrations-retrieve-register-parameters [-p ]... [-o ] locations-registrations-retrieve-transfer-parameters [-p ]... [-o ] locations-registrations-search-domains [-p ]... 
[-o ] diff --git a/gen/domains1_beta1-cli/mkdocs.yml b/gen/domains1_beta1-cli/mkdocs.yml index bb30250470..b9a4edbc7d 100644 --- a/gen/domains1_beta1-cli/mkdocs.yml +++ b/gen/domains1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Domains v4.0.1+20220128 +site_name: Cloud Domains v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-domains1_beta1-cli site_description: A complete library to interact with Cloud Domains (protocol v1beta1) @@ -7,30 +7,33 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/domains1_beta1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-registrations-configure-contact-settings.md', 'Projects', 'Locations Registrations Configure Contact Settings'] -- ['projects_locations-registrations-configure-dns-settings.md', 'Projects', 'Locations Registrations Configure Dns Settings'] -- ['projects_locations-registrations-configure-management-settings.md', 'Projects', 'Locations Registrations Configure Management Settings'] -- ['projects_locations-registrations-delete.md', 'Projects', 'Locations Registrations Delete'] -- ['projects_locations-registrations-export.md', 'Projects', 'Locations Registrations Export'] -- ['projects_locations-registrations-get.md', 'Projects', 'Locations Registrations Get'] -- ['projects_locations-registrations-get-iam-policy.md', 'Projects', 'Locations Registrations Get Iam Policy'] -- ['projects_locations-registrations-list.md', 'Projects', 'Locations Registrations List'] -- ['projects_locations-registrations-patch.md', 'Projects', 'Locations Registrations Patch'] -- ['projects_locations-registrations-register.md', 'Projects', 'Locations 
Registrations Register'] -- ['projects_locations-registrations-reset-authorization-code.md', 'Projects', 'Locations Registrations Reset Authorization Code'] -- ['projects_locations-registrations-retrieve-authorization-code.md', 'Projects', 'Locations Registrations Retrieve Authorization Code'] -- ['projects_locations-registrations-retrieve-register-parameters.md', 'Projects', 'Locations Registrations Retrieve Register Parameters'] -- ['projects_locations-registrations-retrieve-transfer-parameters.md', 'Projects', 'Locations Registrations Retrieve Transfer Parameters'] -- ['projects_locations-registrations-search-domains.md', 'Projects', 'Locations Registrations Search Domains'] -- ['projects_locations-registrations-set-iam-policy.md', 'Projects', 'Locations Registrations Set Iam Policy'] -- ['projects_locations-registrations-test-iam-permissions.md', 'Projects', 'Locations Registrations Test Iam Permissions'] -- ['projects_locations-registrations-transfer.md', 'Projects', 'Locations Registrations Transfer'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Registrations Configure Contact Settings': 'projects_locations-registrations-configure-contact-settings.md' + - 'Locations Registrations Configure Dns Settings': 'projects_locations-registrations-configure-dns-settings.md' + - 'Locations Registrations Configure Management Settings': 'projects_locations-registrations-configure-management-settings.md' + - 'Locations Registrations Delete': 'projects_locations-registrations-delete.md' + - 'Locations Registrations Export': 'projects_locations-registrations-export.md' + - 'Locations Registrations Get': 'projects_locations-registrations-get.md' + - 'Locations Registrations Get Iam Policy': 
'projects_locations-registrations-get-iam-policy.md' + - 'Locations Registrations Import': 'projects_locations-registrations-import.md' + - 'Locations Registrations List': 'projects_locations-registrations-list.md' + - 'Locations Registrations Patch': 'projects_locations-registrations-patch.md' + - 'Locations Registrations Register': 'projects_locations-registrations-register.md' + - 'Locations Registrations Reset Authorization Code': 'projects_locations-registrations-reset-authorization-code.md' + - 'Locations Registrations Retrieve Authorization Code': 'projects_locations-registrations-retrieve-authorization-code.md' + - 'Locations Registrations Retrieve Importable Domains': 'projects_locations-registrations-retrieve-importable-domains.md' + - 'Locations Registrations Retrieve Register Parameters': 'projects_locations-registrations-retrieve-register-parameters.md' + - 'Locations Registrations Retrieve Transfer Parameters': 'projects_locations-registrations-retrieve-transfer-parameters.md' + - 'Locations Registrations Search Domains': 'projects_locations-registrations-search-domains.md' + - 'Locations Registrations Set Iam Policy': 'projects_locations-registrations-set-iam-policy.md' + - 'Locations Registrations Test Iam Permissions': 'projects_locations-registrations-test-iam-permissions.md' + - 'Locations Registrations Transfer': 'projects_locations-registrations-transfer.md' theme: readthedocs diff --git a/gen/domains1_beta1-cli/src/client.rs b/gen/domains1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/domains1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use 
std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/domains1_beta1-cli/src/main.rs b/gen/domains1_beta1-cli/src/main.rs index 3e6349483a..94451e8f69 100644 --- a/gen/domains1_beta1-cli/src/main.rs +++ b/gen/domains1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_domains1_beta1::{api, Error, oauth2}; +use google_domains1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -227,7 +226,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -780,7 +779,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -829,6 +828,92 @@ where } } + async fn _projects_locations_registrations_import(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "domain-name" => Some(("domainName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["domain-name", "labels"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ImportDomainRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_registrations_import(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + 
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_registrations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_registrations_list(opt.value_of("parent").unwrap_or("")); @@ -839,7 +924,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1011,10 +1096,12 @@ where "pending-contact-settings.technical-contact.postal-address.revision" => Some(("pendingContactSettings.technicalContact.postalAddress.revision", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"pending-contact-settings.technical-contact.postal-address.sorting-code" => Some(("pendingContactSettings.technicalContact.postalAddress.sortingCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pending-contact-settings.technical-contact.postal-address.sublocality" => Some(("pendingContactSettings.technicalContact.postalAddress.sublocality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "register-failure-reason" => Some(("registerFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "supported-privacy" => Some(("supportedPrivacy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "transfer-failure-reason" => Some(("transferFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "contact-settings", "create-time", "custom-dns", "dns-settings", "domain-name", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "registrant-contact", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-lock-state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "contact-settings", "create-time", "custom-dns", "dns-settings", "domain-name", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "organization", "pending-contact-settings", "phone-number", 
"postal-address", "postal-code", "privacy", "recipients", "region-code", "register-failure-reason", "registrant-contact", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-failure-reason", "transfer-lock-state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1029,7 +1116,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1200,14 +1287,16 @@ where "registration.pending-contact-settings.technical-contact.postal-address.revision" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.revision", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "registration.pending-contact-settings.technical-contact.postal-address.sorting-code" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.sortingCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.pending-contact-settings.technical-contact.postal-address.sublocality" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.sublocality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "registration.register-failure-reason" => Some(("registration.registerFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.state" => Some(("registration.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.supported-privacy" => Some(("registration.supportedPrivacy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "registration.transfer-failure-reason" => 
Some(("registration.transferFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "yearly-price.currency-code" => Some(("yearlyPrice.currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "yearly-price.nanos" => Some(("yearlyPrice.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "yearly-price.units" => Some(("yearlyPrice.units", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "contact-notices", "contact-settings", "create-time", "currency-code", "custom-dns", "dns-settings", "domain-name", "domain-notices", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "nanos", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "registrant-contact", "registration", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-lock-state", "units", "validate-only", "yearly-price"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "contact-notices", "contact-settings", "create-time", "currency-code", "custom-dns", "dns-settings", "domain-name", "domain-notices", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "nanos", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "register-failure-reason", "registrant-contact", "registration", 
"renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-failure-reason", "transfer-lock-state", "units", "validate-only", "yearly-price"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1403,6 +1492,65 @@ where } } + async fn _projects_locations_registrations_retrieve_importable_domains(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_registrations_retrieve_importable_domains(opt.value_of("location").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + 
Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_registrations_retrieve_register_parameters(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_registrations_retrieve_register_parameters(opt.value_of("location").unwrap_or("")); @@ -1865,14 +2013,16 @@ where "registration.pending-contact-settings.technical-contact.postal-address.revision" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.revision", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "registration.pending-contact-settings.technical-contact.postal-address.sorting-code" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.sortingCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.pending-contact-settings.technical-contact.postal-address.sublocality" => Some(("registration.pendingContactSettings.technicalContact.postalAddress.sublocality", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "registration.register-failure-reason" => Some(("registration.registerFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.state" => Some(("registration.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "registration.supported-privacy" => Some(("registration.supportedPrivacy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "registration.transfer-failure-reason" => Some(("registration.transferFailureReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "validate-only" => 
Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "yearly-price.currency-code" => Some(("yearlyPrice.currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "yearly-price.nanos" => Some(("yearlyPrice.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "yearly-price.units" => Some(("yearlyPrice.units", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "authorization-code", "code", "contact-notices", "contact-settings", "create-time", "currency-code", "custom-dns", "dns-settings", "domain-name", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "nanos", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "registrant-contact", "registration", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", "technical-contact", "transfer-lock-state", "units", "validate-only", "yearly-price"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address-lines", "admin-contact", "administrative-area", "authorization-code", "code", "contact-notices", "contact-settings", "create-time", "currency-code", "custom-dns", "dns-settings", "domain-name", "ds-state", "email", "expire-time", "fax-number", "google-domains-dns", "issues", "labels", "language-code", "locality", "management-settings", "name", "name-servers", "nanos", "organization", "pending-contact-settings", "phone-number", "postal-address", "postal-code", "privacy", "recipients", "region-code", "register-failure-reason", "registrant-contact", "registration", "renewal-method", "revision", "sorting-code", "state", "sublocality", "supported-privacy", 
"technical-contact", "transfer-failure-reason", "transfer-lock-state", "units", "validate-only", "yearly-price"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1972,6 +2122,9 @@ where ("locations-registrations-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_registrations_get_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-registrations-import", Some(opt)) => { + call_result = self._projects_locations_registrations_import(opt, dry_run, &mut err).await; + }, ("locations-registrations-list", Some(opt)) => { call_result = self._projects_locations_registrations_list(opt, dry_run, &mut err).await; }, @@ -1987,6 +2140,9 @@ where ("locations-registrations-retrieve-authorization-code", Some(opt)) => { call_result = self._projects_locations_registrations_retrieve_authorization_code(opt, dry_run, &mut err).await; }, + ("locations-registrations-retrieve-importable-domains", Some(opt)) => { + call_result = self._projects_locations_registrations_retrieve_importable_domains(opt, dry_run, &mut err).await; + }, ("locations-registrations-retrieve-register-parameters", Some(opt)) => { call_result = self._projects_locations_registrations_retrieve_register_parameters(opt, dry_run, &mut err).await; }, @@ -2084,7 +2240,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-list', 'locations-operations-get', 'locations-operations-list', 'locations-registrations-configure-contact-settings', 'locations-registrations-configure-dns-settings', 'locations-registrations-configure-management-settings', 'locations-registrations-delete', 'locations-registrations-export', 'locations-registrations-get', 'locations-registrations-get-iam-policy', 'locations-registrations-list', 'locations-registrations-patch', 'locations-registrations-register', 'locations-registrations-reset-authorization-code', 
'locations-registrations-retrieve-authorization-code', 'locations-registrations-retrieve-register-parameters', 'locations-registrations-retrieve-transfer-parameters', 'locations-registrations-search-domains', 'locations-registrations-set-iam-policy', 'locations-registrations-test-iam-permissions' and 'locations-registrations-transfer'", vec![ + ("projects", "methods: 'locations-get', 'locations-list', 'locations-operations-get', 'locations-operations-list', 'locations-registrations-configure-contact-settings', 'locations-registrations-configure-dns-settings', 'locations-registrations-configure-management-settings', 'locations-registrations-delete', 'locations-registrations-export', 'locations-registrations-get', 'locations-registrations-get-iam-policy', 'locations-registrations-import', 'locations-registrations-list', 'locations-registrations-patch', 'locations-registrations-register', 'locations-registrations-reset-authorization-code', 'locations-registrations-retrieve-authorization-code', 'locations-registrations-retrieve-importable-domains', 'locations-registrations-retrieve-register-parameters', 'locations-registrations-retrieve-transfer-parameters', 'locations-registrations-search-domains', 'locations-registrations-set-iam-policy', 'locations-registrations-test-iam-permissions' and 'locations-registrations-transfer'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_domains1_beta1_cli/projects_locations-get", @@ -2335,7 +2491,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2345,6 +2501,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-registrations-import", + Some(r##"Imports a domain name from [Google Domains](https://domains.google/) for use in Cloud Domains. To transfer a domain from another registrar, use the `TransferDomain` method instead. Since individual users can own domains in Google Domains, the calling user must have ownership permission on the domain."##), + "Details at http://byron.github.io/google-apis-rs/google_domains1_beta1_cli/projects_locations-registrations-import", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the Registration. Must be in the format `projects/*/locations/*`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2473,6 +2657,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-registrations-retrieve-importable-domains", + Some(r##"Lists domain names from [Google Domains](https://domains.google/) that can be imported to Cloud Domains using the `ImportDomain` method. Since individual users can own domains in Google Domains, the list of domains returned depends on the individual user making the call. 
Domains already managed by Cloud Domains are not returned."##), + "Details at http://byron.github.io/google-apis-rs/google_domains1_beta1_cli/projects_locations-registrations-retrieve-importable-domains", + vec![ + (Some(r##"location"##), + None, + Some(r##"Required. The location. Must be in the format `projects/*/locations/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2502,7 +2708,7 @@ async fn main() { Some(false)), ]), ("locations-registrations-retrieve-transfer-parameters", - Some(r##"Gets parameters needed to transfer a domain name from another registrar to Cloud Domains. For domains managed by Google Domains, transferring to Cloud Domains is not supported. Use the returned values to call `TransferDomain`."##), + Some(r##"Gets parameters needed to transfer a domain name from another registrar to Cloud Domains. For domains already managed by [Google Domains](https://domains.google/), use `ImportDomain` instead. Use the returned values to call `TransferDomain`."##), "Details at http://byron.github.io/google-apis-rs/google_domains1_beta1_cli/projects_locations-registrations-retrieve-transfer-parameters", vec![ (Some(r##"location"##), @@ -2551,7 +2757,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2579,7 +2785,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2602,7 +2808,7 @@ async fn main() { Some(false)), ]), ("locations-registrations-transfer", - Some(r##"Transfers a domain name from another registrar to Cloud Domains. For domains managed by Google Domains, transferring to Cloud Domains is not supported. Before calling this method, go to the domain's current registrar to unlock the domain for transfer and retrieve the domain's transfer authorization code. Then call `RetrieveTransferParameters` to confirm that the domain is unlocked and to get values needed to build a call to this method. A successful call creates a `Registration` resource in state `TRANSFER_PENDING`. It can take several days to complete the transfer process. The registrant can often speed up this process by approving the transfer through the current registrar, either by clicking a link in an email from the registrar or by visiting the registrar's website. A few minutes after transfer approval, the resource transitions to state `ACTIVE`, indicating that the transfer was successful. If the transfer is rejected or the request expires without being approved, the resource can end up in state `TRANSFER_FAILED`. If transfer fails, you can safely delete the resource and retry the transfer."##), + Some(r##"Transfers a domain name from another registrar to Cloud Domains. 
For domains already managed by [Google Domains](https://domains.google/), use `ImportDomain` instead. Before calling this method, go to the domain's current registrar to unlock the domain for transfer and retrieve the domain's transfer authorization code. Then call `RetrieveTransferParameters` to confirm that the domain is unlocked and to get values needed to build a call to this method. A successful call creates a `Registration` resource in state `TRANSFER_PENDING`. It can take several days to complete the transfer process. The registrant can often speed up this process by approving the transfer through the current registrar, either by clicking a link in an email from the registrar or by visiting the registrar's website. A few minutes after transfer approval, the resource transitions to state `ACTIVE`, indicating that the transfer was successful. If the transfer is rejected or the request expires without being approved, the resource can end up in state `TRANSFER_FAILED`. If transfer fails, you can safely delete the resource and retry the transfer."##), "Details at http://byron.github.io/google-apis-rs/google_domains1_beta1_cli/projects_locations-registrations-transfer", vec![ (Some(r##"parent"##), @@ -2635,7 +2841,7 @@ async fn main() { let mut app = App::new("domains1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220128") + .version("5.0.2+20230105") .about("Enables management and configuration of domain names.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_domains1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/domains1_beta1/Cargo.toml b/gen/domains1_beta1/Cargo.toml index a16576a229..db5acb3e0a 100644 --- a/gen/domains1_beta1/Cargo.toml +++ b/gen/domains1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-domains1_beta1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Domains 
(protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/domains1_beta1" homepage = "https://cloud.google.com/domains/" -documentation = "https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-domains1_beta1/5.0.2+20230105" license = "MIT" keywords = ["domains", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/domains1_beta1/README.md b/gen/domains1_beta1/README.md index 93599ba4b4..2fd4ad13a5 100644 --- a/gen/domains1_beta1/README.md +++ b/gen/domains1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-domains1_beta1` library allows access to all features of the *Google Cloud Domains* service. -This documentation was generated from *Cloud Domains* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *domains:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Domains* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *domains:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Domains* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/domains/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/CloudDomains) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/CloudDomains) ... 
* projects - * [*locations get*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationListCall), [*locations operations get*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationOperationListCall), [*locations registrations configure contact settings*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationConfigureContactSettingCall), [*locations registrations configure dns settings*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationConfigureDnsSettingCall), [*locations registrations configure management settings*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationConfigureManagementSettingCall), [*locations registrations delete*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationDeleteCall), [*locations registrations export*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationExportCall), [*locations registrations get*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationGetCall), [*locations registrations get iam policy*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationGetIamPolicyCall), [*locations registrations import*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationImportCall), 
[*locations registrations list*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationListCall), [*locations registrations patch*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationPatchCall), [*locations registrations register*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRegisterCall), [*locations registrations reset authorization code*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationResetAuthorizationCodeCall), [*locations registrations retrieve authorization code*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRetrieveAuthorizationCodeCall), [*locations registrations retrieve importable domains*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRetrieveImportableDomainCall), [*locations registrations retrieve register parameters*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRetrieveRegisterParameterCall), [*locations registrations retrieve transfer parameters*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRetrieveTransferParameterCall), [*locations registrations search domains*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationSearchDomainCall), [*locations registrations set iam policy*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationSetIamPolicyCall), [*locations registrations test iam 
permissions*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationTestIamPermissionCall) and [*locations registrations transfer*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/api::ProjectLocationRegistrationTransferCall) + * [*locations get*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationListCall), [*locations operations get*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationOperationListCall), [*locations registrations configure contact settings*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationConfigureContactSettingCall), [*locations registrations configure dns settings*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationConfigureDnsSettingCall), [*locations registrations configure management settings*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationConfigureManagementSettingCall), [*locations registrations delete*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationDeleteCall), [*locations registrations export*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationExportCall), [*locations registrations get*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationGetCall), [*locations registrations get iam 
policy*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationGetIamPolicyCall), [*locations registrations import*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationImportCall), [*locations registrations list*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationListCall), [*locations registrations patch*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationPatchCall), [*locations registrations register*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRegisterCall), [*locations registrations reset authorization code*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationResetAuthorizationCodeCall), [*locations registrations retrieve authorization code*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRetrieveAuthorizationCodeCall), [*locations registrations retrieve importable domains*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRetrieveImportableDomainCall), [*locations registrations retrieve register parameters*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRetrieveRegisterParameterCall), [*locations registrations retrieve transfer parameters*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationRetrieveTransferParameterCall), [*locations registrations search domains*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationSearchDomainCall), [*locations registrations set iam 
policy*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationSetIamPolicyCall), [*locations registrations test iam permissions*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationTestIamPermissionCall) and [*locations registrations transfer*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/api::ProjectLocationRegistrationTransferCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/CloudDomains)** +* **[Hub](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/CloudDomains)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::Part)** + * 
**[Parts](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-domains1_beta1/5.0.2-beta-1+20230105/google_domains1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-domains1_beta1/5.0.2+20230105/google_domains1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/domains1_beta1/src/api.rs b/gen/domains1_beta1/src/api.rs index 089735201a..5960b0a7e0 100644 --- a/gen/domains1_beta1/src/api.rs +++ b/gen/domains1_beta1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudDomains { CloudDomains { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://domains.googleapis.com/".to_string(), _root_url: "https://domains.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudDomains { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/domains1_beta1/src/client.rs b/gen/domains1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/domains1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/domains1_beta1/src/lib.rs b/gen/domains1_beta1/src/lib.rs index 37ca6e2cda..9c10165ea8 100644 --- a/gen/domains1_beta1/src/lib.rs +++ b/gen/domains1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Domains* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *domains:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Domains* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *domains:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Domains* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/domains/). diff --git a/gen/domainsrdap1-cli/Cargo.toml b/gen/domainsrdap1-cli/Cargo.toml index c447e05d17..cc373aee99 100644 --- a/gen/domainsrdap1-cli/Cargo.toml +++ b/gen/domainsrdap1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-domainsrdap1-cli" -version = "4.0.1+20220307" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Domains RDAP (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/domainsrdap1-cli" @@ -20,13 +20,13 @@ name = "domainsrdap1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-domainsrdap1] path = "../domainsrdap1" -version = "4.0.1+20220307" +version = "5.0.2+20230124" + diff --git a/gen/domainsrdap1-cli/README.md b/gen/domainsrdap1-cli/README.md index be95958308..2c9a62e336 100644 --- a/gen/domainsrdap1-cli/README.md +++ b/gen/domainsrdap1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Domains RDAP* API at revision *20220307*. The CLI is at version *4.0.1*. +This documentation was generated from the *Domains RDAP* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash domainsrdap1 [options] diff --git a/gen/domainsrdap1-cli/mkdocs.yml b/gen/domainsrdap1-cli/mkdocs.yml index db66306e8e..bf756c5f3a 100644 --- a/gen/domainsrdap1-cli/mkdocs.yml +++ b/gen/domainsrdap1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Domains RDAP v4.0.1+20220307 +site_name: Domains RDAP v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-domainsrdap1-cli site_description: A complete library to interact with Domains RDAP (protocol v1) @@ -7,18 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/domainsrdap1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['autnum_get.md', 'Autnum', 'Get'] -- ['domain_get.md', 'Domain', 'Get'] -- ['entity_get.md', 'Entity', 'Get'] -- ['ip_get.md', 'Ip', 'Get'] -- ['methods_get-domains.md', 'Methods', 'Get Domains'] -- ['methods_get-entities.md', 'Methods', 'Get Entities'] -- ['methods_get-help.md', 'Methods', 'Get Help'] -- ['methods_get-ip.md', 'Methods', 'Get Ip'] -- ['methods_get-nameservers.md', 'Methods', 'Get Nameservers'] -- ['nameserver_get.md', 'Nameserver', 'Get'] +nav: +- Home: 'index.md' +- 'Autnum': + - 'Get': 'autnum_get.md' +- 'Domain': + - 'Get': 'domain_get.md' +- 'Entity': + - 'Get': 'entity_get.md' +- 'Ip': + - 'Get': 'ip_get.md' +- 'Methods': + - 'Get Domains': 'methods_get-domains.md' + - 'Get Entities': 'methods_get-entities.md' + - 'Get Help': 'methods_get-help.md' + - 'Get Ip': 'methods_get-ip.md' + - 'Get Nameservers': 'methods_get-nameservers.md' +- 'Nameserver': + - 'Get': 'nameserver_get.md' theme: readthedocs diff --git a/gen/domainsrdap1-cli/src/client.rs b/gen/domainsrdap1-cli/src/client.rs deleted file mode 100644 index 
0ece418e7d..0000000000 --- a/gen/domainsrdap1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - 
Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } 
else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => 
Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match 
UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - 
-#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) 
=> { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/domainsrdap1-cli/src/main.rs b/gen/domainsrdap1-cli/src/main.rs index c8376c1a74..abdb18aa12 100644 --- a/gen/domainsrdap1-cli/src/main.rs +++ b/gen/domainsrdap1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_domainsrdap1::{api, Error, oauth2}; +use google_domainsrdap1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -915,7 +914,7 @@ async fn main() { let mut app = App::new("domainsrdap1") .author("Sebastian Thiel ") - .version("4.0.1+20220307") + .version("5.0.2+20230124") .about("Read-only public API that lets users search for information about domain names.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_domainsrdap1_cli") .arg(Arg::with_name("folder") diff --git a/gen/domainsrdap1/Cargo.toml b/gen/domainsrdap1/Cargo.toml index 2209deb737..2c1166dad4 100644 --- a/gen/domainsrdap1/Cargo.toml +++ b/gen/domainsrdap1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-domainsrdap1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Domains RDAP (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/domainsrdap1" homepage = "https://developers.google.com/domains/rdap/" -documentation = "https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-domainsrdap1/5.0.2+20230124" 
license = "MIT" keywords = ["domainsrdap", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/domainsrdap1/README.md b/gen/domainsrdap1/README.md index ae2fc0d2ea..9cd313886f 100644 --- a/gen/domainsrdap1/README.md +++ b/gen/domainsrdap1/README.md @@ -5,32 +5,32 @@ DO NOT EDIT ! --> The `google-domainsrdap1` library allows access to all features of the *Google Domains RDAP* service. -This documentation was generated from *Domains RDAP* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *domainsrdap:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Domains RDAP* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *domainsrdap:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Domains RDAP* *v1* API can be found at the [official documentation site](https://developers.google.com/domains/rdap/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/DomainsRDAP) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/DomainsRDAP) ... 
* autnum - * [*get*](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::AutnumGetCall) + * [*get*](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::AutnumGetCall) * domain - * [*get*](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::DomainGetCall) + * [*get*](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::DomainGetCall) * entity - * [*get*](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::EntityGetCall) + * [*get*](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::EntityGetCall) * ip - * [*get*](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::IpGetCall) + * [*get*](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::IpGetCall) * nameserver - * [*get*](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::NameserverGetCall) + * [*get*](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::NameserverGetCall) Other activities are ... 
-* [get domains](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::MethodGetDomainCall) -* [get entities](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::MethodGetEntityCall) -* [get help](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::MethodGetHelpCall) -* [get ip](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::MethodGetIpCall) -* [get nameservers](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/api::MethodGetNameserverCall) +* [get domains](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::MethodGetDomainCall) +* [get entities](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::MethodGetEntityCall) +* [get help](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::MethodGetHelpCall) +* [get ip](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::MethodGetIpCall) +* [get nameservers](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/api::MethodGetNameserverCall) @@ -38,17 +38,17 @@ Other activities are ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/DomainsRDAP)** +* **[Hub](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/DomainsRDAP)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::CallBuilder) -* **[Resources](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::CallBuilder) +* **[Resources](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::Part)** + * **[Parts](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -139,17 +139,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -159,29 +159,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::Delegate) to the -[Method Builder](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::Delegate) to the +[Method Builder](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::RequestValue) and -[decodable](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::RequestValue) and +[decodable](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-domainsrdap1/5.0.2-beta-1+20230124/google_domainsrdap1/client::RequestValue) are moved +* [request values](https://docs.rs/google-domainsrdap1/5.0.2+20230124/google_domainsrdap1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/domainsrdap1/src/api.rs b/gen/domainsrdap1/src/api.rs index bca7f3dcc9..a598286f2d 100644 --- a/gen/domainsrdap1/src/api.rs +++ b/gen/domainsrdap1/src/api.rs @@ -97,7 +97,7 @@ impl<'a, S> DomainsRDAP { DomainsRDAP { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://domainsrdap.googleapis.com/".to_string(), _root_url: "https://domainsrdap.googleapis.com/".to_string(), } @@ -123,7 +123,7 @@ impl<'a, S> DomainsRDAP { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/domainsrdap1/src/client.rs b/gen/domainsrdap1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/domainsrdap1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/domainsrdap1/src/lib.rs b/gen/domainsrdap1/src/lib.rs index b63f19ad0a..54431512d1 100644 --- a/gen/domainsrdap1/src/lib.rs +++ b/gen/domainsrdap1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Domains RDAP* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *domainsrdap:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Domains RDAP* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *domainsrdap:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Domains RDAP* *v1* API can be found at the //! [official documentation site](https://developers.google.com/domains/rdap/). diff --git a/gen/doubleclickbidmanager1-cli/Cargo.toml b/gen/doubleclickbidmanager1-cli/Cargo.toml index 503a375432..4fb91d1a46 100644 --- a/gen/doubleclickbidmanager1-cli/Cargo.toml +++ b/gen/doubleclickbidmanager1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-doubleclickbidmanager1-cli" -version = "4.0.1+20210323" +version = "5.0.2+20210323" authors = ["Sebastian Thiel "] description = "A complete library to interact with DoubleClick Bid Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclickbidmanager1-cli" @@ -20,13 +20,13 @@ name = "doubleclickbidmanager1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-doubleclickbidmanager1] path = "../doubleclickbidmanager1" -version = "4.0.1+20210323" +version = "5.0.2+20210323" + diff --git a/gen/doubleclickbidmanager1-cli/README.md b/gen/doubleclickbidmanager1-cli/README.md index cd7366d062..d6b6c14e48 100644 --- 
a/gen/doubleclickbidmanager1-cli/README.md +++ b/gen/doubleclickbidmanager1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *DoubleClick Bid Manager* API at revision *20210323*. The CLI is at version *4.0.1*. +This documentation was generated from the *DoubleClick Bid Manager* API at revision *20210323*. The CLI is at version *5.0.2*. ```bash doubleclickbidmanager1 [options] diff --git a/gen/doubleclickbidmanager1-cli/mkdocs.yml b/gen/doubleclickbidmanager1-cli/mkdocs.yml index 7e456f0c6c..3dcf35e727 100644 --- a/gen/doubleclickbidmanager1-cli/mkdocs.yml +++ b/gen/doubleclickbidmanager1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: DoubleClick Bid Manager v4.0.1+20210323 +site_name: DoubleClick Bid Manager v5.0.2+20210323 site_url: http://byron.github.io/google-apis-rs/google-doubleclickbidmanager1-cli site_description: A complete library to interact with DoubleClick Bid Manager (protocol v1) @@ -7,17 +7,21 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclickbidma docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['lineitems_downloadlineitems.md', 'Lineitems', 'Downloadlineitems'] -- ['lineitems_uploadlineitems.md', 'Lineitems', 'Uploadlineitems'] -- ['queries_createquery.md', 'Queries', 'Createquery'] -- ['queries_deletequery.md', 'Queries', 'Deletequery'] -- ['queries_getquery.md', 'Queries', 'Getquery'] -- ['queries_listqueries.md', 'Queries', 'Listqueries'] -- ['queries_runquery.md', 'Queries', 'Runquery'] -- ['reports_listreports.md', 'Reports', 'Listreports'] -- ['sdf_download.md', 'Sdf', 'Download'] +nav: +- Home: 'index.md' +- 'Lineitems': + - 'Downloadlineitems': 'lineitems_downloadlineitems.md' + - 'Uploadlineitems': 'lineitems_uploadlineitems.md' +- 'Queries': + - 'Createquery': 'queries_createquery.md' + - 'Deletequery': 'queries_deletequery.md' + - 'Getquery': 'queries_getquery.md' + - 
'Listqueries': 'queries_listqueries.md' + - 'Runquery': 'queries_runquery.md' +- 'Reports': + - 'Listreports': 'reports_listreports.md' +- 'Sdf': + - 'Download': 'sdf_download.md' theme: readthedocs diff --git a/gen/doubleclickbidmanager1-cli/src/client.rs b/gen/doubleclickbidmanager1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/doubleclickbidmanager1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/doubleclickbidmanager1-cli/src/main.rs b/gen/doubleclickbidmanager1-cli/src/main.rs index fdf246ae96..8aeb56208c 100644 --- a/gen/doubleclickbidmanager1-cli/src/main.rs +++ b/gen/doubleclickbidmanager1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_doubleclickbidmanager1::{api, Error, oauth2}; +use google_doubleclickbidmanager1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -1041,7 +1040,7 @@ async fn main() { let mut app = App::new("doubleclickbidmanager1") .author("Sebastian Thiel ") - .version("4.0.1+20210323") + .version("5.0.2+20210323") .about("DoubleClick Bid Manager API allows users to manage and create campaigns and reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_doubleclickbidmanager1_cli") .arg(Arg::with_name("url") diff --git a/gen/doubleclickbidmanager1/Cargo.toml b/gen/doubleclickbidmanager1/Cargo.toml index fc861ce265..2ff1549382 100644 --- a/gen/doubleclickbidmanager1/Cargo.toml +++ b/gen/doubleclickbidmanager1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-doubleclickbidmanager1" -version = "5.0.2-beta-1+20210323" +version = "5.0.2+20210323" authors = ["Sebastian Thiel "] description = "A complete library to interact with DoubleClick Bid Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclickbidmanager1" homepage = 
"https://developers.google.com/bid-manager/" -documentation = "https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323" +documentation = "https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323" license = "MIT" keywords = ["doubleclickbidmanage", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/doubleclickbidmanager1/README.md b/gen/doubleclickbidmanager1/README.md index 2c510e766d..abe182ba97 100644 --- a/gen/doubleclickbidmanager1/README.md +++ b/gen/doubleclickbidmanager1/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-doubleclickbidmanager1` library allows access to all features of the *Google DoubleClick Bid Manager* service. -This documentation was generated from *DoubleClick Bid Manager* crate version *5.0.2-beta-1+20210323*, where *20210323* is the exact revision of the *doubleclickbidmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *DoubleClick Bid Manager* crate version *5.0.2+20210323*, where *20210323* is the exact revision of the *doubleclickbidmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *DoubleClick Bid Manager* *v1* API can be found at the [official documentation site](https://developers.google.com/bid-manager/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/DoubleClickBidManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/DoubleClickBidManager) ... 
* lineitems - * [*downloadlineitems*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::LineitemDownloadlineitemCall) and [*uploadlineitems*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::LineitemUploadlineitemCall) -* [queries](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::Query) - * [*createquery*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::QueryCreatequeryCall), [*deletequery*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::QueryDeletequeryCall), [*getquery*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::QueryGetqueryCall), [*listqueries*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::QueryListqueryCall) and [*runquery*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::QueryRunqueryCall) -* [reports](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::Report) - * [*listreports*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::ReportListreportCall) + * [*downloadlineitems*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::LineitemDownloadlineitemCall) and [*uploadlineitems*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::LineitemUploadlineitemCall) +* [queries](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::Query) + * [*createquery*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::QueryCreatequeryCall), 
[*deletequery*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::QueryDeletequeryCall), [*getquery*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::QueryGetqueryCall), [*listqueries*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::QueryListqueryCall) and [*runquery*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::QueryRunqueryCall) +* [reports](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::Report) + * [*listreports*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::ReportListreportCall) * sdf - * [*download*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/api::SdfDownloadCall) + * [*download*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/api::SdfDownloadCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/DoubleClickBidManager)** +* **[Hub](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/DoubleClickBidManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::CallBuilder) +* **[Resources](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::Part)** + * **[Parts](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::Delegate) to the -[Method Builder](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::Delegate) to the +[Method Builder](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::RequestValue) and -[decodable](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::RequestValue) and +[decodable](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true.
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-doubleclickbidmanager1/5.0.2-beta-1+20210323/google_doubleclickbidmanager1/client::RequestValue) are moved +* [request values](https://docs.rs/google-doubleclickbidmanager1/5.0.2+20210323/google_doubleclickbidmanager1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/doubleclickbidmanager1/src/api.rs b/gen/doubleclickbidmanager1/src/api.rs index 40c9b4173a..39390c774e 100644 --- a/gen/doubleclickbidmanager1/src/api.rs +++ b/gen/doubleclickbidmanager1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> DoubleClickBidManager { DoubleClickBidManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://doubleclickbidmanager.googleapis.com/doubleclickbidmanager/v1/".to_string(), _root_url: "https://doubleclickbidmanager.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> DoubleClickBidManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/doubleclickbidmanager1/src/client.rs b/gen/doubleclickbidmanager1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/doubleclickbidmanager1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/doubleclickbidmanager1/src/lib.rs b/gen/doubleclickbidmanager1/src/lib.rs index a019046fdd..73bcd95198 100644 --- a/gen/doubleclickbidmanager1/src/lib.rs +++ b/gen/doubleclickbidmanager1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *DoubleClick Bid Manager* crate version *5.0.2-beta-1+20210323*, where *20210323* is the exact revision of the *doubleclickbidmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *DoubleClick Bid Manager* crate version *5.0.2+20210323*, where *20210323* is the exact revision of the *doubleclickbidmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *DoubleClick Bid Manager* *v1* API can be found at the //! [official documentation site](https://developers.google.com/bid-manager/). diff --git a/gen/doubleclickbidmanager1d1-cli/Cargo.toml b/gen/doubleclickbidmanager1d1-cli/Cargo.toml index 07e60699b5..4588fa5420 100644 --- a/gen/doubleclickbidmanager1d1-cli/Cargo.toml +++ b/gen/doubleclickbidmanager1d1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-doubleclickbidmanager1d1-cli" -version = "4.0.1+20220302" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with DoubleClick Bid Manager (protocol v1.1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclickbidmanager1d1-cli" @@ -20,13 +20,13 @@ name = "doubleclickbidmanager1d1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-doubleclickbidmanager1d1] path = "../doubleclickbidmanager1d1" -version = "4.0.1+20220302" +version = "5.0.2+20230117" + diff --git a/gen/doubleclickbidmanager1d1-cli/README.md 
b/gen/doubleclickbidmanager1d1-cli/README.md index 297893842a..694f7261ab 100644 --- a/gen/doubleclickbidmanager1d1-cli/README.md +++ b/gen/doubleclickbidmanager1d1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *DoubleClick Bid Manager* API at revision *20220302*. The CLI is at version *4.0.1*. +This documentation was generated from the *DoubleClick Bid Manager* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash doubleclickbidmanager1d1 [options] diff --git a/gen/doubleclickbidmanager1d1-cli/mkdocs.yml b/gen/doubleclickbidmanager1d1-cli/mkdocs.yml index 034b9deead..dee9b13fd5 100644 --- a/gen/doubleclickbidmanager1d1-cli/mkdocs.yml +++ b/gen/doubleclickbidmanager1d1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: DoubleClick Bid Manager v4.0.1+20220302 +site_name: DoubleClick Bid Manager v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-doubleclickbidmanager1d1-cli site_description: A complete library to interact with DoubleClick Bid Manager (protocol v1.1) @@ -7,14 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclickbidma docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['queries_createquery.md', 'Queries', 'Createquery'] -- ['queries_deletequery.md', 'Queries', 'Deletequery'] -- ['queries_getquery.md', 'Queries', 'Getquery'] -- ['queries_listqueries.md', 'Queries', 'Listqueries'] -- ['queries_runquery.md', 'Queries', 'Runquery'] -- ['reports_listreports.md', 'Reports', 'Listreports'] +nav: +- Home: 'index.md' +- 'Queries': + - 'Createquery': 'queries_createquery.md' + - 'Deletequery': 'queries_deletequery.md' + - 'Getquery': 'queries_getquery.md' + - 'Listqueries': 'queries_listqueries.md' + - 'Runquery': 'queries_runquery.md' +- 'Reports': + - 'Listreports': 'reports_listreports.md' theme: readthedocs diff --git a/gen/doubleclickbidmanager1d1-cli/src/client.rs 
b/gen/doubleclickbidmanager1d1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/doubleclickbidmanager1d1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/doubleclickbidmanager1d1-cli/src/main.rs b/gen/doubleclickbidmanager1d1-cli/src/main.rs index 6d3dea41d0..06127d2e70 100644 --- a/gen/doubleclickbidmanager1d1-cli/src/main.rs +++ b/gen/doubleclickbidmanager1d1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_doubleclickbidmanager1d1::{api, Error, oauth2}; +use google_doubleclickbidmanager1d1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -118,7 +117,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "asynchronous" => { - call = call.asynchronous(arg_from_str(value.unwrap_or("false"), err, "asynchronous", "boolean")); + call = call.asynchronous( value.map(|v| arg_from_str(v, err, "asynchronous", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -273,7 +272,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -365,7 +364,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "asynchronous" => { - call = call.asynchronous(arg_from_str(value.unwrap_or("false"), err, "asynchronous", "boolean")); + call = call.asynchronous( value.map(|v| arg_from_str(v, err, "asynchronous", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ 
-416,7 +415,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -707,7 +706,7 @@ async fn main() { let mut app = App::new("doubleclickbidmanager1d1") .author("Sebastian Thiel ") - .version("4.0.1+20220302") + .version("5.0.2+20230117") .about("DoubleClick Bid Manager API allows users to manage and create campaigns and reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_doubleclickbidmanager1d1_cli") .arg(Arg::with_name("url") diff --git a/gen/doubleclickbidmanager1d1/Cargo.toml b/gen/doubleclickbidmanager1d1/Cargo.toml index dd563d6dde..4c8ca0cc30 100644 --- a/gen/doubleclickbidmanager1d1/Cargo.toml +++ b/gen/doubleclickbidmanager1d1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-doubleclickbidmanager1d1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with DoubleClick Bid Manager (protocol v1.1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclickbidmanager1d1" homepage = "https://developers.google.com/bid-manager/" -documentation = "https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117" license = "MIT" keywords = ["doubleclickbidmanage", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/doubleclickbidmanager1d1/README.md b/gen/doubleclickbidmanager1d1/README.md index 2ffd409adf..49b6274881 100644 --- a/gen/doubleclickbidmanager1d1/README.md +++ b/gen/doubleclickbidmanager1d1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! 
--> The `google-doubleclickbidmanager1d1` library allows access to all features of the *Google DoubleClick Bid Manager* service. -This documentation was generated from *DoubleClick Bid Manager* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *doubleclickbidmanager:v1.1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *DoubleClick Bid Manager* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *doubleclickbidmanager:v1.1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *DoubleClick Bid Manager* *v1d1* API can be found at the [official documentation site](https://developers.google.com/bid-manager/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/DoubleClickBidManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/DoubleClickBidManager) ... 
-* [queries](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/api::Query) - * [*createquery*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/api::QueryCreatequeryCall), [*deletequery*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/api::QueryDeletequeryCall), [*getquery*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/api::QueryGetqueryCall), [*listqueries*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/api::QueryListqueryCall) and [*runquery*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/api::QueryRunqueryCall) -* [reports](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/api::Report) - * [*listreports*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/api::ReportListreportCall) +* [queries](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/api::Query) + * [*createquery*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/api::QueryCreatequeryCall), [*deletequery*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/api::QueryDeletequeryCall), [*getquery*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/api::QueryGetqueryCall), [*listqueries*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/api::QueryListqueryCall) and [*runquery*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/api::QueryRunqueryCall) +* 
[reports](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/api::Report) + * [*listreports*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/api::ReportListreportCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/DoubleClickBidManager)** +* **[Hub](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/DoubleClickBidManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::CallBuilder) -* **[Resources](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::CallBuilder) +* **[Resources](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::Part)** + * 
**[Parts](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::Delegate) to the -[Method Builder](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::Delegate) to the +[Method Builder](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::RequestValue) and -[decodable](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::RequestValue) and +[decodable](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2-beta-1+20230117/google_doubleclickbidmanager1d1/client::RequestValue) are moved +* [request values](https://docs.rs/google-doubleclickbidmanager1d1/5.0.2+20230117/google_doubleclickbidmanager1d1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/doubleclickbidmanager1d1/src/api.rs b/gen/doubleclickbidmanager1d1/src/api.rs index bd43ee134d..b28681d997 100644 --- a/gen/doubleclickbidmanager1d1/src/api.rs +++ b/gen/doubleclickbidmanager1d1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> DoubleClickBidManager { DoubleClickBidManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://doubleclickbidmanager.googleapis.com/doubleclickbidmanager/v1.1/".to_string(), _root_url: "https://doubleclickbidmanager.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> DoubleClickBidManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/doubleclickbidmanager1d1/src/client.rs b/gen/doubleclickbidmanager1d1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/doubleclickbidmanager1d1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/doubleclickbidmanager1d1/src/lib.rs b/gen/doubleclickbidmanager1d1/src/lib.rs index 6e7c58d1dc..3faab7857e 100644 --- a/gen/doubleclickbidmanager1d1/src/lib.rs +++ b/gen/doubleclickbidmanager1d1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *DoubleClick Bid Manager* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *doubleclickbidmanager:v1.1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *DoubleClick Bid Manager* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *doubleclickbidmanager:v1.1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *DoubleClick Bid Manager* *v1d1* API can be found at the //! [official documentation site](https://developers.google.com/bid-manager/). diff --git a/gen/doubleclicksearch2-cli/Cargo.toml b/gen/doubleclicksearch2-cli/Cargo.toml index eefa285630..52f9c484d2 100644 --- a/gen/doubleclicksearch2-cli/Cargo.toml +++ b/gen/doubleclicksearch2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-doubleclicksearch2-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Doubleclicksearch (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclicksearch2-cli" @@ -20,13 +20,13 @@ name = "doubleclicksearch2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-doubleclicksearch2] path = "../doubleclicksearch2" -version = "4.0.1+20220301" +version = "5.0.2+20230118" + diff --git a/gen/doubleclicksearch2-cli/README.md b/gen/doubleclicksearch2-cli/README.md index c7921365ec..89d2927439 100644 --- 
a/gen/doubleclicksearch2-cli/README.md +++ b/gen/doubleclicksearch2-cli/README.md @@ -25,12 +25,13 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Doubleclicksearch* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Doubleclicksearch* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash doubleclicksearch2 [options] conversion get [-p ]... [-o ] + get-by-customer-id [-p ]... [-o ] insert (-r )... [-p ]... [-o ] update (-r )... [-p ]... [-o ] update-availability (-r )... [-p ]... [-o ] @@ -38,6 +39,7 @@ doubleclicksearch2 [options] generate (-r )... [-p ]... [-o ] get [-p ]... [-o ] get-file [-p ]... [-o ] + get-id-mapping-file [-p ]... [-o ] request (-r )... [-p ]... [-o ] saved-columns list [-p ]... [-o ] diff --git a/gen/doubleclicksearch2-cli/mkdocs.yml b/gen/doubleclicksearch2-cli/mkdocs.yml index 2214f16d2e..231fad6e3b 100644 --- a/gen/doubleclicksearch2-cli/mkdocs.yml +++ b/gen/doubleclicksearch2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Doubleclicksearch v4.0.1+20220301 +site_name: Doubleclicksearch v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-doubleclicksearch2-cli site_description: A complete library to interact with Doubleclicksearch (protocol v2) @@ -7,17 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclicksearc docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['conversion_get.md', 'Conversion', 'Get'] -- ['conversion_insert.md', 'Conversion', 'Insert'] -- ['conversion_update.md', 'Conversion', 'Update'] -- ['conversion_update-availability.md', 'Conversion', 'Update Availability'] -- ['reports_generate.md', 'Reports', 'Generate'] -- ['reports_get.md', 'Reports', 'Get'] -- ['reports_get-file.md', 'Reports', 'Get File'] -- ['reports_request.md', 'Reports', 'Request'] -- ['saved-columns_list.md', 'Saved Columns', 'List'] +nav: 
+- Home: 'index.md' +- 'Conversion': + - 'Get': 'conversion_get.md' + - 'Get By Customer Id': 'conversion_get-by-customer-id.md' + - 'Insert': 'conversion_insert.md' + - 'Update': 'conversion_update.md' + - 'Update Availability': 'conversion_update-availability.md' +- 'Reports': + - 'Generate': 'reports_generate.md' + - 'Get': 'reports_get.md' + - 'Get File': 'reports_get-file.md' + - 'Get Id Mapping File': 'reports_get-id-mapping-file.md' + - 'Request': 'reports_request.md' +- 'Saved Columns': + - 'List': 'saved-columns_list.md' theme: readthedocs diff --git a/gen/doubleclicksearch2-cli/src/client.rs b/gen/doubleclicksearch2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/doubleclicksearch2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/doubleclicksearch2-cli/src/main.rs b/gen/doubleclicksearch2-cli/src/main.rs index 87f8daa95b..6afd98e4ac 100644 --- a/gen/doubleclicksearch2-cli/src/main.rs +++ b/gen/doubleclicksearch2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_doubleclicksearch2::{api, Error, oauth2}; +use google_doubleclicksearch2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,17 +60,20 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "customer-id" => { + call = call.customer_id(value.unwrap_or("")); + }, "criterion-id" => { - call = call.criterion_id(value.unwrap_or("")); + call = call.criterion_id( value.map(|v| arg_from_str(v, err, "criterion-id", "int64")).unwrap_or(-0)); }, "campaign-id" => { - call = call.campaign_id(value.unwrap_or("")); + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); }, "ad-id" => { - call = call.ad_id(value.unwrap_or("")); + call = call.ad_id( value.map(|v| arg_from_str(v, err, "ad-id", "int64")).unwrap_or(-0)); }, "ad-group-id" => { - call = call.ad_group_id(value.unwrap_or("")); + call = call.ad_group_id( value.map(|v| arg_from_str(v, err, "ad-group-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -86,7 +88,85 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); 
v.extend(self.gp.iter().map(|v|*v)); - v.extend(["ad-group-id", "ad-id", "campaign-id", "criterion-id"].iter().map(|v|*v)); + v.extend(["ad-group-id", "ad-id", "campaign-id", "criterion-id", "customer-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _conversion_get_by_customer_id(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let end_date: i32 = arg_from_str(&opt.value_of("end-date").unwrap_or(""), err, "", "integer"); + let row_count: i32 = arg_from_str(&opt.value_of("row-count").unwrap_or(""), err, "", "integer"); + let start_date: i32 = arg_from_str(&opt.value_of("start-date").unwrap_or(""), err, "", "integer"); + let start_row: u32 = arg_from_str(&opt.value_of("start-row").unwrap_or(""), err, "", "integer"); + let mut call = self.hub.conversion().get_by_customer_id(opt.value_of("customer-id").unwrap_or(""), end_date, row_count, start_date, start_row); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "engine-account-id" => { + call = 
call.engine_account_id( value.map(|v| arg_from_str(v, err, "engine-account-id", "int64")).unwrap_or(-0)); + }, + "criterion-id" => { + call = call.criterion_id( value.map(|v| arg_from_str(v, err, "criterion-id", "int64")).unwrap_or(-0)); + }, + "campaign-id" => { + call = call.campaign_id( value.map(|v| arg_from_str(v, err, "campaign-id", "int64")).unwrap_or(-0)); + }, + "agency-id" => { + call = call.agency_id( value.map(|v| arg_from_str(v, err, "agency-id", "int64")).unwrap_or(-0)); + }, + "advertiser-id" => { + call = call.advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); + }, + "ad-id" => { + call = call.ad_id( value.map(|v| arg_from_str(v, err, "ad-id", "int64")).unwrap_or(-0)); + }, + "ad-group-id" => { + call = call.ad_group_id( value.map(|v| arg_from_str(v, err, "ad-group-id", "int64")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["ad-group-id", "ad-id", "advertiser-id", "agency-id", "campaign-id", "criterion-id", "engine-account-id"].iter().map(|v|*v)); v } )); } } @@ -589,6 +669,68 @@ where } } + async fn _reports_get_id_mapping_file(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut download_mode = false; + let mut call = self.hub.reports().get_id_mapping_file(opt.value_of("agency-id").unwrap_or(""), opt.value_of("advertiser-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + if key == "alt" && 
value.unwrap_or("unset") == "media" { + download_mode = true; + } + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + if !download_mode { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + } else { + let bytes = hyper::body::to_bytes(response.into_body()).await.expect("a string as API currently is inefficient").to_vec(); + ostream.write_all(&bytes).expect("write to be complete"); + ostream.flush().expect("io to never fail which should really be fixed one day"); + } + Ok(()) + } + } + } + } + async fn _reports_request(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -755,6 +897,9 @@ where ("get", Some(opt)) => { call_result = self._conversion_get(opt, dry_run, &mut err).await; }, + ("get-by-customer-id", Some(opt)) => { + call_result = self._conversion_get_by_customer_id(opt, dry_run, &mut err).await; + }, ("insert", Some(opt)) => { call_result = self._conversion_insert(opt, dry_run, &mut 
err).await; }, @@ -781,6 +926,9 @@ where ("get-file", Some(opt)) => { call_result = self._reports_get_file(opt, dry_run, &mut err).await; }, + ("get-id-mapping-file", Some(opt)) => { + call_result = self._reports_get_id_mapping_file(opt, dry_run, &mut err).await; + }, ("request", Some(opt)) => { call_result = self._reports_request(opt, dry_run, &mut err).await; }, @@ -874,7 +1022,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("conversion", "methods: 'get', 'insert', 'update' and 'update-availability'", vec![ + ("conversion", "methods: 'get', 'get-by-customer-id', 'insert', 'update' and 'update-availability'", vec![ ("get", Some(r##"Retrieves a list of conversions from a DoubleClick Search engine account."##), "Details at http://byron.github.io/google-apis-rs/google_doubleclicksearch2_cli/conversion_get", @@ -927,6 +1075,52 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-by-customer-id", + Some(r##"Retrieves a list of conversions from a DoubleClick Search engine account."##), + "Details at http://byron.github.io/google-apis-rs/google_doubleclicksearch2_cli/conversion_get-by-customer-id", + vec![ + (Some(r##"customer-id"##), + None, + Some(r##"Customer ID of a client account in the new Search Ads 360 experience."##), + Some(true), + Some(false)), + + (Some(r##"end-date"##), + None, + Some(r##"Last date (inclusive) on which to retrieve conversions. Format is yyyymmdd."##), + Some(true), + Some(false)), + + (Some(r##"row-count"##), + None, + Some(r##"The number of conversions to return per call."##), + Some(true), + Some(false)), + + (Some(r##"start-date"##), + None, + Some(r##"First date (inclusive) on which to retrieve conversions. 
Format is yyyymmdd."##), + Some(true), + Some(false)), + + (Some(r##"start-row"##), + None, + Some(r##"The 0-based starting index for retrieving conversions results."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1001,7 +1195,7 @@ async fn main() { ]), ]), - ("reports", "methods: 'generate', 'get', 'get-file' and 'request'", vec![ + ("reports", "methods: 'generate', 'get', 'get-file', 'get-id-mapping-file' and 'request'", vec![ ("generate", Some(r##"Generates and returns a report immediately."##), "Details at http://byron.github.io/google-apis-rs/google_doubleclicksearch2_cli/reports_generate", @@ -1068,6 +1262,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-id-mapping-file", + Some(r##"Downloads a csv file(encoded in UTF-8) that contains ID mappings between legacy SA360 and new SA360. The file includes all children entities of the given advertiser(e.g. engine accounts, campaigns, ad groups, etc.) 
that exist in both legacy SA360 and new SA360."##), + "Details at http://byron.github.io/google-apis-rs/google_doubleclicksearch2_cli/reports_get-id-mapping-file", + vec![ + (Some(r##"agency-id"##), + None, + Some(r##"Legacy SA360 agency ID."##), + Some(true), + Some(false)), + + (Some(r##"advertiser-id"##), + None, + Some(r##"Legacy SA360 advertiser ID."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1133,7 +1355,7 @@ async fn main() { let mut app = App::new("doubleclicksearch2") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230118") .about("The Search Ads 360 API allows developers to automate uploading conversions and downloading reports from Search Ads 360.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_doubleclicksearch2_cli") .arg(Arg::with_name("url") diff --git a/gen/doubleclicksearch2/Cargo.toml b/gen/doubleclicksearch2/Cargo.toml index 4de20941f2..43c5cf888a 100644 --- a/gen/doubleclicksearch2/Cargo.toml +++ b/gen/doubleclicksearch2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-doubleclicksearch2" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Doubleclicksearch (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/doubleclicksearch2" homepage = "https://developers.google.com/search-ads" -documentation = "https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-doubleclicksearch2/5.0.2+20230118" license = "MIT" keywords = ["doubleclicksearch", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/doubleclicksearch2/README.md 
b/gen/doubleclicksearch2/README.md index fcf6cad32f..215272bc8c 100644 --- a/gen/doubleclicksearch2/README.md +++ b/gen/doubleclicksearch2/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-doubleclicksearch2` library allows access to all features of the *Google Doubleclicksearch* service. -This documentation was generated from *Doubleclicksearch* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *doubleclicksearch:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Doubleclicksearch* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *doubleclicksearch:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Doubleclicksearch* *v2* API can be found at the [official documentation site](https://developers.google.com/search-ads). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/Doubleclicksearch) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/Doubleclicksearch) ... 
-* [conversion](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::Conversion) - * [*get*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ConversionGetCall), [*get by customer id*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ConversionGetByCustomerIdCall), [*insert*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ConversionInsertCall), [*update*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ConversionUpdateCall) and [*update availability*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ConversionUpdateAvailabilityCall) -* [reports](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::Report) - * [*generate*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ReportGenerateCall), [*get*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ReportGetCall), [*get file*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ReportGetFileCall), [*get id mapping file*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ReportGetIdMappingFileCall) and [*request*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ReportRequestCall) -* [saved columns](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::SavedColumn) - * [*list*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::SavedColumnListCall) +* [conversion](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::Conversion) + * 
[*get*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ConversionGetCall), [*get by customer id*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ConversionGetByCustomerIdCall), [*insert*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ConversionInsertCall), [*update*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ConversionUpdateCall) and [*update availability*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ConversionUpdateAvailabilityCall) +* [reports](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::Report) + * [*generate*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ReportGenerateCall), [*get*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ReportGetCall), [*get file*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ReportGetFileCall), [*get id mapping file*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ReportGetIdMappingFileCall) and [*request*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ReportRequestCall) +* [saved columns](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::SavedColumn) + * [*list*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::SavedColumnListCall) Download supported by ... 
-* [*get file reports*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ReportGetFileCall) -* [*get id mapping file reports*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/api::ReportGetIdMappingFileCall) +* [*get file reports*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ReportGetFileCall) +* [*get id mapping file reports*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/api::ReportGetIdMappingFileCall) @@ -32,17 +32,17 @@ Download supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/Doubleclicksearch)** +* **[Hub](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/Doubleclicksearch)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::CallBuilder) -* **[Resources](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::CallBuilder) +* **[Resources](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::Part)** + * **[Parts](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::Delegate) to the -[Method Builder](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::Delegate) to the +[Method Builder](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::RequestValue) and -[decodable](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::RequestValue) and +[decodable](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-doubleclicksearch2/5.0.2-beta-1+20230118/google_doubleclicksearch2/client::RequestValue) are moved +* [request values](https://docs.rs/google-doubleclicksearch2/5.0.2+20230118/google_doubleclicksearch2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/doubleclicksearch2/src/api.rs b/gen/doubleclicksearch2/src/api.rs index 1b026229ec..6211798481 100644 --- a/gen/doubleclicksearch2/src/api.rs +++ b/gen/doubleclicksearch2/src/api.rs @@ -119,7 +119,7 @@ impl<'a, S> Doubleclicksearch { Doubleclicksearch { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://doubleclicksearch.googleapis.com/".to_string(), _root_url: "https://doubleclicksearch.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Doubleclicksearch { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/doubleclicksearch2/src/client.rs b/gen/doubleclicksearch2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/doubleclicksearch2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/doubleclicksearch2/src/lib.rs b/gen/doubleclicksearch2/src/lib.rs index d46b975d18..175b4b5048 100644 --- a/gen/doubleclicksearch2/src/lib.rs +++ b/gen/doubleclicksearch2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Doubleclicksearch* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *doubleclicksearch:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Doubleclicksearch* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *doubleclicksearch:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Doubleclicksearch* *v2* API can be found at the //! [official documentation site](https://developers.google.com/search-ads). diff --git a/gen/drive2-cli/Cargo.toml b/gen/drive2-cli/Cargo.toml index 320a08bcce..c8a2adaaff 100644 --- a/gen/drive2-cli/Cargo.toml +++ b/gen/drive2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-drive2-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with drive (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/drive2-cli" @@ -20,13 +20,13 @@ name = "drive2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-drive2] path = "../drive2" -version = "4.0.1+20220225" +version = "5.0.2+20230115" + diff --git a/gen/drive2-cli/README.md b/gen/drive2-cli/README.md index f048e52b71..4fdc76b475 100644 --- a/gen/drive2-cli/README.md +++ b/gen/drive2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *drive* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *drive* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash drive2 [options] @@ -70,6 +70,8 @@ drive2 [options] get [-p ]... [-o ] insert (-r )... (-u simple -f [-m ]) [-p ]... [-o ] list [-p ]... [-o ] + list-labels [-p ]... [-o ] + modify-labels (-r )... [-p ]... [-o ] patch (-r )... [-p ]... [-o ] touch [-p ]... [-o ] trash [-p ]... [-o ] diff --git a/gen/drive2-cli/mkdocs.yml b/gen/drive2-cli/mkdocs.yml index 3dc8dd2743..56e5fca18f 100644 --- a/gen/drive2-cli/mkdocs.yml +++ b/gen/drive2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: drive v4.0.1+20220225 +site_name: drive v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-drive2-cli site_description: A complete library to interact with drive (protocol v2) @@ -7,80 +7,96 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/drive2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['about_get.md', 'About', 'Get'] -- ['apps_get.md', 'Apps', 'Get'] -- ['apps_list.md', 'Apps', 'List'] -- ['changes_get.md', 'Changes', 'Get'] -- ['changes_get-start-page-token.md', 'Changes', 'Get Start Page Token'] -- ['changes_list.md', 'Changes', 'List'] -- ['changes_watch.md', 'Changes', 'Watch'] -- ['channels_stop.md', 'Channels', 'Stop'] -- ['children_delete.md', 'Children', 'Delete'] -- ['children_get.md', 'Children', 'Get'] -- ['children_insert.md', 'Children', 'Insert'] -- ['children_list.md', 'Children', 'List'] -- ['comments_delete.md', 'Comments', 'Delete'] -- ['comments_get.md', 'Comments', 'Get'] -- ['comments_insert.md', 'Comments', 'Insert'] -- ['comments_list.md', 'Comments', 'List'] -- ['comments_patch.md', 'Comments', 'Patch'] -- ['comments_update.md', 'Comments', 'Update'] -- ['drives_delete.md', 'Drives', 'Delete'] -- ['drives_get.md', 'Drives', 'Get'] -- ['drives_hide.md', 'Drives', 'Hide'] -- ['drives_insert.md', 
'Drives', 'Insert'] -- ['drives_list.md', 'Drives', 'List'] -- ['drives_unhide.md', 'Drives', 'Unhide'] -- ['drives_update.md', 'Drives', 'Update'] -- ['files_copy.md', 'Files', 'Copy'] -- ['files_delete.md', 'Files', 'Delete'] -- ['files_empty-trash.md', 'Files', 'Empty Trash'] -- ['files_export.md', 'Files', 'Export'] -- ['files_generate-ids.md', 'Files', 'Generate Ids'] -- ['files_get.md', 'Files', 'Get'] -- ['files_insert.md', 'Files', 'Insert'] -- ['files_list.md', 'Files', 'List'] -- ['files_patch.md', 'Files', 'Patch'] -- ['files_touch.md', 'Files', 'Touch'] -- ['files_trash.md', 'Files', 'Trash'] -- ['files_untrash.md', 'Files', 'Untrash'] -- ['files_update.md', 'Files', 'Update'] -- ['files_watch.md', 'Files', 'Watch'] -- ['parents_delete.md', 'Parents', 'Delete'] -- ['parents_get.md', 'Parents', 'Get'] -- ['parents_insert.md', 'Parents', 'Insert'] -- ['parents_list.md', 'Parents', 'List'] -- ['permissions_delete.md', 'Permissions', 'Delete'] -- ['permissions_get.md', 'Permissions', 'Get'] -- ['permissions_get-id-for-email.md', 'Permissions', 'Get Id For Email'] -- ['permissions_insert.md', 'Permissions', 'Insert'] -- ['permissions_list.md', 'Permissions', 'List'] -- ['permissions_patch.md', 'Permissions', 'Patch'] -- ['permissions_update.md', 'Permissions', 'Update'] -- ['properties_delete.md', 'Properties', 'Delete'] -- ['properties_get.md', 'Properties', 'Get'] -- ['properties_insert.md', 'Properties', 'Insert'] -- ['properties_list.md', 'Properties', 'List'] -- ['properties_patch.md', 'Properties', 'Patch'] -- ['properties_update.md', 'Properties', 'Update'] -- ['replies_delete.md', 'Replies', 'Delete'] -- ['replies_get.md', 'Replies', 'Get'] -- ['replies_insert.md', 'Replies', 'Insert'] -- ['replies_list.md', 'Replies', 'List'] -- ['replies_patch.md', 'Replies', 'Patch'] -- ['replies_update.md', 'Replies', 'Update'] -- ['revisions_delete.md', 'Revisions', 'Delete'] -- ['revisions_get.md', 'Revisions', 'Get'] -- ['revisions_list.md', 'Revisions', 
'List'] -- ['revisions_patch.md', 'Revisions', 'Patch'] -- ['revisions_update.md', 'Revisions', 'Update'] -- ['teamdrives_delete.md', 'Teamdrives', 'Delete'] -- ['teamdrives_get.md', 'Teamdrives', 'Get'] -- ['teamdrives_insert.md', 'Teamdrives', 'Insert'] -- ['teamdrives_list.md', 'Teamdrives', 'List'] -- ['teamdrives_update.md', 'Teamdrives', 'Update'] +nav: +- Home: 'index.md' +- 'About': + - 'Get': 'about_get.md' +- 'Apps': + - 'Get': 'apps_get.md' + - 'List': 'apps_list.md' +- 'Changes': + - 'Get': 'changes_get.md' + - 'Get Start Page Token': 'changes_get-start-page-token.md' + - 'List': 'changes_list.md' + - 'Watch': 'changes_watch.md' +- 'Channels': + - 'Stop': 'channels_stop.md' +- 'Children': + - 'Delete': 'children_delete.md' + - 'Get': 'children_get.md' + - 'Insert': 'children_insert.md' + - 'List': 'children_list.md' +- 'Comments': + - 'Delete': 'comments_delete.md' + - 'Get': 'comments_get.md' + - 'Insert': 'comments_insert.md' + - 'List': 'comments_list.md' + - 'Patch': 'comments_patch.md' + - 'Update': 'comments_update.md' +- 'Drives': + - 'Delete': 'drives_delete.md' + - 'Get': 'drives_get.md' + - 'Hide': 'drives_hide.md' + - 'Insert': 'drives_insert.md' + - 'List': 'drives_list.md' + - 'Unhide': 'drives_unhide.md' + - 'Update': 'drives_update.md' +- 'Files': + - 'Copy': 'files_copy.md' + - 'Delete': 'files_delete.md' + - 'Empty Trash': 'files_empty-trash.md' + - 'Export': 'files_export.md' + - 'Generate Ids': 'files_generate-ids.md' + - 'Get': 'files_get.md' + - 'Insert': 'files_insert.md' + - 'List': 'files_list.md' + - 'List Labels': 'files_list-labels.md' + - 'Modify Labels': 'files_modify-labels.md' + - 'Patch': 'files_patch.md' + - 'Touch': 'files_touch.md' + - 'Trash': 'files_trash.md' + - 'Untrash': 'files_untrash.md' + - 'Update': 'files_update.md' + - 'Watch': 'files_watch.md' +- 'Parents': + - 'Delete': 'parents_delete.md' + - 'Get': 'parents_get.md' + - 'Insert': 'parents_insert.md' + - 'List': 'parents_list.md' +- 'Permissions': + - 
'Delete': 'permissions_delete.md' + - 'Get': 'permissions_get.md' + - 'Get Id For Email': 'permissions_get-id-for-email.md' + - 'Insert': 'permissions_insert.md' + - 'List': 'permissions_list.md' + - 'Patch': 'permissions_patch.md' + - 'Update': 'permissions_update.md' +- 'Properties': + - 'Delete': 'properties_delete.md' + - 'Get': 'properties_get.md' + - 'Insert': 'properties_insert.md' + - 'List': 'properties_list.md' + - 'Patch': 'properties_patch.md' + - 'Update': 'properties_update.md' +- 'Replies': + - 'Delete': 'replies_delete.md' + - 'Get': 'replies_get.md' + - 'Insert': 'replies_insert.md' + - 'List': 'replies_list.md' + - 'Patch': 'replies_patch.md' + - 'Update': 'replies_update.md' +- 'Revisions': + - 'Delete': 'revisions_delete.md' + - 'Get': 'revisions_get.md' + - 'List': 'revisions_list.md' + - 'Patch': 'revisions_patch.md' + - 'Update': 'revisions_update.md' +- 'Teamdrives': + - 'Delete': 'teamdrives_delete.md' + - 'Get': 'teamdrives_get.md' + - 'Insert': 'teamdrives_insert.md' + - 'List': 'teamdrives_list.md' + - 'Update': 'teamdrives_update.md' theme: readthedocs diff --git a/gen/drive2-cli/src/client.rs b/gen/drive2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/drive2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - 
String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/drive2-cli/src/main.rs b/gen/drive2-cli/src/main.rs index d68cbc8cb6..ccbff8b521 100644 --- a/gen/drive2-cli/src/main.rs +++ b/gen/drive2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_drive2::{api, Error, oauth2}; +use google_drive2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,13 +57,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-change-id" => { - call = call.start_change_id(value.unwrap_or("")); + call = call.start_change_id( value.map(|v| arg_from_str(v, err, "start-change-id", "int64")).unwrap_or(-0)); }, "max-change-id-count" => { - call = call.max_change_id_count(value.unwrap_or("")); + call = call.max_change_id_count( value.map(|v| arg_from_str(v, err, "max-change-id-count", "int64")).unwrap_or(-0)); }, "include-subscribed" => { - call = call.include_subscribed(arg_from_str(value.unwrap_or("false"), err, "include-subscribed", "boolean")); + call = call.include_subscribed( value.map(|v| arg_from_str(v, err, "include-subscribed", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -237,10 +236,10 @@ where call = call.team_drive_id(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, 
"supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "drive-id" => { call = call.drive_id(value.unwrap_or("")); @@ -302,10 +301,10 @@ where call = call.team_drive_id(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "drive-id" => { call = call.drive_id(value.unwrap_or("")); @@ -367,13 +366,13 @@ where call = call.team_drive_id(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "start-change-id" => { - call = call.start_change_id(value.unwrap_or("")); + call = call.start_change_id( value.map(|v| arg_from_str(v, err, "start-change-id", "int64")).unwrap_or(-0)); }, "spaces" => { call = call.spaces(value.unwrap_or("")); @@ -382,25 +381,28 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-team-drive-items" => { - call = call.include_team_drive_items(arg_from_str(value.unwrap_or("false"), err, "include-team-drive-items", "boolean")); + call = call.include_team_drive_items( value.map(|v| arg_from_str(v, err, "include-team-drive-items", "boolean")).unwrap_or(false)); }, "include-subscribed" => { - call = call.include_subscribed(arg_from_str(value.unwrap_or("false"), err, "include-subscribed", "boolean")); + call = call.include_subscribed( value.map(|v| arg_from_str(v, err, "include-subscribed", "boolean")).unwrap_or(false)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "include-items-from-all-drives" => { - call = call.include_items_from_all_drives(arg_from_str(value.unwrap_or("false"), err, "include-items-from-all-drives", "boolean")); + call = call.include_items_from_all_drives( value.map(|v| arg_from_str(v, err, "include-items-from-all-drives", "boolean")).unwrap_or(false)); }, "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, "include-corpus-removals" => { - call = call.include_corpus_removals(arg_from_str(value.unwrap_or("false"), err, "include-corpus-removals", "boolean")); + call = call.include_corpus_removals( value.map(|v| arg_from_str(v, err, "include-corpus-removals", "boolean")).unwrap_or(false)); }, "drive-id" => { call = call.drive_id(value.unwrap_or("")); @@ -418,7 +420,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - 
v.extend(["drive-id", "include-corpus-removals", "include-deleted", "include-items-from-all-drives", "include-permissions-for-view", "include-subscribed", "include-team-drive-items", "max-results", "page-token", "spaces", "start-change-id", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); + v.extend(["drive-id", "include-corpus-removals", "include-deleted", "include-items-from-all-drives", "include-labels", "include-permissions-for-view", "include-subscribed", "include-team-drive-items", "max-results", "page-token", "spaces", "start-change-id", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); v } )); } } @@ -504,13 +506,13 @@ where call = call.team_drive_id(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "start-change-id" => { - call = call.start_change_id(value.unwrap_or("")); + call = call.start_change_id( value.map(|v| arg_from_str(v, err, "start-change-id", "int64")).unwrap_or(-0)); }, "spaces" => { call = call.spaces(value.unwrap_or("")); @@ -519,25 +521,28 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-team-drive-items" => { - call = call.include_team_drive_items(arg_from_str(value.unwrap_or("false"), err, "include-team-drive-items", "boolean")); + 
call = call.include_team_drive_items( value.map(|v| arg_from_str(v, err, "include-team-drive-items", "boolean")).unwrap_or(false)); }, "include-subscribed" => { - call = call.include_subscribed(arg_from_str(value.unwrap_or("false"), err, "include-subscribed", "boolean")); + call = call.include_subscribed( value.map(|v| arg_from_str(v, err, "include-subscribed", "boolean")).unwrap_or(false)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "include-items-from-all-drives" => { - call = call.include_items_from_all_drives(arg_from_str(value.unwrap_or("false"), err, "include-items-from-all-drives", "boolean")); + call = call.include_items_from_all_drives( value.map(|v| arg_from_str(v, err, "include-items-from-all-drives", "boolean")).unwrap_or(false)); }, "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, "include-corpus-removals" => { - call = call.include_corpus_removals(arg_from_str(value.unwrap_or("false"), err, "include-corpus-removals", "boolean")); + call = call.include_corpus_removals( value.map(|v| arg_from_str(v, err, "include-corpus-removals", "boolean")).unwrap_or(false)); }, "drive-id" => { call = call.drive_id(value.unwrap_or("")); @@ -555,7 +560,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["drive-id", "include-corpus-removals", "include-deleted", "include-items-from-all-drives", "include-permissions-for-view", "include-subscribed", "include-team-drive-items", "max-results", "page-token", "spaces", "start-change-id", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); + v.extend(["drive-id", 
"include-corpus-removals", "include-deleted", "include-items-from-all-drives", "include-labels", "include-permissions-for-view", "include-subscribed", "include-team-drive-items", "max-results", "page-token", "spaces", "start-change-id", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); v } )); } } @@ -682,7 +687,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -818,13 +823,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -889,7 +894,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ 
-989,7 +994,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1155,10 +1160,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1421,6 +1426,12 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "use-domain-admin-access" => { + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); + }, + "allow-item-deletion" => { + call = call.allow_item_deletion( value.map(|v| arg_from_str(v, err, "allow-item-deletion", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -1434,6 +1445,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); + v.extend(["allow-item-deletion", "use-domain-admin-access"].iter().map(|v|*v)); v } )); } } @@ -1466,7 +1478,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = 
call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1611,6 +1623,7 @@ where "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename" => Some(("capabilities.canRename", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename-drive" => Some(("capabilities.canRenameDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-reset-drive-restrictions" => Some(("capabilities.canResetDriveRestrictions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-share" => Some(("capabilities.canShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-trash-children" => Some(("capabilities.canTrashChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "color-rgb" => Some(("colorRgb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1626,7 +1639,7 @@ where "restrictions.drive-members-only" => Some(("restrictions.driveMembersOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "theme-id" => Some(("themeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-drive-background", "can-change-drive-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-rename", "can-rename-drive", "can-share", "can-trash-children", 
"capabilities", "color-rgb", "copy-requires-writer-permission", "created-date", "domain-users-only", "drive-members-only", "hidden", "id", "kind", "name", "org-unit-id", "restrictions", "theme-id", "width", "x-coordinate", "y-coordinate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-drive-background", "can-change-drive-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-rename", "can-rename-drive", "can-reset-drive-restrictions", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-date", "domain-users-only", "drive-members-only", "hidden", "id", "kind", "name", "org-unit-id", "restrictions", "theme-id", "width", "x-coordinate", "y-coordinate"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1693,7 +1706,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, "q" => { call = call.q(value.unwrap_or("")); @@ -1702,7 +1715,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1847,6 +1860,7 @@ where 
"capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename" => Some(("capabilities.canRename", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename-drive" => Some(("capabilities.canRenameDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-reset-drive-restrictions" => Some(("capabilities.canResetDriveRestrictions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-share" => Some(("capabilities.canShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-trash-children" => Some(("capabilities.canTrashChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "color-rgb" => Some(("colorRgb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1862,7 +1876,7 @@ where "restrictions.drive-members-only" => Some(("restrictions.driveMembersOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "theme-id" => Some(("themeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-drive-background", "can-change-drive-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-rename", "can-rename-drive", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-date", "domain-users-only", "drive-members-only", "hidden", "id", "kind", "name", "org-unit-id", "restrictions", 
"theme-id", "width", "x-coordinate", "y-coordinate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-drive-background", "can-change-drive-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-rename", "can-rename-drive", "can-reset-drive-restrictions", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-date", "domain-users-only", "drive-members-only", "hidden", "id", "kind", "name", "org-unit-id", "restrictions", "theme-id", "width", "x-coordinate", "y-coordinate"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1877,7 +1891,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1969,6 +1983,7 @@ where "capabilities.can-list-children" => Some(("capabilities.canListChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content" => Some(("capabilities.canModifyContent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content-restriction" => Some(("capabilities.canModifyContentRestriction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-modify-labels" => Some(("capabilities.canModifyLabels", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-drive" => Some(("capabilities.canMoveChildrenOutOfDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-team-drive" => Some(("capabilities.canMoveChildrenOutOfTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-within-drive" => Some(("capabilities.canMoveChildrenWithinDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1980,6 +1995,7 @@ where "capabilities.can-move-item-within-team-drive" => Some(("capabilities.canMoveItemWithinTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-team-drive-item" => Some(("capabilities.canMoveTeamDriveItem", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-drive" => Some(("capabilities.canReadDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-read-labels" => Some(("capabilities.canReadLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-team-drive" => Some(("capabilities.canReadTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2065,6 +2081,8 @@ where "quota-bytes-used" => Some(("quotaBytesUsed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-key" => Some(("resourceKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "sha1-checksum" => Some(("sha1Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha256-checksum" => Some(("sha256Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "shareable" => Some(("shareable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared" => Some(("shared", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared-with-me-date" => Some(("sharedWithMeDate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2117,7 +2135,7 @@ where "web-view-link" => Some(("webViewLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "writers-can-share" => Some(("writersCanShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-roles", "alternate-link", "altitude", "aperture", "app-data-contents", "auth-key", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-restricted-download", "can-change-security-update-enabled", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", 
"copy-requires-writer-permission", "copyable", "created-date", "date", "default-open-with-link", "deleted", "description", "display-name", "domain", "download-url", "drive-id", "duration-millis", "editable", "email-address", "embed-link", "etag", "expiration-date", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "file-size", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "hidden", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "is-authenticated-user", "iso-speed", "kind", "labels", "last-modifying-user", "last-modifying-user-name", "last-viewed-by-me-date", "latitude", "lens", "link-share-metadata", "location", "longitude", "marked-viewed-by-me-date", "max-aperture-value", "md5-checksum", "metering-mode", "mime-type", "modified", "modified-by-me-date", "modified-date", "name", "open-with-links", "original-filename", "owned-by-me", "owner-names", "pending-owner", "permission-id", "permission-ids", "photo-link", "picture", "quota-bytes-used", "resource-key", "restricted", "role", "rotation", "security-update-eligible", "security-update-enabled", "self-link", "sensor", "shareable", "shared", "shared-with-me-date", "sharing-user", "shortcut-details", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "text", "thumbnail", "thumbnail-link", "thumbnail-version", "title", "trashed", "trashed-date", "trashing-user", "type", "url", "user-permission", "value", "version", "video-media-metadata", "view", "viewed", "web-content-link", "web-view-link", "white-balance", "width", "with-link", "writers-can-share"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-roles", "alternate-link", "altitude", "aperture", "app-data-contents", "auth-key", "camera-make", "camera-model", "can-accept-ownership", 
"can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-restricted-download", "can-change-security-update-enabled", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-modify-labels", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-labels", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "copy-requires-writer-permission", "copyable", "created-date", "date", "default-open-with-link", "deleted", "description", "display-name", "domain", "download-url", "drive-id", "duration-millis", "editable", "email-address", "embed-link", "etag", "expiration-date", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "file-size", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "hidden", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "is-authenticated-user", "iso-speed", "kind", "labels", "last-modifying-user", "last-modifying-user-name", "last-viewed-by-me-date", "latitude", "lens", "link-share-metadata", "location", "longitude", "marked-viewed-by-me-date", "max-aperture-value", "md5-checksum", "metering-mode", "mime-type", "modified", "modified-by-me-date", "modified-date", "name", "open-with-links", 
"original-filename", "owned-by-me", "owner-names", "pending-owner", "permission-id", "permission-ids", "photo-link", "picture", "quota-bytes-used", "resource-key", "restricted", "role", "rotation", "security-update-eligible", "security-update-enabled", "self-link", "sensor", "sha1-checksum", "sha256-checksum", "shareable", "shared", "shared-with-me-date", "sharing-user", "shortcut-details", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "text", "thumbnail", "thumbnail-link", "thumbnail-version", "title", "trashed", "trashed-date", "trashing-user", "type", "url", "user-permission", "value", "version", "video-media-metadata", "view", "viewed", "web-content-link", "web-view-link", "white-balance", "width", "with-link", "writers-can-share"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2141,28 +2159,31 @@ where call = call.timed_text_language(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "pinned" => { - call = call.pinned(arg_from_str(value.unwrap_or("false"), err, "pinned", "boolean")); + call = call.pinned( value.map(|v| arg_from_str(v, err, "pinned", "boolean")).unwrap_or(false)); }, "ocr-language" => { call = call.ocr_language(value.unwrap_or("")); }, "ocr" => { - call = call.ocr(arg_from_str(value.unwrap_or("false"), err, "ocr", "boolean")); + call = call.ocr( value.map(|v| arg_from_str(v, err, "ocr", 
"boolean")).unwrap_or(false)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, "convert" => { - call = call.convert(arg_from_str(value.unwrap_or("false"), err, "convert", "boolean")); + call = call.convert( value.map(|v| arg_from_str(v, err, "convert", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2177,7 +2198,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["convert", "enforce-single-parent", "include-permissions-for-view", "ocr", "ocr-language", "pinned", "supports-all-drives", "supports-team-drives", "timed-text-language", "timed-text-track-name", "visibility"].iter().map(|v|*v)); + v.extend(["convert", "enforce-single-parent", "include-labels", "include-permissions-for-view", "ocr", "ocr-language", "pinned", "supports-all-drives", "supports-team-drives", "timed-text-language", "timed-text-track-name", "visibility"].iter().map(|v|*v)); v } )); } } @@ -2218,13 +2239,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", 
"boolean")).unwrap_or(false)); }, "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2272,7 +2293,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2384,7 +2405,7 @@ where call = call.space(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2441,13 +2462,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-viewed-date" => { - call = call.update_viewed_date(arg_from_str(value.unwrap_or("false"), err, "update-viewed-date", "boolean")); + call = call.update_viewed_date( value.map(|v| arg_from_str(v, err, "update-viewed-date", "boolean")).unwrap_or(false)); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "revision-id" => { call = 
call.revision_id(value.unwrap_or("")); @@ -2458,8 +2479,11 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "acknowledge-abuse" => { - call = call.acknowledge_abuse(arg_from_str(value.unwrap_or("false"), err, "acknowledge-abuse", "boolean")); + call = call.acknowledge_abuse( value.map(|v| arg_from_str(v, err, "acknowledge-abuse", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2477,7 +2501,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["acknowledge-abuse", "include-permissions-for-view", "projection", "revision-id", "supports-all-drives", "supports-team-drives", "update-viewed-date"].iter().map(|v|*v)); + v.extend(["acknowledge-abuse", "include-labels", "include-permissions-for-view", "projection", "revision-id", "supports-all-drives", "supports-team-drives", "update-viewed-date"].iter().map(|v|*v)); v } )); } } @@ -2560,6 +2584,7 @@ where "capabilities.can-list-children" => Some(("capabilities.canListChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content" => Some(("capabilities.canModifyContent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content-restriction" => Some(("capabilities.canModifyContentRestriction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-modify-labels" => Some(("capabilities.canModifyLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-drive" => Some(("capabilities.canMoveChildrenOutOfDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-team-drive" => Some(("capabilities.canMoveChildrenOutOfTeamDrive", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-within-drive" => Some(("capabilities.canMoveChildrenWithinDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2571,6 +2596,7 @@ where "capabilities.can-move-item-within-team-drive" => Some(("capabilities.canMoveItemWithinTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-team-drive-item" => Some(("capabilities.canMoveTeamDriveItem", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-drive" => Some(("capabilities.canReadDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-read-labels" => Some(("capabilities.canReadLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-team-drive" => Some(("capabilities.canReadTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2656,6 +2682,8 @@ where "quota-bytes-used" => Some(("quotaBytesUsed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-key" => Some(("resourceKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha1-checksum" => Some(("sha1Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha256-checksum" => Some(("sha256Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "shareable" => Some(("shareable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"shared" => Some(("shared", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared-with-me-date" => Some(("sharedWithMeDate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2708,7 +2736,7 @@ where "web-view-link" => Some(("webViewLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "writers-can-share" => Some(("writersCanShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-roles", "alternate-link", "altitude", "aperture", "app-data-contents", "auth-key", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-restricted-download", "can-change-security-update-enabled", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "copy-requires-writer-permission", "copyable", "created-date", "date", "default-open-with-link", "deleted", "description", "display-name", "domain", "download-url", "drive-id", "duration-millis", "editable", "email-address", "embed-link", "etag", "expiration-date", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", 
"file-size", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "hidden", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "is-authenticated-user", "iso-speed", "kind", "labels", "last-modifying-user", "last-modifying-user-name", "last-viewed-by-me-date", "latitude", "lens", "link-share-metadata", "location", "longitude", "marked-viewed-by-me-date", "max-aperture-value", "md5-checksum", "metering-mode", "mime-type", "modified", "modified-by-me-date", "modified-date", "name", "open-with-links", "original-filename", "owned-by-me", "owner-names", "pending-owner", "permission-id", "permission-ids", "photo-link", "picture", "quota-bytes-used", "resource-key", "restricted", "role", "rotation", "security-update-eligible", "security-update-enabled", "self-link", "sensor", "shareable", "shared", "shared-with-me-date", "sharing-user", "shortcut-details", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "text", "thumbnail", "thumbnail-link", "thumbnail-version", "title", "trashed", "trashed-date", "trashing-user", "type", "url", "user-permission", "value", "version", "video-media-metadata", "view", "viewed", "web-content-link", "web-view-link", "white-balance", "width", "with-link", "writers-can-share"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-roles", "alternate-link", "altitude", "aperture", "app-data-contents", "auth-key", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-restricted-download", "can-change-security-update-enabled", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", 
"can-modify-labels", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-labels", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "copy-requires-writer-permission", "copyable", "created-date", "date", "default-open-with-link", "deleted", "description", "display-name", "domain", "download-url", "drive-id", "duration-millis", "editable", "email-address", "embed-link", "etag", "expiration-date", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "file-size", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "hidden", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "is-authenticated-user", "iso-speed", "kind", "labels", "last-modifying-user", "last-modifying-user-name", "last-viewed-by-me-date", "latitude", "lens", "link-share-metadata", "location", "longitude", "marked-viewed-by-me-date", "max-aperture-value", "md5-checksum", "metering-mode", "mime-type", "modified", "modified-by-me-date", "modified-date", "name", "open-with-links", "original-filename", "owned-by-me", "owner-names", "pending-owner", "permission-id", "permission-ids", "photo-link", "picture", "quota-bytes-used", "resource-key", "restricted", "role", "rotation", "security-update-eligible", "security-update-enabled", "self-link", "sensor", "sha1-checksum", "sha256-checksum", "shareable", "shared", "shared-with-me-date", "sharing-user", 
"shortcut-details", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "text", "thumbnail", "thumbnail-link", "thumbnail-version", "title", "trashed", "trashed-date", "trashing-user", "type", "url", "user-permission", "value", "version", "video-media-metadata", "view", "viewed", "web-content-link", "web-view-link", "white-balance", "width", "with-link", "writers-can-share"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2726,7 +2754,7 @@ where call = call.visibility(value.unwrap_or("")); }, "use-content-as-indexable-text" => { - call = call.use_content_as_indexable_text(arg_from_str(value.unwrap_or("false"), err, "use-content-as-indexable-text", "boolean")); + call = call.use_content_as_indexable_text( value.map(|v| arg_from_str(v, err, "use-content-as-indexable-text", "boolean")).unwrap_or(false)); }, "timed-text-track-name" => { call = call.timed_text_track_name(value.unwrap_or("")); @@ -2735,28 +2763,31 @@ where call = call.timed_text_language(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "pinned" => { - call = call.pinned(arg_from_str(value.unwrap_or("false"), err, "pinned", "boolean")); + call = call.pinned( value.map(|v| arg_from_str(v, err, "pinned", "boolean")).unwrap_or(false)); }, "ocr-language" => { call = call.ocr_language(value.unwrap_or("")); }, "ocr" => { - call = 
call.ocr(arg_from_str(value.unwrap_or("false"), err, "ocr", "boolean")); + call = call.ocr( value.map(|v| arg_from_str(v, err, "ocr", "boolean")).unwrap_or(false)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, "convert" => { - call = call.convert(arg_from_str(value.unwrap_or("false"), err, "convert", "boolean")); + call = call.convert( value.map(|v| arg_from_str(v, err, "convert", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2771,7 +2802,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["convert", "enforce-single-parent", "include-permissions-for-view", "ocr", "ocr-language", "pinned", "supports-all-drives", "supports-team-drives", "timed-text-language", "timed-text-track-name", "use-content-as-indexable-text", "visibility"].iter().map(|v|*v)); + v.extend(["convert", "enforce-single-parent", "include-labels", "include-permissions-for-view", "ocr", "ocr-language", "pinned", "supports-all-drives", "supports-team-drives", "timed-text-language", "timed-text-track-name", "use-content-as-indexable-text", "visibility"].iter().map(|v|*v)); v } )); } } @@ -2818,10 +2849,10 @@ where call = call.team_drive_id(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = 
call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "spaces" => { call = call.spaces(value.unwrap_or("")); @@ -2839,16 +2870,19 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-team-drive-items" => { - call = call.include_team_drive_items(arg_from_str(value.unwrap_or("false"), err, "include-team-drive-items", "boolean")); + call = call.include_team_drive_items( value.map(|v| arg_from_str(v, err, "include-team-drive-items", "boolean")).unwrap_or(false)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "include-items-from-all-drives" => { - call = call.include_items_from_all_drives(arg_from_str(value.unwrap_or("false"), err, "include-items-from-all-drives", "boolean")); + call = call.include_items_from_all_drives( value.map(|v| arg_from_str(v, err, "include-items-from-all-drives", "boolean")).unwrap_or(false)); }, "drive-id" => { call = call.drive_id(value.unwrap_or("")); @@ -2872,7 +2906,151 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["corpora", "corpus", "drive-id", "include-items-from-all-drives", "include-permissions-for-view", "include-team-drive-items", "max-results", "order-by", "page-token", "projection", "q", "spaces", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); + v.extend(["corpora", "corpus", "drive-id", "include-items-from-all-drives", "include-labels", 
"include-permissions-for-view", "include-team-drive-items", "max-results", "order-by", "page-token", "projection", "q", "spaces", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _files_list_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.files().list_labels(opt.value_of("file-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v.extend(["max-results", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _files_modify_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["kind"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ModifyLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.files().modify_labels(request, opt.value_of("file-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); v } )); } } @@ -2949,6 +3127,7 @@ where "capabilities.can-list-children" => Some(("capabilities.canListChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content" => Some(("capabilities.canModifyContent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content-restriction" => Some(("capabilities.canModifyContentRestriction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-modify-labels" => Some(("capabilities.canModifyLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-drive" => Some(("capabilities.canMoveChildrenOutOfDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: 
ComplexType::Pod })), "capabilities.can-move-children-out-of-team-drive" => Some(("capabilities.canMoveChildrenOutOfTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-within-drive" => Some(("capabilities.canMoveChildrenWithinDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2960,6 +3139,7 @@ where "capabilities.can-move-item-within-team-drive" => Some(("capabilities.canMoveItemWithinTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-team-drive-item" => Some(("capabilities.canMoveTeamDriveItem", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-drive" => Some(("capabilities.canReadDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-read-labels" => Some(("capabilities.canReadLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-team-drive" => Some(("capabilities.canReadTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3045,6 +3225,8 @@ where "quota-bytes-used" => Some(("quotaBytesUsed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-key" => Some(("resourceKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha1-checksum" => Some(("sha1Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha256-checksum" => Some(("sha256Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "shareable" => Some(("shareable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared" => Some(("shared", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared-with-me-date" => Some(("sharedWithMeDate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3097,7 +3279,7 @@ where "web-view-link" => Some(("webViewLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "writers-can-share" => Some(("writersCanShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-roles", "alternate-link", "altitude", "aperture", "app-data-contents", "auth-key", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-restricted-download", "can-change-security-update-enabled", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "copy-requires-writer-permission", "copyable", "created-date", "date", "default-open-with-link", "deleted", "description", "display-name", "domain", "download-url", "drive-id", "duration-millis", "editable", "email-address", "embed-link", "etag", 
"expiration-date", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "file-size", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "hidden", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "is-authenticated-user", "iso-speed", "kind", "labels", "last-modifying-user", "last-modifying-user-name", "last-viewed-by-me-date", "latitude", "lens", "link-share-metadata", "location", "longitude", "marked-viewed-by-me-date", "max-aperture-value", "md5-checksum", "metering-mode", "mime-type", "modified", "modified-by-me-date", "modified-date", "name", "open-with-links", "original-filename", "owned-by-me", "owner-names", "pending-owner", "permission-id", "permission-ids", "photo-link", "picture", "quota-bytes-used", "resource-key", "restricted", "role", "rotation", "security-update-eligible", "security-update-enabled", "self-link", "sensor", "shareable", "shared", "shared-with-me-date", "sharing-user", "shortcut-details", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "text", "thumbnail", "thumbnail-link", "thumbnail-version", "title", "trashed", "trashed-date", "trashing-user", "type", "url", "user-permission", "value", "version", "video-media-metadata", "view", "viewed", "web-content-link", "web-view-link", "white-balance", "width", "with-link", "writers-can-share"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-roles", "alternate-link", "altitude", "aperture", "app-data-contents", "auth-key", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-restricted-download", "can-change-security-update-enabled", "can-comment", "can-copy", "can-delete", 
"can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-modify-labels", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-labels", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "copy-requires-writer-permission", "copyable", "created-date", "date", "default-open-with-link", "deleted", "description", "display-name", "domain", "download-url", "drive-id", "duration-millis", "editable", "email-address", "embed-link", "etag", "expiration-date", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "file-size", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "hidden", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "is-authenticated-user", "iso-speed", "kind", "labels", "last-modifying-user", "last-modifying-user-name", "last-viewed-by-me-date", "latitude", "lens", "link-share-metadata", "location", "longitude", "marked-viewed-by-me-date", "max-aperture-value", "md5-checksum", "metering-mode", "mime-type", "modified", "modified-by-me-date", "modified-date", "name", "open-with-links", "original-filename", "owned-by-me", "owner-names", "pending-owner", "permission-id", "permission-ids", "photo-link", "picture", "quota-bytes-used", "resource-key", "restricted", "role", "rotation", "security-update-eligible", "security-update-enabled", 
"self-link", "sensor", "sha1-checksum", "sha256-checksum", "shareable", "shared", "shared-with-me-date", "sharing-user", "shortcut-details", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "text", "thumbnail", "thumbnail-link", "thumbnail-version", "title", "trashed", "trashed-date", "trashing-user", "type", "url", "user-permission", "value", "version", "video-media-metadata", "view", "viewed", "web-content-link", "web-view-link", "white-balance", "width", "with-link", "writers-can-share"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3112,10 +3294,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-content-as-indexable-text" => { - call = call.use_content_as_indexable_text(arg_from_str(value.unwrap_or("false"), err, "use-content-as-indexable-text", "boolean")); + call = call.use_content_as_indexable_text( value.map(|v| arg_from_str(v, err, "use-content-as-indexable-text", "boolean")).unwrap_or(false)); }, "update-viewed-date" => { - call = call.update_viewed_date(arg_from_str(value.unwrap_or("false"), err, "update-viewed-date", "boolean")); + call = call.update_viewed_date( value.map(|v| arg_from_str(v, err, "update-viewed-date", "boolean")).unwrap_or(false)); }, "timed-text-track-name" => { call = call.timed_text_track_name(value.unwrap_or("")); @@ -3124,28 +3306,28 @@ where call = call.timed_text_language(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( 
value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "set-modified-date" => { - call = call.set_modified_date(arg_from_str(value.unwrap_or("false"), err, "set-modified-date", "boolean")); + call = call.set_modified_date( value.map(|v| arg_from_str(v, err, "set-modified-date", "boolean")).unwrap_or(false)); }, "remove-parents" => { call = call.remove_parents(value.unwrap_or("")); }, "pinned" => { - call = call.pinned(arg_from_str(value.unwrap_or("false"), err, "pinned", "boolean")); + call = call.pinned( value.map(|v| arg_from_str(v, err, "pinned", "boolean")).unwrap_or(false)); }, "ocr-language" => { call = call.ocr_language(value.unwrap_or("")); }, "ocr" => { - call = call.ocr(arg_from_str(value.unwrap_or("false"), err, "ocr", "boolean")); + call = call.ocr( value.map(|v| arg_from_str(v, err, "ocr", "boolean")).unwrap_or(false)); }, "new-revision" => { - call = call.new_revision(arg_from_str(value.unwrap_or("false"), err, "new-revision", "boolean")); + call = call.new_revision( value.map(|v| arg_from_str(v, err, "new-revision", "boolean")).unwrap_or(false)); }, "modified-date-behavior" => { call = call.modified_date_behavior(value.unwrap_or("")); @@ -3153,11 +3335,14 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, "convert" => { - call = call.convert(arg_from_str(value.unwrap_or("false"), err, "convert", "boolean")); + call = call.convert( value.map(|v| arg_from_str(v, err, "convert", "boolean")).unwrap_or(false)); }, "add-parents" => { call = call.add_parents(value.unwrap_or("")); @@ -3175,7 +3360,7 @@ where 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["add-parents", "convert", "enforce-single-parent", "include-permissions-for-view", "modified-date-behavior", "new-revision", "ocr", "ocr-language", "pinned", "remove-parents", "set-modified-date", "supports-all-drives", "supports-team-drives", "timed-text-language", "timed-text-track-name", "update-viewed-date", "use-content-as-indexable-text"].iter().map(|v|*v)); + v.extend(["add-parents", "convert", "enforce-single-parent", "include-labels", "include-permissions-for-view", "modified-date-behavior", "new-revision", "ocr", "ocr-language", "pinned", "remove-parents", "set-modified-date", "supports-all-drives", "supports-team-drives", "timed-text-language", "timed-text-track-name", "update-viewed-date", "use-content-as-indexable-text"].iter().map(|v|*v)); v } )); } } @@ -3216,14 +3401,17 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, _ => { let mut found = false; for param in &self.gp { @@ -3237,7 +3425,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["include-permissions-for-view", "supports-all-drives", 
"supports-team-drives"].iter().map(|v|*v)); + v.extend(["include-labels", "include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); v } )); } } @@ -3278,14 +3466,17 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, _ => { let mut found = false; for param in &self.gp { @@ -3299,7 +3490,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); + v.extend(["include-labels", "include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); v } )); } } @@ -3340,14 +3531,17 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = 
call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, _ => { let mut found = false; for param in &self.gp { @@ -3361,7 +3555,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); + v.extend(["include-labels", "include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); v } )); } } @@ -3438,6 +3632,7 @@ where "capabilities.can-list-children" => Some(("capabilities.canListChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content" => Some(("capabilities.canModifyContent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content-restriction" => Some(("capabilities.canModifyContentRestriction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-modify-labels" => Some(("capabilities.canModifyLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-drive" => Some(("capabilities.canMoveChildrenOutOfDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-team-drive" => Some(("capabilities.canMoveChildrenOutOfTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-within-drive" => Some(("capabilities.canMoveChildrenWithinDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3449,6 +3644,7 @@ where "capabilities.can-move-item-within-team-drive" => 
Some(("capabilities.canMoveItemWithinTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-team-drive-item" => Some(("capabilities.canMoveTeamDriveItem", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-drive" => Some(("capabilities.canReadDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-read-labels" => Some(("capabilities.canReadLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-team-drive" => Some(("capabilities.canReadTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -3534,6 +3730,8 @@ where "quota-bytes-used" => Some(("quotaBytesUsed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-key" => Some(("resourceKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha1-checksum" => Some(("sha1Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha256-checksum" => Some(("sha256Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "shareable" => Some(("shareable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared" => Some(("shared", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared-with-me-date" => Some(("sharedWithMeDate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3586,7 +3784,7 @@ where "web-view-link" => Some(("webViewLink", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "writers-can-share" => Some(("writersCanShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-roles", "alternate-link", "altitude", "aperture", "app-data-contents", "auth-key", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-restricted-download", "can-change-security-update-enabled", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "copy-requires-writer-permission", "copyable", "created-date", "date", "default-open-with-link", "deleted", "description", "display-name", "domain", "download-url", "drive-id", "duration-millis", "editable", "email-address", "embed-link", "etag", "expiration-date", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "file-size", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "hidden", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "is-authenticated-user", "iso-speed", "kind", 
"labels", "last-modifying-user", "last-modifying-user-name", "last-viewed-by-me-date", "latitude", "lens", "link-share-metadata", "location", "longitude", "marked-viewed-by-me-date", "max-aperture-value", "md5-checksum", "metering-mode", "mime-type", "modified", "modified-by-me-date", "modified-date", "name", "open-with-links", "original-filename", "owned-by-me", "owner-names", "pending-owner", "permission-id", "permission-ids", "photo-link", "picture", "quota-bytes-used", "resource-key", "restricted", "role", "rotation", "security-update-eligible", "security-update-enabled", "self-link", "sensor", "shareable", "shared", "shared-with-me-date", "sharing-user", "shortcut-details", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "text", "thumbnail", "thumbnail-link", "thumbnail-version", "title", "trashed", "trashed-date", "trashing-user", "type", "url", "user-permission", "value", "version", "video-media-metadata", "view", "viewed", "web-content-link", "web-view-link", "white-balance", "width", "with-link", "writers-can-share"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-roles", "alternate-link", "altitude", "aperture", "app-data-contents", "auth-key", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-restricted-download", "can-change-security-update-enabled", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-modify-labels", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", 
"can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-labels", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "copy-requires-writer-permission", "copyable", "created-date", "date", "default-open-with-link", "deleted", "description", "display-name", "domain", "download-url", "drive-id", "duration-millis", "editable", "email-address", "embed-link", "etag", "expiration-date", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "file-size", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "hidden", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "is-authenticated-user", "iso-speed", "kind", "labels", "last-modifying-user", "last-modifying-user-name", "last-viewed-by-me-date", "latitude", "lens", "link-share-metadata", "location", "longitude", "marked-viewed-by-me-date", "max-aperture-value", "md5-checksum", "metering-mode", "mime-type", "modified", "modified-by-me-date", "modified-date", "name", "open-with-links", "original-filename", "owned-by-me", "owner-names", "pending-owner", "permission-id", "permission-ids", "photo-link", "picture", "quota-bytes-used", "resource-key", "restricted", "role", "rotation", "security-update-eligible", "security-update-enabled", "self-link", "sensor", "sha1-checksum", "sha256-checksum", "shareable", "shared", "shared-with-me-date", "sharing-user", "shortcut-details", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "text", "thumbnail", "thumbnail-link", "thumbnail-version", "title", "trashed", "trashed-date", "trashing-user", "type", "url", "user-permission", "value", "version", 
"video-media-metadata", "view", "viewed", "web-content-link", "web-view-link", "white-balance", "width", "with-link", "writers-can-share"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3601,10 +3799,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-content-as-indexable-text" => { - call = call.use_content_as_indexable_text(arg_from_str(value.unwrap_or("false"), err, "use-content-as-indexable-text", "boolean")); + call = call.use_content_as_indexable_text( value.map(|v| arg_from_str(v, err, "use-content-as-indexable-text", "boolean")).unwrap_or(false)); }, "update-viewed-date" => { - call = call.update_viewed_date(arg_from_str(value.unwrap_or("false"), err, "update-viewed-date", "boolean")); + call = call.update_viewed_date( value.map(|v| arg_from_str(v, err, "update-viewed-date", "boolean")).unwrap_or(false)); }, "timed-text-track-name" => { call = call.timed_text_track_name(value.unwrap_or("")); @@ -3613,28 +3811,28 @@ where call = call.timed_text_language(value.unwrap_or("")); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "set-modified-date" => { - call = call.set_modified_date(arg_from_str(value.unwrap_or("false"), err, "set-modified-date", "boolean")); + call = call.set_modified_date( value.map(|v| arg_from_str(v, err, "set-modified-date", "boolean")).unwrap_or(false)); }, "remove-parents" => { call = call.remove_parents(value.unwrap_or("")); }, "pinned" 
=> { - call = call.pinned(arg_from_str(value.unwrap_or("false"), err, "pinned", "boolean")); + call = call.pinned( value.map(|v| arg_from_str(v, err, "pinned", "boolean")).unwrap_or(false)); }, "ocr-language" => { call = call.ocr_language(value.unwrap_or("")); }, "ocr" => { - call = call.ocr(arg_from_str(value.unwrap_or("false"), err, "ocr", "boolean")); + call = call.ocr( value.map(|v| arg_from_str(v, err, "ocr", "boolean")).unwrap_or(false)); }, "new-revision" => { - call = call.new_revision(arg_from_str(value.unwrap_or("false"), err, "new-revision", "boolean")); + call = call.new_revision( value.map(|v| arg_from_str(v, err, "new-revision", "boolean")).unwrap_or(false)); }, "modified-date-behavior" => { call = call.modified_date_behavior(value.unwrap_or("")); @@ -3642,11 +3840,14 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, "convert" => { - call = call.convert(arg_from_str(value.unwrap_or("false"), err, "convert", "boolean")); + call = call.convert( value.map(|v| arg_from_str(v, err, "convert", "boolean")).unwrap_or(false)); }, "add-parents" => { call = call.add_parents(value.unwrap_or("")); @@ -3664,7 +3865,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["add-parents", "convert", "enforce-single-parent", "include-permissions-for-view", "modified-date-behavior", "new-revision", "ocr", "ocr-language", "pinned", "remove-parents", "set-modified-date", "supports-all-drives", "supports-team-drives", "timed-text-language", "timed-text-track-name", 
"update-viewed-date", "use-content-as-indexable-text"].iter().map(|v|*v)); + v.extend(["add-parents", "convert", "enforce-single-parent", "include-labels", "include-permissions-for-view", "modified-date-behavior", "new-revision", "ocr", "ocr-language", "pinned", "remove-parents", "set-modified-date", "supports-all-drives", "supports-team-drives", "timed-text-language", "timed-text-track-name", "update-viewed-date", "use-content-as-indexable-text"].iter().map(|v|*v)); v } )); } } @@ -3751,13 +3952,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-viewed-date" => { - call = call.update_viewed_date(arg_from_str(value.unwrap_or("false"), err, "update-viewed-date", "boolean")); + call = call.update_viewed_date( value.map(|v| arg_from_str(v, err, "update-viewed-date", "boolean")).unwrap_or(false)); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "revision-id" => { call = call.revision_id(value.unwrap_or("")); @@ -3768,8 +3969,11 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "acknowledge-abuse" => { - call = call.acknowledge_abuse(arg_from_str(value.unwrap_or("false"), err, "acknowledge-abuse", "boolean")); + call = call.acknowledge_abuse( value.map(|v| arg_from_str(v, err, "acknowledge-abuse", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3787,7 +3991,7 @@ where 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["acknowledge-abuse", "include-permissions-for-view", "projection", "revision-id", "supports-all-drives", "supports-team-drives", "update-viewed-date"].iter().map(|v|*v)); + v.extend(["acknowledge-abuse", "include-labels", "include-permissions-for-view", "projection", "revision-id", "supports-all-drives", "supports-team-drives", "update-viewed-date"].iter().map(|v|*v)); v } )); } } @@ -3834,7 +4038,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -3971,13 +4175,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4085,13 +4289,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = 
call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4139,13 +4343,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4303,22 +4507,22 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, 
"use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "send-notification-emails" => { - call = call.send_notification_emails(arg_from_str(value.unwrap_or("false"), err, "send-notification-emails", "boolean")); + call = call.send_notification_emails( value.map(|v| arg_from_str(v, err, "send-notification-emails", "boolean")).unwrap_or(false)); }, "move-to-new-owners-root" => { - call = call.move_to_new_owners_root(arg_from_str(value.unwrap_or("false"), err, "move-to-new-owners-root", "boolean")); + call = call.move_to_new_owners_root( value.map(|v| arg_from_str(v, err, "move-to-new-owners-root", "boolean")).unwrap_or(false)); }, "enforce-single-parent" => { - call = call.enforce_single_parent(arg_from_str(value.unwrap_or("false"), err, "enforce-single-parent", "boolean")); + call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, "email-message" => { call = call.email_message(value.unwrap_or("")); @@ -4377,19 +4581,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", 
"boolean")).unwrap_or(false)); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); @@ -4498,19 +4702,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, "transfer-ownership" => { - call = call.transfer_ownership(arg_from_str(value.unwrap_or("false"), err, "transfer-ownership", "boolean")); + call = call.transfer_ownership( value.map(|v| arg_from_str(v, err, "transfer-ownership", "boolean")).unwrap_or(false)); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, 
"supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "remove-expiration" => { - call = call.remove_expiration(arg_from_str(value.unwrap_or("false"), err, "remove-expiration", "boolean")); + call = call.remove_expiration( value.map(|v| arg_from_str(v, err, "remove-expiration", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4616,19 +4820,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, "transfer-ownership" => { - call = call.transfer_ownership(arg_from_str(value.unwrap_or("false"), err, "transfer-ownership", "boolean")); + call = call.transfer_ownership( value.map(|v| arg_from_str(v, err, "transfer-ownership", "boolean")).unwrap_or(false)); }, "supports-team-drives" => { - call = call.supports_team_drives(arg_from_str(value.unwrap_or("false"), err, "supports-team-drives", "boolean")); + call = call.supports_team_drives( value.map(|v| arg_from_str(v, err, "supports-team-drives", "boolean")).unwrap_or(false)); }, "supports-all-drives" => { - call = call.supports_all_drives(arg_from_str(value.unwrap_or("false"), err, "supports-all-drives", "boolean")); + call = call.supports_all_drives( value.map(|v| arg_from_str(v, err, "supports-all-drives", "boolean")).unwrap_or(false)); }, "remove-expiration" => { - call = call.remove_expiration(arg_from_str(value.unwrap_or("false"), err, "remove-expiration", "boolean")); + call = call.remove_expiration( value.map(|v| arg_from_str(v, err, "remove-expiration", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5162,7 +5366,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5319,10 +5523,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5673,7 +5877,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5987,7 +6191,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6081,6 +6285,7 @@ where "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename" => Some(("capabilities.canRename", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename-team-drive" => Some(("capabilities.canRenameTeamDrive", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-reset-team-drive-restrictions" => Some(("capabilities.canResetTeamDriveRestrictions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-share" => Some(("capabilities.canShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-trash-children" => Some(("capabilities.canTrashChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "color-rgb" => Some(("colorRgb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -6095,7 +6300,7 @@ where "restrictions.team-members-only" => Some(("restrictions.teamMembersOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "theme-id" => Some(("themeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-team-drive-background", "can-change-team-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-team-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-remove-children", "can-rename", "can-rename-team-drive", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-date", "domain-users-only", "id", "kind", "name", "org-unit-id", "restrictions", "team-members-only", "theme-id", "width", "x-coordinate", "y-coordinate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", 
"can-change-team-drive-background", "can-change-team-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-team-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-remove-children", "can-rename", "can-rename-team-drive", "can-reset-team-drive-restrictions", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-date", "domain-users-only", "id", "kind", "name", "org-unit-id", "restrictions", "team-members-only", "theme-id", "width", "x-coordinate", "y-coordinate"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6162,7 +6367,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, "q" => { call = call.q(value.unwrap_or("")); @@ -6171,7 +6376,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6265,6 +6470,7 @@ where "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename" => Some(("capabilities.canRename", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename-team-drive" => Some(("capabilities.canRenameTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + 
"capabilities.can-reset-team-drive-restrictions" => Some(("capabilities.canResetTeamDriveRestrictions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-share" => Some(("capabilities.canShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-trash-children" => Some(("capabilities.canTrashChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "color-rgb" => Some(("colorRgb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -6279,7 +6485,7 @@ where "restrictions.team-members-only" => Some(("restrictions.teamMembersOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "theme-id" => Some(("themeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-team-drive-background", "can-change-team-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-team-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-remove-children", "can-rename", "can-rename-team-drive", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-date", "domain-users-only", "id", "kind", "name", "org-unit-id", "restrictions", "team-members-only", "theme-id", "width", "x-coordinate", "y-coordinate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-team-drive-background", 
"can-change-team-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-team-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-remove-children", "can-rename", "can-rename-team-drive", "can-reset-team-drive-restrictions", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-date", "domain-users-only", "id", "kind", "name", "org-unit-id", "restrictions", "team-members-only", "theme-id", "width", "x-coordinate", "y-coordinate"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6294,7 +6500,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "use-domain-admin-access" => { - call = call.use_domain_admin_access(arg_from_str(value.unwrap_or("false"), err, "use-domain-admin-access", "boolean")); + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -6505,6 +6711,12 @@ where ("list", Some(opt)) => { call_result = self._files_list(opt, dry_run, &mut err).await; }, + ("list-labels", Some(opt)) => { + call_result = self._files_list_labels(opt, dry_run, &mut err).await; + }, + ("modify-labels", Some(opt)) => { + call_result = self._files_modify_labels(opt, dry_run, &mut err).await; + }, ("patch", Some(opt)) => { call_result = self._files_patch(opt, dry_run, &mut err).await; }, @@ -7336,7 +7548,7 @@ async fn main() { ]), ]), - ("files", "methods: 'copy', 'delete', 'empty-trash', 'export', 'generate-ids', 'get', 'insert', 'list', 'patch', 'touch', 'trash', 'untrash', 'update' and 'watch'", vec![ + ("files", "methods: 'copy', 'delete', 'empty-trash', 'export', 'generate-ids', 'get', 'insert', 'list', 'list-labels', 'modify-labels', 'patch', 'touch', 'trash', 'untrash', 'update' and 'watch'", vec![ ("copy", 
Some(r##"Creates a copy of the specified file. Folders cannot be copied."##), "Details at http://byron.github.io/google-apis-rs/google_drive2_cli/files_copy", @@ -7495,6 +7707,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list-labels", + Some(r##"Lists the labels on a file."##), + "Details at http://byron.github.io/google-apis-rs/google_drive2_cli/files_list-labels", + vec![ + (Some(r##"file-id"##), + None, + Some(r##"The ID of the file."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("modify-labels", + Some(r##"Modifies the set of labels on a file."##), + "Details at http://byron.github.io/google-apis-rs/google_drive2_cli/files_modify-labels", + vec![ + (Some(r##"file-id"##), + None, + Some(r##"The ID of the file for which the labels are modified."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -7630,7 +7892,7 @@ async fn main() { Some(false)), ]), ("watch", - Some(r##"Subscribe to changes on a file"##), + Some(r##"Subscribes to changes to a file. 
While you can establish a channel for changes to a file on a shared drive, a change to a shared drive file won't create a notification."##), "Details at http://byron.github.io/google-apis-rs/google_drive2_cli/files_watch", vec![ (Some(r##"file-id"##), @@ -8593,7 +8855,7 @@ async fn main() { let mut app = App::new("drive2") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230115") .about("Manages files in Drive including uploading, downloading, searching, detecting changes, and updating sharing permissions.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_drive2_cli") .arg(Arg::with_name("url") diff --git a/gen/drive2/Cargo.toml b/gen/drive2/Cargo.toml index 1345ea5f60..4e5745e4c0 100644 --- a/gen/drive2/Cargo.toml +++ b/gen/drive2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-drive2" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with drive (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/drive2" homepage = "https://developers.google.com/drive/" -documentation = "https://docs.rs/google-drive2/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-drive2/5.0.2+20230115" license = "MIT" keywords = ["drive", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/drive2/README.md b/gen/drive2/README.md index 2ceee3e2c8..e43c780725 100644 --- a/gen/drive2/README.md +++ b/gen/drive2/README.md @@ -5,62 +5,62 @@ DO NOT EDIT ! --> The `google-drive2` library allows access to all features of the *Google drive* service. -This documentation was generated from *drive* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *drive:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *drive* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *drive:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *drive* *v2* API can be found at the [official documentation site](https://developers.google.com/drive/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/DriveHub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/DriveHub) ... -* [about](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::About) - * [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::AboutGetCall) -* [apps](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::App) - * [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::AppGetCall) and [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::AppListCall) -* [changes](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::Change) - * [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChangeGetCall), [*get start page token*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChangeGetStartPageTokenCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChangeListCall) and [*watch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChangeWatchCall) -* [channels](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::Channel) - * [*stop*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChannelStopCall) +* [about](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::About) + * [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::AboutGetCall) +* 
[apps](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::App) + * [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::AppGetCall) and [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::AppListCall) +* [changes](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::Change) + * [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChangeGetCall), [*get start page token*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChangeGetStartPageTokenCall), [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChangeListCall) and [*watch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChangeWatchCall) +* [channels](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::Channel) + * [*stop*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChannelStopCall) * children - * [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChildDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChildGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChildInsertCall) and [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChildListCall) -* [comments](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::Comment) - * [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::CommentDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::CommentGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::CommentInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::CommentListCall), [*patch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::CommentPatchCall) and 
[*update*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::CommentUpdateCall) -* [drives](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::Drive) - * [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::DriveDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::DriveGetCall), [*hide*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::DriveHideCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::DriveInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::DriveListCall), [*unhide*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::DriveUnhideCall) and [*update*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::DriveUpdateCall) -* [files](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::File) - * [*copy*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileCopyCall), [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileDeleteCall), [*empty trash*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileEmptyTrashCall), [*export*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileExportCall), [*generate ids*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileGenerateIdCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileListCall), [*list labels*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileListLabelCall), [*modify labels*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileModifyLabelCall), 
[*patch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FilePatchCall), [*touch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileTouchCall), [*trash*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileTrashCall), [*untrash*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileUntrashCall), [*update*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileUpdateCall) and [*watch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileWatchCall) + * [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChildDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChildGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChildInsertCall) and [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChildListCall) +* [comments](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::Comment) + * [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::CommentDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::CommentGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::CommentInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::CommentListCall), [*patch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::CommentPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::CommentUpdateCall) +* [drives](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::Drive) + * [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::DriveDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::DriveGetCall), [*hide*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::DriveHideCall), 
[*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::DriveInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::DriveListCall), [*unhide*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::DriveUnhideCall) and [*update*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::DriveUpdateCall) +* [files](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::File) + * [*copy*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileCopyCall), [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileDeleteCall), [*empty trash*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileEmptyTrashCall), [*export*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileExportCall), [*generate ids*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileGenerateIdCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileListCall), [*list labels*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileListLabelCall), [*modify labels*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileModifyLabelCall), [*patch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FilePatchCall), [*touch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileTouchCall), [*trash*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileTrashCall), [*untrash*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileUntrashCall), [*update*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileUpdateCall) and [*watch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileWatchCall) * parents - * 
[*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ParentDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ParentGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ParentInsertCall) and [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ParentListCall) -* [permissions](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::Permission) - * [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PermissionDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PermissionGetCall), [*get id for email*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PermissionGetIdForEmailCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PermissionInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PermissionListCall), [*patch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PermissionPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PermissionUpdateCall) -* [properties](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::Property) - * [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PropertyDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PropertyGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PropertyInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PropertyListCall), [*patch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PropertyPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::PropertyUpdateCall) + * 
[*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ParentDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ParentGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ParentInsertCall) and [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ParentListCall) +* [permissions](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::Permission) + * [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PermissionDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PermissionGetCall), [*get id for email*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PermissionGetIdForEmailCall), [*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PermissionInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PermissionListCall), [*patch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PermissionPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PermissionUpdateCall) +* [properties](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::Property) + * [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PropertyDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PropertyGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PropertyInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PropertyListCall), [*patch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PropertyPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::PropertyUpdateCall) * replies - * [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ReplyDeleteCall), 
[*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ReplyGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ReplyInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ReplyListCall), [*patch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ReplyPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ReplyUpdateCall) -* [revisions](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::Revision) - * [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::RevisionDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::RevisionGetCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::RevisionListCall), [*patch*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::RevisionPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::RevisionUpdateCall) + * [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ReplyDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ReplyGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ReplyInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ReplyListCall), [*patch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ReplyPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ReplyUpdateCall) +* [revisions](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::Revision) + * [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::RevisionDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::RevisionGetCall), 
[*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::RevisionListCall), [*patch*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::RevisionPatchCall) and [*update*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::RevisionUpdateCall) * teamdrives - * [*delete*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::TeamdriveDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::TeamdriveGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::TeamdriveInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::TeamdriveListCall) and [*update*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::TeamdriveUpdateCall) + * [*delete*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::TeamdriveDeleteCall), [*get*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::TeamdriveGetCall), [*insert*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::TeamdriveInsertCall), [*list*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::TeamdriveListCall) and [*update*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::TeamdriveUpdateCall) Upload supported by ... -* [*insert files*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileInsertCall) -* [*update files*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileUpdateCall) +* [*insert files*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileInsertCall) +* [*update files*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileUpdateCall) Download supported by ... 
-* [*export files*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileExportCall) -* [*get files*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileGetCall) -* [*watch files*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileWatchCall) +* [*export files*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileExportCall) +* [*get files*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileGetCall) +* [*watch files*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileWatchCall) Subscription supported by ... -* [*list changes*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChangeListCall) -* [*watch changes*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::ChangeWatchCall) -* [*get files*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileGetCall) -* [*insert files*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileInsertCall) -* [*watch files*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/api::FileWatchCall) +* [*list changes*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChangeListCall) +* [*watch changes*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::ChangeWatchCall) +* [*get files*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileGetCall) +* [*insert files*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileInsertCall) +* [*watch files*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/api::FileWatchCall) @@ -68,17 +68,17 @@ Subscription supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/DriveHub)** +* **[Hub](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/DriveHub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::CallBuilder) -* **[Resources](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::CallBuilder) +* **[Resources](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::Part)** + * **[Parts](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -202,17 +202,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -222,29 +222,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::Delegate) to the -[Method Builder](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::Delegate) to the +[Method Builder](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::RequestValue) and -[decodable](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::RequestValue) and +[decodable](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-drive2/5.0.2-beta-1+20230115/google_drive2/client::RequestValue) are moved +* [request values](https://docs.rs/google-drive2/5.0.2+20230115/google_drive2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/drive2/src/api.rs b/gen/drive2/src/api.rs index 5c435ab068..c0acb68103 100644 --- a/gen/drive2/src/api.rs +++ b/gen/drive2/src/api.rs @@ -175,7 +175,7 @@ impl<'a, S> DriveHub { DriveHub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/drive/v2/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -225,7 +225,7 @@ impl<'a, S> DriveHub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/drive2/src/client.rs b/gen/drive2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/drive2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/drive2/src/lib.rs b/gen/drive2/src/lib.rs index b629c7b099..75195e5305 100644 --- a/gen/drive2/src/lib.rs +++ b/gen/drive2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *drive* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *drive:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *drive* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *drive:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *drive* *v2* API can be found at the //! [official documentation site](https://developers.google.com/drive/). diff --git a/gen/drive3-cli/Cargo.toml b/gen/drive3-cli/Cargo.toml index 9c01421229..42518e613e 100644 --- a/gen/drive3-cli/Cargo.toml +++ b/gen/drive3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-drive3-cli" -version = "5.0.2-beta-1+20220225" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with drive (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/drive3-cli" @@ -38,5 +38,5 @@ tower-service = "^0.3.1" [dependencies.google-drive3] path = "../drive3" -version = "5.0.2-beta-1+20220225" +version = "5.0.2+20230115" diff --git a/gen/drive3-cli/README.md b/gen/drive3-cli/README.md index a0789baff0..983cd5c18a 100644 --- a/gen/drive3-cli/README.md +++ b/gen/drive3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *drive* API at revision *20220225*. The CLI is at version *5.0.2-beta-1*. +This documentation was generated from the *drive* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash drive3 [options] @@ -60,6 +60,8 @@ drive3 [options] generate-ids [-p ]... [-o ] get [-p ]... [-o ] list [-p ]... [-o ] + list-labels [-p ]... [-o ] + modify-labels (-r )... [-p ]... [-o ] update (-r )... (-u simple -f [-m ]) [-p ]... [-o ] watch (-r )... [-p ]... 
[-o ] permissions diff --git a/gen/drive3-cli/mkdocs.yml b/gen/drive3-cli/mkdocs.yml index 01a6b76451..35d09b99ea 100644 --- a/gen/drive3-cli/mkdocs.yml +++ b/gen/drive3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: drive v5.0.2-beta-1+20220225 +site_name: drive v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-drive3-cli site_description: A complete library to interact with drive (protocol v3) @@ -40,6 +40,8 @@ nav: - 'Generate Ids': 'files_generate-ids.md' - 'Get': 'files_get.md' - 'List': 'files_list.md' + - 'List Labels': 'files_list-labels.md' + - 'Modify Labels': 'files_modify-labels.md' - 'Update': 'files_update.md' - 'Watch': 'files_watch.md' - 'Permissions': diff --git a/gen/drive3-cli/src/client.rs b/gen/drive3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/drive3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/drive3-cli/src/main.rs b/gen/drive3-cli/src/main.rs index 06bdbca5a3..b188a7a923 100644 --- a/gen/drive3-cli/src/main.rs +++ b/gen/drive3-cli/src/main.rs @@ -200,6 +200,9 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "include-items-from-all-drives" => { call = call.include_items_from_all_drives( value.map(|v| arg_from_str(v, err, "include-items-from-all-drives", "boolean")).unwrap_or(false)); }, @@ -222,7 +225,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["drive-id", "include-corpus-removals", "include-items-from-all-drives", "include-permissions-for-view", "include-removed", "include-team-drive-items", "page-size", "restrict-to-my-drive", "spaces", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); + v.extend(["drive-id", "include-corpus-removals", "include-items-from-all-drives", "include-labels", "include-permissions-for-view", "include-removed", "include-team-drive-items", "page-size", "restrict-to-my-drive", "spaces", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); v } )); } } @@ -331,6 +334,9 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "include-items-from-all-drives" => { call = call.include_items_from_all_drives( value.map(|v| arg_from_str(v, err, "include-items-from-all-drives", "boolean")).unwrap_or(false)); }, @@ -353,7 +359,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["drive-id", 
"include-corpus-removals", "include-items-from-all-drives", "include-permissions-for-view", "include-removed", "include-team-drive-items", "page-size", "restrict-to-my-drive", "spaces", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); + v.extend(["drive-id", "include-corpus-removals", "include-items-from-all-drives", "include-labels", "include-permissions-for-view", "include-removed", "include-team-drive-items", "page-size", "restrict-to-my-drive", "spaces", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); v } )); } } @@ -884,6 +890,7 @@ where "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename" => Some(("capabilities.canRename", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename-drive" => Some(("capabilities.canRenameDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-reset-drive-restrictions" => Some(("capabilities.canResetDriveRestrictions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-share" => Some(("capabilities.canShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-trash-children" => Some(("capabilities.canTrashChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "color-rgb" => Some(("colorRgb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -899,7 +906,7 @@ where "restrictions.drive-members-only" => Some(("restrictions.driveMembersOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "theme-id" => Some(("themeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", 
"can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-drive-background", "can-change-drive-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-rename", "can-rename-drive", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-time", "domain-users-only", "drive-members-only", "hidden", "id", "kind", "name", "org-unit-id", "restrictions", "theme-id", "width", "x-coordinate", "y-coordinate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-drive-background", "can-change-drive-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-rename", "can-rename-drive", "can-reset-drive-restrictions", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-time", "domain-users-only", "drive-members-only", "hidden", "id", "kind", "name", "org-unit-id", "restrictions", "theme-id", "width", "x-coordinate", "y-coordinate"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -965,6 +972,12 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "use-domain-admin-access" => { + call = call.use_domain_admin_access( value.map(|v| arg_from_str(v, err, "use-domain-admin-access", "boolean")).unwrap_or(false)); + }, + 
"allow-item-deletion" => { + call = call.allow_item_deletion( value.map(|v| arg_from_str(v, err, "allow-item-deletion", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -978,6 +991,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); + v.extend(["allow-item-deletion", "use-domain-admin-access"].iter().map(|v|*v)); v } )); } } @@ -1272,6 +1286,7 @@ where "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename" => Some(("capabilities.canRename", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename-drive" => Some(("capabilities.canRenameDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-reset-drive-restrictions" => Some(("capabilities.canResetDriveRestrictions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-share" => Some(("capabilities.canShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-trash-children" => Some(("capabilities.canTrashChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "color-rgb" => Some(("colorRgb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1287,7 +1302,7 @@ where "restrictions.drive-members-only" => Some(("restrictions.driveMembersOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "theme-id" => Some(("themeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", 
"can-change-drive-background", "can-change-drive-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-rename", "can-rename-drive", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-time", "domain-users-only", "drive-members-only", "hidden", "id", "kind", "name", "org-unit-id", "restrictions", "theme-id", "width", "x-coordinate", "y-coordinate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-drive-background", "can-change-drive-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-rename", "can-rename-drive", "can-reset-drive-restrictions", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-time", "domain-users-only", "drive-members-only", "hidden", "id", "kind", "name", "org-unit-id", "restrictions", "theme-id", "width", "x-coordinate", "y-coordinate"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1391,6 +1406,7 @@ where "capabilities.can-list-children" => Some(("capabilities.canListChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content" => Some(("capabilities.canModifyContent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content-restriction" => Some(("capabilities.canModifyContentRestriction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod 
})), + "capabilities.can-modify-labels" => Some(("capabilities.canModifyLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-drive" => Some(("capabilities.canMoveChildrenOutOfDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-team-drive" => Some(("capabilities.canMoveChildrenOutOfTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-within-drive" => Some(("capabilities.canMoveChildrenWithinDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1402,6 +1418,7 @@ where "capabilities.can-move-item-within-team-drive" => Some(("capabilities.canMoveItemWithinTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-team-drive-item" => Some(("capabilities.canMoveTeamDriveItem", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-drive" => Some(("capabilities.canReadDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-read-labels" => Some(("capabilities.canReadLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-team-drive" => Some(("capabilities.canReadTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1474,6 +1491,8 @@ where "properties" => Some(("properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "quota-bytes-used" => Some(("quotaBytesUsed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-key" 
=> Some(("resourceKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha1-checksum" => Some(("sha1Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha256-checksum" => Some(("sha256Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "shared" => Some(("shared", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared-with-me-time" => Some(("sharedWithMeTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sharing-user.display-name" => Some(("sharingUser.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1510,7 +1529,7 @@ where "web-view-link" => Some(("webViewLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "writers-can-share" => Some(("writersCanShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["altitude", "aperture", "app-properties", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-security-update-enabled", "can-change-viewers-can-copy-content", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", 
"capabilities", "color-space", "content-hints", "copy-requires-writer-permission", "created-time", "description", "display-name", "drive-id", "duration-millis", "email-address", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "iso-speed", "kind", "last-modifying-user", "latitude", "lens", "link-share-metadata", "location", "longitude", "max-aperture-value", "md5-checksum", "me", "metering-mode", "mime-type", "modified-by-me", "modified-by-me-time", "modified-time", "name", "original-filename", "owned-by-me", "parents", "permission-id", "permission-ids", "photo-link", "properties", "quota-bytes-used", "resource-key", "rotation", "security-update-eligible", "security-update-enabled", "sensor", "shared", "shared-with-me-time", "sharing-user", "shortcut-details", "size", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "thumbnail", "thumbnail-link", "thumbnail-version", "time", "trashed", "trashed-time", "trashing-user", "version", "video-media-metadata", "viewed-by-me", "viewed-by-me-time", "viewers-can-copy-content", "web-content-link", "web-view-link", "white-balance", "width", "writers-can-share"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["altitude", "aperture", "app-properties", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-security-update-enabled", "can-change-viewers-can-copy-content", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", 
"can-modify-content-restriction", "can-modify-labels", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-labels", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "content-hints", "copy-requires-writer-permission", "created-time", "description", "display-name", "drive-id", "duration-millis", "email-address", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "iso-speed", "kind", "last-modifying-user", "latitude", "lens", "link-share-metadata", "location", "longitude", "max-aperture-value", "md5-checksum", "me", "metering-mode", "mime-type", "modified-by-me", "modified-by-me-time", "modified-time", "name", "original-filename", "owned-by-me", "parents", "permission-id", "permission-ids", "photo-link", "properties", "quota-bytes-used", "resource-key", "rotation", "security-update-eligible", "security-update-enabled", "sensor", "sha1-checksum", "sha256-checksum", "shared", "shared-with-me-time", "sharing-user", "shortcut-details", "size", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "thumbnail", "thumbnail-link", "thumbnail-version", "time", "trashed", "trashed-time", "trashing-user", "version", "video-media-metadata", "viewed-by-me", 
"viewed-by-me-time", "viewers-can-copy-content", "web-content-link", "web-view-link", "white-balance", "width", "writers-can-share"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1539,6 +1558,9 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "ignore-default-visibility" => { call = call.ignore_default_visibility( value.map(|v| arg_from_str(v, err, "ignore-default-visibility", "boolean")).unwrap_or(false)); }, @@ -1558,7 +1580,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["enforce-single-parent", "ignore-default-visibility", "include-permissions-for-view", "keep-revision-forever", "ocr-language", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); + v.extend(["enforce-single-parent", "ignore-default-visibility", "include-labels", "include-permissions-for-view", "keep-revision-forever", "ocr-language", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); v } )); } } @@ -1632,6 +1654,7 @@ where "capabilities.can-list-children" => Some(("capabilities.canListChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content" => Some(("capabilities.canModifyContent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content-restriction" => Some(("capabilities.canModifyContentRestriction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-modify-labels" => Some(("capabilities.canModifyLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-drive" => Some(("capabilities.canMoveChildrenOutOfDrive", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-team-drive" => Some(("capabilities.canMoveChildrenOutOfTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-within-drive" => Some(("capabilities.canMoveChildrenWithinDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1643,6 +1666,7 @@ where "capabilities.can-move-item-within-team-drive" => Some(("capabilities.canMoveItemWithinTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-team-drive-item" => Some(("capabilities.canMoveTeamDriveItem", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-drive" => Some(("capabilities.canReadDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-read-labels" => Some(("capabilities.canReadLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-team-drive" => Some(("capabilities.canReadTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1715,6 +1739,8 @@ where "properties" => Some(("properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "quota-bytes-used" => Some(("quotaBytesUsed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-key" => Some(("resourceKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha1-checksum" => Some(("sha1Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha256-checksum" => Some(("sha256Checksum", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "shared" => Some(("shared", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared-with-me-time" => Some(("sharedWithMeTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sharing-user.display-name" => Some(("sharingUser.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1751,7 +1777,7 @@ where "web-view-link" => Some(("webViewLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "writers-can-share" => Some(("writersCanShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["altitude", "aperture", "app-properties", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-security-update-enabled", "can-change-viewers-can-copy-content", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "content-hints", "copy-requires-writer-permission", "created-time", "description", "display-name", "drive-id", "duration-millis", "email-address", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", 
"file-extension", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "iso-speed", "kind", "last-modifying-user", "latitude", "lens", "link-share-metadata", "location", "longitude", "max-aperture-value", "md5-checksum", "me", "metering-mode", "mime-type", "modified-by-me", "modified-by-me-time", "modified-time", "name", "original-filename", "owned-by-me", "parents", "permission-id", "permission-ids", "photo-link", "properties", "quota-bytes-used", "resource-key", "rotation", "security-update-eligible", "security-update-enabled", "sensor", "shared", "shared-with-me-time", "sharing-user", "shortcut-details", "size", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "thumbnail", "thumbnail-link", "thumbnail-version", "time", "trashed", "trashed-time", "trashing-user", "version", "video-media-metadata", "viewed-by-me", "viewed-by-me-time", "viewers-can-copy-content", "web-content-link", "web-view-link", "white-balance", "width", "writers-can-share"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["altitude", "aperture", "app-properties", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-security-update-enabled", "can-change-viewers-can-copy-content", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-modify-labels", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", 
"can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-labels", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "content-hints", "copy-requires-writer-permission", "created-time", "description", "display-name", "drive-id", "duration-millis", "email-address", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "iso-speed", "kind", "last-modifying-user", "latitude", "lens", "link-share-metadata", "location", "longitude", "max-aperture-value", "md5-checksum", "me", "metering-mode", "mime-type", "modified-by-me", "modified-by-me-time", "modified-time", "name", "original-filename", "owned-by-me", "parents", "permission-id", "permission-ids", "photo-link", "properties", "quota-bytes-used", "resource-key", "rotation", "security-update-eligible", "security-update-enabled", "sensor", "sha1-checksum", "sha256-checksum", "shared", "shared-with-me-time", "sharing-user", "shortcut-details", "size", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "thumbnail", "thumbnail-link", "thumbnail-version", "time", "trashed", "trashed-time", "trashing-user", "version", "video-media-metadata", "viewed-by-me", "viewed-by-me-time", "viewers-can-copy-content", "web-content-link", "web-view-link", "white-balance", "width", "writers-can-share"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1783,6 
+1809,9 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "ignore-default-visibility" => { call = call.ignore_default_visibility( value.map(|v| arg_from_str(v, err, "ignore-default-visibility", "boolean")).unwrap_or(false)); }, @@ -1802,7 +1831,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["enforce-single-parent", "ignore-default-visibility", "include-permissions-for-view", "keep-revision-forever", "ocr-language", "supports-all-drives", "supports-team-drives", "use-content-as-indexable-text"].iter().map(|v|*v)); + v.extend(["enforce-single-parent", "ignore-default-visibility", "include-labels", "include-permissions-for-view", "keep-revision-forever", "ocr-language", "supports-all-drives", "supports-team-drives", "use-content-as-indexable-text"].iter().map(|v|*v)); v } )); } } @@ -2077,6 +2106,9 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "acknowledge-abuse" => { call = call.acknowledge_abuse( value.map(|v| arg_from_str(v, err, "acknowledge-abuse", "boolean")).unwrap_or(false)); }, @@ -2096,7 +2128,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["acknowledge-abuse", "include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); + v.extend(["acknowledge-abuse", "include-labels", "include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); v } )); } } @@ -2172,6 +2204,9 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = 
call.include_labels(value.unwrap_or("")); + }, "include-items-from-all-drives" => { call = call.include_items_from_all_drives( value.map(|v| arg_from_str(v, err, "include-items-from-all-drives", "boolean")).unwrap_or(false)); }, @@ -2197,7 +2232,151 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["corpora", "corpus", "drive-id", "include-items-from-all-drives", "include-permissions-for-view", "include-team-drive-items", "order-by", "page-size", "page-token", "q", "spaces", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); + v.extend(["corpora", "corpus", "drive-id", "include-items-from-all-drives", "include-labels", "include-permissions-for-view", "include-team-drive-items", "order-by", "page-size", "page-token", "q", "spaces", "supports-all-drives", "supports-team-drives", "team-drive-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _files_list_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.files().list_labels(opt.value_of("file-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "max-results" => { + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["max-results", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _files_modify_labels(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["kind"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ModifyLabelsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.files().modify_labels(request, opt.value_of("file-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); v } )); } } @@ -2271,6 +2450,7 @@ where "capabilities.can-list-children" => Some(("capabilities.canListChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content" => Some(("capabilities.canModifyContent", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-modify-content-restriction" => Some(("capabilities.canModifyContentRestriction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-modify-labels" => Some(("capabilities.canModifyLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-drive" => Some(("capabilities.canMoveChildrenOutOfDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-out-of-team-drive" => Some(("capabilities.canMoveChildrenOutOfTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-children-within-drive" => Some(("capabilities.canMoveChildrenWithinDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2282,6 +2462,7 @@ where "capabilities.can-move-item-within-team-drive" => Some(("capabilities.canMoveItemWithinTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-move-team-drive-item" => Some(("capabilities.canMoveTeamDriveItem", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-drive" => Some(("capabilities.canReadDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-read-labels" => Some(("capabilities.canReadLabels", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-revisions" => Some(("capabilities.canReadRevisions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-read-team-drive" => Some(("capabilities.canReadTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2354,6 +2535,8 @@ where 
"properties" => Some(("properties", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "quota-bytes-used" => Some(("quotaBytesUsed", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-key" => Some(("resourceKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha1-checksum" => Some(("sha1Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha256-checksum" => Some(("sha256Checksum", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "shared" => Some(("shared", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "shared-with-me-time" => Some(("sharedWithMeTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "sharing-user.display-name" => Some(("sharingUser.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2390,7 +2573,7 @@ where "web-view-link" => Some(("webViewLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "writers-can-share" => Some(("writersCanShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["altitude", "aperture", "app-properties", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", "can-change-copy-requires-writer-permission", "can-change-security-update-enabled", "can-change-viewers-can-copy-content", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", 
"can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "content-hints", "copy-requires-writer-permission", "created-time", "description", "display-name", "drive-id", "duration-millis", "email-address", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "iso-speed", "kind", "last-modifying-user", "latitude", "lens", "link-share-metadata", "location", "longitude", "max-aperture-value", "md5-checksum", "me", "metering-mode", "mime-type", "modified-by-me", "modified-by-me-time", "modified-time", "name", "original-filename", "owned-by-me", "parents", "permission-id", "permission-ids", "photo-link", "properties", "quota-bytes-used", "resource-key", "rotation", "security-update-eligible", "security-update-enabled", "sensor", "shared", "shared-with-me-time", "sharing-user", "shortcut-details", "size", "spaces", "starred", "subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "thumbnail", "thumbnail-link", "thumbnail-version", "time", "trashed", "trashed-time", "trashing-user", "version", "video-media-metadata", "viewed-by-me", "viewed-by-me-time", "viewers-can-copy-content", "web-content-link", "web-view-link", "white-balance", "width", "writers-can-share"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["altitude", "aperture", "app-properties", "camera-make", "camera-model", "can-accept-ownership", "can-add-children", "can-add-folder-from-another-drive", "can-add-my-drive-parent", 
"can-change-copy-requires-writer-permission", "can-change-security-update-enabled", "can-change-viewers-can-copy-content", "can-comment", "can-copy", "can-delete", "can-delete-children", "can-download", "can-edit", "can-list-children", "can-modify-content", "can-modify-content-restriction", "can-modify-labels", "can-move-children-out-of-drive", "can-move-children-out-of-team-drive", "can-move-children-within-drive", "can-move-children-within-team-drive", "can-move-item-into-team-drive", "can-move-item-out-of-drive", "can-move-item-out-of-team-drive", "can-move-item-within-drive", "can-move-item-within-team-drive", "can-move-team-drive-item", "can-read-drive", "can-read-labels", "can-read-revisions", "can-read-team-drive", "can-remove-children", "can-remove-my-drive-parent", "can-rename", "can-share", "can-trash", "can-trash-children", "can-untrash", "capabilities", "color-space", "content-hints", "copy-requires-writer-permission", "created-time", "description", "display-name", "drive-id", "duration-millis", "email-address", "explicitly-trashed", "export-links", "exposure-bias", "exposure-mode", "exposure-time", "file-extension", "flash-used", "focal-length", "folder-color-rgb", "full-file-extension", "has-augmented-permissions", "has-thumbnail", "head-revision-id", "height", "icon-link", "id", "image", "image-media-metadata", "indexable-text", "is-app-authorized", "iso-speed", "kind", "last-modifying-user", "latitude", "lens", "link-share-metadata", "location", "longitude", "max-aperture-value", "md5-checksum", "me", "metering-mode", "mime-type", "modified-by-me", "modified-by-me-time", "modified-time", "name", "original-filename", "owned-by-me", "parents", "permission-id", "permission-ids", "photo-link", "properties", "quota-bytes-used", "resource-key", "rotation", "security-update-eligible", "security-update-enabled", "sensor", "sha1-checksum", "sha256-checksum", "shared", "shared-with-me-time", "sharing-user", "shortcut-details", "size", "spaces", "starred", 
"subject-distance", "target-id", "target-mime-type", "target-resource-key", "team-drive-id", "thumbnail", "thumbnail-link", "thumbnail-version", "time", "trashed", "trashed-time", "trashing-user", "version", "video-media-metadata", "viewed-by-me", "viewed-by-me-time", "viewers-can-copy-content", "web-content-link", "web-view-link", "white-balance", "width", "writers-can-share"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2425,6 +2608,9 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "enforce-single-parent" => { call = call.enforce_single_parent( value.map(|v| arg_from_str(v, err, "enforce-single-parent", "boolean")).unwrap_or(false)); }, @@ -2444,7 +2630,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["add-parents", "enforce-single-parent", "include-permissions-for-view", "keep-revision-forever", "ocr-language", "remove-parents", "supports-all-drives", "supports-team-drives", "use-content-as-indexable-text"].iter().map(|v|*v)); + v.extend(["add-parents", "enforce-single-parent", "include-labels", "include-permissions-for-view", "keep-revision-forever", "ocr-language", "remove-parents", "supports-all-drives", "supports-team-drives", "use-content-as-indexable-text"].iter().map(|v|*v)); v } )); } } @@ -2539,6 +2725,9 @@ where "include-permissions-for-view" => { call = call.include_permissions_for_view(value.unwrap_or("")); }, + "include-labels" => { + call = call.include_labels(value.unwrap_or("")); + }, "acknowledge-abuse" => { call = call.acknowledge_abuse( value.map(|v| arg_from_str(v, err, "acknowledge-abuse", "boolean")).unwrap_or(false)); }, @@ -2558,7 +2747,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut 
v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["acknowledge-abuse", "include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); + v.extend(["acknowledge-abuse", "include-labels", "include-permissions-for-view", "supports-all-drives", "supports-team-drives"].iter().map(|v|*v)); v } )); } } @@ -3695,6 +3884,7 @@ where "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename" => Some(("capabilities.canRename", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename-team-drive" => Some(("capabilities.canRenameTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-reset-team-drive-restrictions" => Some(("capabilities.canResetTeamDriveRestrictions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-share" => Some(("capabilities.canShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-trash-children" => Some(("capabilities.canTrashChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "color-rgb" => Some(("colorRgb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3709,7 +3899,7 @@ where "restrictions.team-members-only" => Some(("restrictions.teamMembersOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "theme-id" => Some(("themeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-team-drive-background", "can-change-team-members-only-restriction", "can-comment", 
"can-copy", "can-delete-children", "can-delete-team-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-remove-children", "can-rename", "can-rename-team-drive", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-time", "domain-users-only", "id", "kind", "name", "org-unit-id", "restrictions", "team-members-only", "theme-id", "width", "x-coordinate", "y-coordinate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-team-drive-background", "can-change-team-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-team-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-remove-children", "can-rename", "can-rename-team-drive", "can-reset-team-drive-restrictions", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-time", "domain-users-only", "id", "kind", "name", "org-unit-id", "restrictions", "team-members-only", "theme-id", "width", "x-coordinate", "y-coordinate"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3979,6 +4169,7 @@ where "capabilities.can-remove-children" => Some(("capabilities.canRemoveChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename" => Some(("capabilities.canRename", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-rename-team-drive" => Some(("capabilities.canRenameTeamDrive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "capabilities.can-reset-team-drive-restrictions" => 
Some(("capabilities.canResetTeamDriveRestrictions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-share" => Some(("capabilities.canShare", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "capabilities.can-trash-children" => Some(("capabilities.canTrashChildren", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "color-rgb" => Some(("colorRgb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3993,7 +4184,7 @@ where "restrictions.team-members-only" => Some(("restrictions.teamMembersOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "theme-id" => Some(("themeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-team-drive-background", "can-change-team-members-only-restriction", "can-comment", "can-copy", "can-delete-children", "can-delete-team-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-remove-children", "can-rename", "can-rename-team-drive", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-time", "domain-users-only", "id", "kind", "name", "org-unit-id", "restrictions", "team-members-only", "theme-id", "width", "x-coordinate", "y-coordinate"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-managed-restrictions", "background-image-file", "background-image-link", "can-add-children", "can-change-copy-requires-writer-permission-restriction", "can-change-domain-users-only-restriction", "can-change-team-drive-background", "can-change-team-members-only-restriction", "can-comment", "can-copy", "can-delete-children", 
"can-delete-team-drive", "can-download", "can-edit", "can-list-children", "can-manage-members", "can-read-revisions", "can-remove-children", "can-rename", "can-rename-team-drive", "can-reset-team-drive-restrictions", "can-share", "can-trash-children", "capabilities", "color-rgb", "copy-requires-writer-permission", "created-time", "domain-users-only", "id", "kind", "name", "org-unit-id", "restrictions", "team-members-only", "theme-id", "width", "x-coordinate", "y-coordinate"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4179,6 +4370,12 @@ where ("list", Some(opt)) => { call_result = self._files_list(opt, dry_run, &mut err).await; }, + ("list-labels", Some(opt)) => { + call_result = self._files_list_labels(opt, dry_run, &mut err).await; + }, + ("modify-labels", Some(opt)) => { + call_result = self._files_modify_labels(opt, dry_run, &mut err).await; + }, ("update", Some(opt)) => { call_result = self._files_update(opt, dry_run, &mut err).await; }, @@ -4460,7 +4657,7 @@ async fn main() { ("comments", "methods: 'create', 'delete', 'get', 'list' and 'update'", vec![ ("create", - Some(r##"Creates a new comment on a file."##), + Some(r##"Creates a comment on a file."##), "Details at http://byron.github.io/google-apis-rs/google_drive3_cli/comments_create", vec![ (Some(r##"file-id"##), @@ -4597,7 +4794,7 @@ async fn main() { ("drives", "methods: 'create', 'delete', 'get', 'hide', 'list', 'unhide' and 'update'", vec![ ("create", - Some(r##"Creates a new shared drive."##), + Some(r##"Creates a shared drive."##), "Details at http://byron.github.io/google-apis-rs/google_drive3_cli/drives_create", vec![ (Some(r##"request-id"##), @@ -4752,7 +4949,7 @@ async fn main() { ]), ]), - ("files", "methods: 'copy', 'create', 'delete', 'empty-trash', 'export', 'generate-ids', 'get', 'list', 'update' and 'watch'", vec![ + ("files", "methods: 'copy', 'create', 'delete', 'empty-trash', 'export', 
'generate-ids', 'get', 'list', 'list-labels', 'modify-labels', 'update' and 'watch'", vec![ ("copy", Some(r##"Creates a copy of a file and applies any requested updates with patch semantics. Folders cannot be copied."##), "Details at http://byron.github.io/google-apis-rs/google_drive3_cli/files_copy", @@ -4782,7 +4979,7 @@ async fn main() { Some(false)), ]), ("create", - Some(r##"Creates a new file."##), + Some(r##"Creates a file."##), "Details at http://byron.github.io/google-apis-rs/google_drive3_cli/files_create", vec![ (Some(r##"kv"##), @@ -4911,6 +5108,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("list-labels", + Some(r##"Lists the labels on a file."##), + "Details at http://byron.github.io/google-apis-rs/google_drive3_cli/files_list-labels", + vec![ + (Some(r##"file-id"##), + None, + Some(r##"The ID of the file."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("modify-labels", + Some(r##"Modifies the set of labels on a file."##), + "Details at http://byron.github.io/google-apis-rs/google_drive3_cli/files_modify-labels", + vec![ + (Some(r##"file-id"##), + None, + Some(r##"The ID of the file for which the labels are modified."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into 
which to write the program's output"##), @@ -4952,7 +5199,7 @@ async fn main() { Some(false)), ]), ("watch", - Some(r##"Subscribes to changes to a file. While you can establish a channel forchanges to a file on a shared drive, a change to a shared drive file won't create a notification."##), + Some(r##"Subscribes to changes to a file. While you can establish a channel for changes to a file on a shared drive, a change to a shared drive file won't create a notification."##), "Details at http://byron.github.io/google-apis-rs/google_drive3_cli/files_watch", vec![ (Some(r##"file-id"##), @@ -5120,7 +5367,7 @@ async fn main() { ("replies", "methods: 'create', 'delete', 'get', 'list' and 'update'", vec![ ("create", - Some(r##"Creates a new reply to a comment."##), + Some(r##"Creates a reply to a comment."##), "Details at http://byron.github.io/google-apis-rs/google_drive3_cli/replies_create", vec![ (Some(r##"file-id"##), @@ -5511,7 +5758,7 @@ async fn main() { let mut app = App::new("drive3") .author("Sebastian Thiel ") - .version("5.0.2-beta-1+20220225") + .version("5.0.2+20230115") .about("Manages files in Drive including uploading, downloading, searching, detecting changes, and updating sharing permissions.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_drive3_cli") .arg(Arg::with_name("url") diff --git a/gen/drive3/src/client.rs b/gen/drive3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/drive3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, 
CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. 
-/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. 
- /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). 
- fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. 
- /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." 
- ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. -pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. 
-#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/driveactivity2-cli/Cargo.toml b/gen/driveactivity2-cli/Cargo.toml index fabe74b47f..6ee3d596fe 100644 --- a/gen/driveactivity2-cli/Cargo.toml +++ b/gen/driveactivity2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-driveactivity2-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230122" authors = ["Sebastian Thiel "] description = "A complete library to interact with Drive Activity (protocol v2)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/driveactivity2-cli" @@ -20,13 +20,13 @@ name = "driveactivity2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-driveactivity2] path = "../driveactivity2" -version = "4.0.1+20220301" +version = "5.0.2+20230122" + diff --git a/gen/driveactivity2-cli/README.md b/gen/driveactivity2-cli/README.md index 10b706fcc0..476c750e45 100644 --- a/gen/driveactivity2-cli/README.md +++ b/gen/driveactivity2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Drive Activity* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Drive Activity* API at revision *20230122*. The CLI is at version *5.0.2*. 
```bash driveactivity2 [options] diff --git a/gen/driveactivity2-cli/mkdocs.yml b/gen/driveactivity2-cli/mkdocs.yml index aab8b217f6..64a2caf083 100644 --- a/gen/driveactivity2-cli/mkdocs.yml +++ b/gen/driveactivity2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Drive Activity v4.0.1+20220301 +site_name: Drive Activity v5.0.2+20230122 site_url: http://byron.github.io/google-apis-rs/google-driveactivity2-cli site_description: A complete library to interact with Drive Activity (protocol v2) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/driveactivity2-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['activity_query.md', 'Activity', 'Query'] +nav: +- Home: 'index.md' +- 'Activity': + - 'Query': 'activity_query.md' theme: readthedocs diff --git a/gen/driveactivity2-cli/src/client.rs b/gen/driveactivity2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/driveactivity2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/driveactivity2-cli/src/main.rs b/gen/driveactivity2-cli/src/main.rs index d32321fae8..84f00ad193 100644 --- a/gen/driveactivity2-cli/src/main.rs +++ b/gen/driveactivity2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_driveactivity2::{api, Error, oauth2}; +use google_driveactivity2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -258,7 +257,7 @@ async fn main() { let mut app = App::new("driveactivity2") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230122") .about("Provides a historical view of activity in Google Drive.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_driveactivity2_cli") .arg(Arg::with_name("url") diff --git a/gen/driveactivity2/Cargo.toml b/gen/driveactivity2/Cargo.toml index 36564b0a15..05a1588edf 100644 --- a/gen/driveactivity2/Cargo.toml +++ b/gen/driveactivity2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-driveactivity2" -version = "5.0.2-beta-1+20230122" +version = "5.0.2+20230122" authors = ["Sebastian Thiel "] description = "A complete library to interact with Drive Activity (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/driveactivity2" homepage = "https://developers.google.com/drive/activity/" -documentation = "https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122" +documentation = 
"https://docs.rs/google-driveactivity2/5.0.2+20230122" license = "MIT" keywords = ["driveactivity", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/driveactivity2/README.md b/gen/driveactivity2/README.md index a05146b427..905a0215cc 100644 --- a/gen/driveactivity2/README.md +++ b/gen/driveactivity2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-driveactivity2` library allows access to all features of the *Google Drive Activity* service. -This documentation was generated from *Drive Activity* crate version *5.0.2-beta-1+20230122*, where *20230122* is the exact revision of the *driveactivity:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Drive Activity* crate version *5.0.2+20230122*, where *20230122* is the exact revision of the *driveactivity:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Drive Activity* *v2* API can be found at the [official documentation site](https://developers.google.com/drive/activity/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/DriveActivityHub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/DriveActivityHub) ... 
* activity - * [*query*](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/api::ActivityQueryCall) + * [*query*](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/api::ActivityQueryCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/DriveActivityHub)** +* **[Hub](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/DriveActivityHub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::CallBuilder) -* **[Resources](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::CallBuilder) +* **[Resources](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::Part)** + * **[Parts](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::Delegate) to the -[Method Builder](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::Delegate) to the +[Method Builder](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::RequestValue) and -[decodable](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::RequestValue) and +[decodable](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-driveactivity2/5.0.2-beta-1+20230122/google_driveactivity2/client::RequestValue) are moved +* [request values](https://docs.rs/google-driveactivity2/5.0.2+20230122/google_driveactivity2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/driveactivity2/src/api.rs b/gen/driveactivity2/src/api.rs index 0779983741..9313d8a2d9 100644 --- a/gen/driveactivity2/src/api.rs +++ b/gen/driveactivity2/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> DriveActivityHub { DriveActivityHub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://driveactivity.googleapis.com/".to_string(), _root_url: "https://driveactivity.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> DriveActivityHub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/driveactivity2/src/client.rs b/gen/driveactivity2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/driveactivity2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/driveactivity2/src/lib.rs b/gen/driveactivity2/src/lib.rs index 9d14835afc..012312bf0d 100644 --- a/gen/driveactivity2/src/lib.rs +++ b/gen/driveactivity2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Drive Activity* crate version *5.0.2-beta-1+20230122*, where *20230122* is the exact revision of the *driveactivity:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Drive Activity* crate version *5.0.2+20230122*, where *20230122* is the exact revision of the *driveactivity:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Drive Activity* *v2* API can be found at the //! [official documentation site](https://developers.google.com/drive/activity/). diff --git a/gen/essentialcontacts1-cli/Cargo.toml b/gen/essentialcontacts1-cli/Cargo.toml index c3e9809fc3..811d15008e 100644 --- a/gen/essentialcontacts1-cli/Cargo.toml +++ b/gen/essentialcontacts1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-essentialcontacts1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Essentialcontacts (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/essentialcontacts1-cli" @@ -20,13 +20,13 @@ name = "essentialcontacts1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-essentialcontacts1] path = "../essentialcontacts1" -version = "4.0.1+20220227" +version = "5.0.2+20230123" + diff --git a/gen/essentialcontacts1-cli/README.md b/gen/essentialcontacts1-cli/README.md index 88b68bb816..12837d31a7 100644 --- a/gen/essentialcontacts1-cli/README.md +++ 
b/gen/essentialcontacts1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Essentialcontacts* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *Essentialcontacts* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash essentialcontacts1 [options] diff --git a/gen/essentialcontacts1-cli/mkdocs.yml b/gen/essentialcontacts1-cli/mkdocs.yml index 688ce3267a..04e501bb65 100644 --- a/gen/essentialcontacts1-cli/mkdocs.yml +++ b/gen/essentialcontacts1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Essentialcontacts v4.0.1+20220227 +site_name: Essentialcontacts v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-essentialcontacts1-cli site_description: A complete library to interact with Essentialcontacts (protocol v1) @@ -7,29 +7,32 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/essentialcontact docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_contacts-compute.md', 'Folders', 'Contacts Compute'] -- ['folders_contacts-create.md', 'Folders', 'Contacts Create'] -- ['folders_contacts-delete.md', 'Folders', 'Contacts Delete'] -- ['folders_contacts-get.md', 'Folders', 'Contacts Get'] -- ['folders_contacts-list.md', 'Folders', 'Contacts List'] -- ['folders_contacts-patch.md', 'Folders', 'Contacts Patch'] -- ['folders_contacts-send-test-message.md', 'Folders', 'Contacts Send Test Message'] -- ['organizations_contacts-compute.md', 'Organizations', 'Contacts Compute'] -- ['organizations_contacts-create.md', 'Organizations', 'Contacts Create'] -- ['organizations_contacts-delete.md', 'Organizations', 'Contacts Delete'] -- ['organizations_contacts-get.md', 'Organizations', 'Contacts Get'] -- ['organizations_contacts-list.md', 'Organizations', 'Contacts List'] -- ['organizations_contacts-patch.md', 'Organizations', 'Contacts Patch'] -- 
['organizations_contacts-send-test-message.md', 'Organizations', 'Contacts Send Test Message'] -- ['projects_contacts-compute.md', 'Projects', 'Contacts Compute'] -- ['projects_contacts-create.md', 'Projects', 'Contacts Create'] -- ['projects_contacts-delete.md', 'Projects', 'Contacts Delete'] -- ['projects_contacts-get.md', 'Projects', 'Contacts Get'] -- ['projects_contacts-list.md', 'Projects', 'Contacts List'] -- ['projects_contacts-patch.md', 'Projects', 'Contacts Patch'] -- ['projects_contacts-send-test-message.md', 'Projects', 'Contacts Send Test Message'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Contacts Compute': 'folders_contacts-compute.md' + - 'Contacts Create': 'folders_contacts-create.md' + - 'Contacts Delete': 'folders_contacts-delete.md' + - 'Contacts Get': 'folders_contacts-get.md' + - 'Contacts List': 'folders_contacts-list.md' + - 'Contacts Patch': 'folders_contacts-patch.md' + - 'Contacts Send Test Message': 'folders_contacts-send-test-message.md' +- 'Organizations': + - 'Contacts Compute': 'organizations_contacts-compute.md' + - 'Contacts Create': 'organizations_contacts-create.md' + - 'Contacts Delete': 'organizations_contacts-delete.md' + - 'Contacts Get': 'organizations_contacts-get.md' + - 'Contacts List': 'organizations_contacts-list.md' + - 'Contacts Patch': 'organizations_contacts-patch.md' + - 'Contacts Send Test Message': 'organizations_contacts-send-test-message.md' +- 'Projects': + - 'Contacts Compute': 'projects_contacts-compute.md' + - 'Contacts Create': 'projects_contacts-create.md' + - 'Contacts Delete': 'projects_contacts-delete.md' + - 'Contacts Get': 'projects_contacts-get.md' + - 'Contacts List': 'projects_contacts-list.md' + - 'Contacts Patch': 'projects_contacts-patch.md' + - 'Contacts Send Test Message': 'projects_contacts-send-test-message.md' theme: readthedocs diff --git a/gen/essentialcontacts1-cli/src/client.rs b/gen/essentialcontacts1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- 
a/gen/essentialcontacts1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => 
Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut 
fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/essentialcontacts1-cli/src/main.rs b/gen/essentialcontacts1-cli/src/main.rs index fb4adc481e..7e6f651e1f 100644 --- a/gen/essentialcontacts1-cli/src/main.rs +++ b/gen/essentialcontacts1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_essentialcontacts1::{api, Error, oauth2}; +use google_essentialcontacts1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "notification-categories" => { call = call.add_notification_categories(value.unwrap_or("")); @@ -317,7 +316,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -411,7 +410,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -556,7 +555,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "notification-categories" => { call = call.add_notification_categories(value.unwrap_or("")); @@ -812,7 +811,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -906,7 +905,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1051,7 +1050,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "notification-categories" => { call = call.add_notification_categories(value.unwrap_or("")); @@ -1307,7 +1306,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1401,7 +1400,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = 
false; @@ -1824,7 +1823,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The identifier for the contact. Format: {resource_type}/{resource_id}/contacts/{contact_id}"##), + Some(r##"Output only. The identifier for the contact. Format: {resource_type}/{resource_id}/contacts/{contact_id}"##), Some(true), Some(false)), @@ -1999,7 +1998,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The identifier for the contact. Format: {resource_type}/{resource_id}/contacts/{contact_id}"##), + Some(r##"Output only. The identifier for the contact. Format: {resource_type}/{resource_id}/contacts/{contact_id}"##), Some(true), Some(false)), @@ -2174,7 +2173,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The identifier for the contact. Format: {resource_type}/{resource_id}/contacts/{contact_id}"##), + Some(r##"Output only. The identifier for the contact. Format: {resource_type}/{resource_id}/contacts/{contact_id}"##), Some(true), Some(false)), @@ -2230,7 +2229,7 @@ async fn main() { let mut app = App::new("essentialcontacts1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230123") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_essentialcontacts1_cli") .arg(Arg::with_name("url") diff --git a/gen/essentialcontacts1/Cargo.toml b/gen/essentialcontacts1/Cargo.toml index d46652eb18..1b988820c3 100644 --- a/gen/essentialcontacts1/Cargo.toml +++ b/gen/essentialcontacts1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-essentialcontacts1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Essentialcontacts (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/essentialcontacts1" homepage = "https://cloud.google.com/essentialcontacts/docs/" -documentation = "https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123" 
+documentation = "https://docs.rs/google-essentialcontacts1/5.0.2+20230123" license = "MIT" keywords = ["essentialcontacts", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/essentialcontacts1/README.md b/gen/essentialcontacts1/README.md index 88e4ad8013..baff6b1694 100644 --- a/gen/essentialcontacts1/README.md +++ b/gen/essentialcontacts1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-essentialcontacts1` library allows access to all features of the *Google Essentialcontacts* service. -This documentation was generated from *Essentialcontacts* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *essentialcontacts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Essentialcontacts* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *essentialcontacts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Essentialcontacts* *v1* API can be found at the [official documentation site](https://cloud.google.com/essentialcontacts/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/Essentialcontacts) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/Essentialcontacts) ... 
* folders - * [*contacts compute*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::FolderContactComputeCall), [*contacts create*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::FolderContactCreateCall), [*contacts delete*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::FolderContactDeleteCall), [*contacts get*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::FolderContactGetCall), [*contacts list*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::FolderContactListCall), [*contacts patch*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::FolderContactPatchCall) and [*contacts send test message*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::FolderContactSendTestMessageCall) + * [*contacts compute*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::FolderContactComputeCall), [*contacts create*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::FolderContactCreateCall), [*contacts delete*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::FolderContactDeleteCall), [*contacts get*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::FolderContactGetCall), [*contacts list*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::FolderContactListCall), [*contacts patch*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::FolderContactPatchCall) and [*contacts send test message*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::FolderContactSendTestMessageCall) * organizations - * [*contacts 
compute*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::OrganizationContactComputeCall), [*contacts create*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::OrganizationContactCreateCall), [*contacts delete*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::OrganizationContactDeleteCall), [*contacts get*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::OrganizationContactGetCall), [*contacts list*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::OrganizationContactListCall), [*contacts patch*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::OrganizationContactPatchCall) and [*contacts send test message*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::OrganizationContactSendTestMessageCall) + * [*contacts compute*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::OrganizationContactComputeCall), [*contacts create*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::OrganizationContactCreateCall), [*contacts delete*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::OrganizationContactDeleteCall), [*contacts get*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::OrganizationContactGetCall), [*contacts list*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::OrganizationContactListCall), [*contacts patch*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::OrganizationContactPatchCall) and [*contacts send test 
message*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::OrganizationContactSendTestMessageCall) * projects - * [*contacts compute*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::ProjectContactComputeCall), [*contacts create*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::ProjectContactCreateCall), [*contacts delete*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::ProjectContactDeleteCall), [*contacts get*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::ProjectContactGetCall), [*contacts list*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::ProjectContactListCall), [*contacts patch*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::ProjectContactPatchCall) and [*contacts send test message*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/api::ProjectContactSendTestMessageCall) + * [*contacts compute*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::ProjectContactComputeCall), [*contacts create*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::ProjectContactCreateCall), [*contacts delete*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::ProjectContactDeleteCall), [*contacts get*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::ProjectContactGetCall), [*contacts list*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::ProjectContactListCall), [*contacts patch*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::ProjectContactPatchCall) and [*contacts send test 
message*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/api::ProjectContactSendTestMessageCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/Essentialcontacts)** +* **[Hub](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/Essentialcontacts)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::CallBuilder) -* **[Resources](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::CallBuilder) +* **[Resources](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::Part)** + * **[Parts](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -137,17 +137,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -157,29 +157,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::Delegate) to the -[Method Builder](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::Delegate) to the +[Method Builder](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::RequestValue) and -[decodable](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::RequestValue) and +[decodable](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-essentialcontacts1/5.0.2-beta-1+20230123/google_essentialcontacts1/client::RequestValue) are moved +* [request values](https://docs.rs/google-essentialcontacts1/5.0.2+20230123/google_essentialcontacts1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/essentialcontacts1/src/api.rs b/gen/essentialcontacts1/src/api.rs index ae74bfe01c..319243d1a8 100644 --- a/gen/essentialcontacts1/src/api.rs +++ b/gen/essentialcontacts1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Essentialcontacts { Essentialcontacts { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://essentialcontacts.googleapis.com/".to_string(), _root_url: "https://essentialcontacts.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> Essentialcontacts { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/essentialcontacts1/src/client.rs b/gen/essentialcontacts1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/essentialcontacts1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/essentialcontacts1/src/lib.rs b/gen/essentialcontacts1/src/lib.rs index 8da718bae1..239f32d001 100644 --- a/gen/essentialcontacts1/src/lib.rs +++ b/gen/essentialcontacts1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Essentialcontacts* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *essentialcontacts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Essentialcontacts* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *essentialcontacts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Essentialcontacts* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/essentialcontacts/docs/). diff --git a/gen/eventarc1-cli/Cargo.toml b/gen/eventarc1-cli/Cargo.toml index 26a225333c..9c3498c67d 100644 --- a/gen/eventarc1-cli/Cargo.toml +++ b/gen/eventarc1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-eventarc1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Eventarc (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/eventarc1-cli" @@ -20,13 +20,13 @@ name = "eventarc1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-eventarc1] path = "../eventarc1" -version = "4.0.1+20220301" +version = "5.0.2+20230113" + diff --git a/gen/eventarc1-cli/README.md b/gen/eventarc1-cli/README.md index 973045809e..74c6272463 100644 --- a/gen/eventarc1-cli/README.md +++ b/gen/eventarc1-cli/README.md @@ -25,18 +25,28 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Eventarc* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Eventarc* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash eventarc1 [options] projects + locations-channel-connections-create (-r )... [-p ]... [-o ] + locations-channel-connections-delete [-p ]... [-o ] + locations-channel-connections-get [-p ]... [-o ] locations-channel-connections-get-iam-policy [-p ]... [-o ] + locations-channel-connections-list [-p ]... [-o ] locations-channel-connections-set-iam-policy (-r )... [-p ]... [-o ] locations-channel-connections-test-iam-permissions (-r )... [-p ]... [-o ] + locations-channels-create (-r )... [-p ]... [-o ] + locations-channels-delete [-p ]... [-o ] + locations-channels-get [-p ]... [-o ] locations-channels-get-iam-policy [-p ]... [-o ] + locations-channels-list [-p ]... [-o ] + locations-channels-patch (-r )... [-p ]... [-o ] locations-channels-set-iam-policy (-r )... [-p ]... [-o ] locations-channels-test-iam-permissions (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] + locations-get-google-channel-config [-p ]... [-o ] locations-list [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... [-o ] locations-operations-delete [-p ]... [-o ] @@ -52,6 +62,7 @@ eventarc1 [options] locations-triggers-patch (-r )... [-p ]... [-o ] locations-triggers-set-iam-policy (-r )... [-p ]... [-o ] locations-triggers-test-iam-permissions (-r )... [-p ]... [-o ] + locations-update-google-channel-config (-r )... [-p ]... 
[-o ] eventarc1 --help Configuration: diff --git a/gen/eventarc1-cli/mkdocs.yml b/gen/eventarc1-cli/mkdocs.yml index 1fc330a29f..32df4e2733 100644 --- a/gen/eventarc1-cli/mkdocs.yml +++ b/gen/eventarc1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Eventarc v4.0.1+20220301 +site_name: Eventarc v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-eventarc1-cli site_description: A complete library to interact with Eventarc (protocol v1) @@ -7,30 +7,42 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/eventarc1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-channel-connections-get-iam-policy.md', 'Projects', 'Locations Channel Connections Get Iam Policy'] -- ['projects_locations-channel-connections-set-iam-policy.md', 'Projects', 'Locations Channel Connections Set Iam Policy'] -- ['projects_locations-channel-connections-test-iam-permissions.md', 'Projects', 'Locations Channel Connections Test Iam Permissions'] -- ['projects_locations-channels-get-iam-policy.md', 'Projects', 'Locations Channels Get Iam Policy'] -- ['projects_locations-channels-set-iam-policy.md', 'Projects', 'Locations Channels Set Iam Policy'] -- ['projects_locations-channels-test-iam-permissions.md', 'Projects', 'Locations Channels Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-providers-get.md', 'Projects', 'Locations Providers Get'] -- ['projects_locations-providers-list.md', 'Projects', 'Locations Providers List'] -- 
['projects_locations-triggers-create.md', 'Projects', 'Locations Triggers Create'] -- ['projects_locations-triggers-delete.md', 'Projects', 'Locations Triggers Delete'] -- ['projects_locations-triggers-get.md', 'Projects', 'Locations Triggers Get'] -- ['projects_locations-triggers-get-iam-policy.md', 'Projects', 'Locations Triggers Get Iam Policy'] -- ['projects_locations-triggers-list.md', 'Projects', 'Locations Triggers List'] -- ['projects_locations-triggers-patch.md', 'Projects', 'Locations Triggers Patch'] -- ['projects_locations-triggers-set-iam-policy.md', 'Projects', 'Locations Triggers Set Iam Policy'] -- ['projects_locations-triggers-test-iam-permissions.md', 'Projects', 'Locations Triggers Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Channel Connections Create': 'projects_locations-channel-connections-create.md' + - 'Locations Channel Connections Delete': 'projects_locations-channel-connections-delete.md' + - 'Locations Channel Connections Get': 'projects_locations-channel-connections-get.md' + - 'Locations Channel Connections Get Iam Policy': 'projects_locations-channel-connections-get-iam-policy.md' + - 'Locations Channel Connections List': 'projects_locations-channel-connections-list.md' + - 'Locations Channel Connections Set Iam Policy': 'projects_locations-channel-connections-set-iam-policy.md' + - 'Locations Channel Connections Test Iam Permissions': 'projects_locations-channel-connections-test-iam-permissions.md' + - 'Locations Channels Create': 'projects_locations-channels-create.md' + - 'Locations Channels Delete': 'projects_locations-channels-delete.md' + - 'Locations Channels Get': 'projects_locations-channels-get.md' + - 'Locations Channels Get Iam Policy': 'projects_locations-channels-get-iam-policy.md' + - 'Locations Channels List': 'projects_locations-channels-list.md' + - 'Locations Channels Patch': 'projects_locations-channels-patch.md' + - 'Locations Channels Set Iam Policy': 
'projects_locations-channels-set-iam-policy.md' + - 'Locations Channels Test Iam Permissions': 'projects_locations-channels-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Get Google Channel Config': 'projects_locations-get-google-channel-config.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Providers Get': 'projects_locations-providers-get.md' + - 'Locations Providers List': 'projects_locations-providers-list.md' + - 'Locations Triggers Create': 'projects_locations-triggers-create.md' + - 'Locations Triggers Delete': 'projects_locations-triggers-delete.md' + - 'Locations Triggers Get': 'projects_locations-triggers-get.md' + - 'Locations Triggers Get Iam Policy': 'projects_locations-triggers-get-iam-policy.md' + - 'Locations Triggers List': 'projects_locations-triggers-list.md' + - 'Locations Triggers Patch': 'projects_locations-triggers-patch.md' + - 'Locations Triggers Set Iam Policy': 'projects_locations-triggers-set-iam-policy.md' + - 'Locations Triggers Test Iam Permissions': 'projects_locations-triggers-test-iam-permissions.md' + - 'Locations Update Google Channel Config': 'projects_locations-update-google-channel-config.md' theme: readthedocs diff --git a/gen/eventarc1-cli/src/client.rs b/gen/eventarc1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/eventarc1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as 
StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/eventarc1-cli/src/main.rs b/gen/eventarc1-cli/src/main.rs index f1a3af3035..2757498d86 100644 --- a/gen/eventarc1-cli/src/main.rs +++ b/gen/eventarc1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_eventarc1::{api, Error, oauth2}; +use google_eventarc1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,204 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_locations_channel_connections_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "activation-token" => Some(("activationToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "channel" => Some(("channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-token", "channel", "create-time", "name", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ChannelConnection = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_channel_connections_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "channel-connection-id" => { + call = call.channel_connection_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["channel-connection-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let 
protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_channel_connections_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_channel_connections_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => 
return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_channel_connections_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_channel_connections_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_channel_connections_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_channel_connections_get_iam_policy(opt.value_of("resource").unwrap_or("")); @@ -58,7 +255,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -107,6 +304,65 @@ where } } + async fn _projects_locations_channel_connections_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_channel_connections_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", 
"page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_channel_connections_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -279,6 +535,214 @@ where } } + async fn _projects_locations_channels_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "activation-token" => Some(("activationToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "crypto-key-name" => Some(("cryptoKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provider" => Some(("provider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pubsub-topic" => Some(("pubsubTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-token", "create-time", "crypto-key-name", "name", "provider", "pubsub-topic", "state", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Channel = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_channels_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "channel-id" => { + call = 
call.channel_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["channel-id", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_channels_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_channels_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_channels_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_channels_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_channels_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_channels_get_iam_policy(opt.value_of("resource").unwrap_or("")); @@ -286,7 +750,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -335,6 +799,168 @@ where } } + async fn _projects_locations_channels_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_channels_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + 
call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_channels_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, 
false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "activation-token" => Some(("activationToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "crypto-key-name" => Some(("cryptoKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "provider" => Some(("provider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pubsub-topic" => Some(("pubsubTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-token", "create-time", "crypto-key-name", "name", "provider", "pubsub-topic", "state", "uid", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Channel = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_channels_patch(request, 
opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_channels_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -559,6 +1185,58 @@ where } } + async fn 
_projects_locations_get_google_channel_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_get_google_channel_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or("")); @@ -569,7 +1247,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -819,7 +1497,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -933,7 +1611,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1011,6 +1689,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "channel" => Some(("channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.cloud-function" => Some(("destination.cloudFunction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.cloud-run.path" => Some(("destination.cloudRun.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1021,6 +1700,7 @@ where "destination.gke.namespace" => Some(("destination.gke.namespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.gke.path" => Some(("destination.gke.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.gke.service" => Some(("destination.gke.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.workflow" => Some(("destination.workflow", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1030,7 +1710,7 @@ where "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-function", "cloud-run", "cluster", "create-time", "destination", "etag", "gke", "labels", "location", "name", "namespace", "path", "pubsub", "region", "service", "service-account", "subscription", "topic", "transport", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["channel", "cloud-function", "cloud-run", "cluster", "create-time", "destination", "etag", 
"gke", "labels", "location", "name", "namespace", "path", "pubsub", "region", "service", "service-account", "subscription", "topic", "transport", "uid", "update-time", "workflow"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1045,7 +1725,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "trigger-id" => { call = call.trigger_id(value.unwrap_or("")); @@ -1104,13 +1784,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "etag" => { call = call.etag(value.unwrap_or("")); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1218,7 +1898,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1277,7 +1957,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call 
= call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1355,6 +2035,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "channel" => Some(("channel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.cloud-function" => Some(("destination.cloudFunction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.cloud-run.path" => Some(("destination.cloudRun.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1365,6 +2046,7 @@ where "destination.gke.namespace" => Some(("destination.gke.namespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.gke.path" => Some(("destination.gke.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.gke.service" => Some(("destination.gke.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.workflow" => Some(("destination.workflow", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1374,7 +2056,7 @@ where "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-function", "cloud-run", "cluster", "create-time", "destination", "etag", "gke", "labels", "location", "name", "namespace", 
"path", "pubsub", "region", "service", "service-account", "subscription", "topic", "transport", "uid", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["channel", "cloud-function", "cloud-run", "cluster", "create-time", "destination", "etag", "gke", "labels", "location", "name", "namespace", "path", "pubsub", "region", "service", "service-account", "subscription", "topic", "transport", "uid", "update-time", "workflow"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1389,13 +2071,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1616,6 +2298,97 @@ where } } + async fn _projects_locations_update_google_channel_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + 
err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "crypto-key-name" => Some(("cryptoKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["crypto-key-name", "name", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleChannelConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_update_google_channel_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol 
= CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -1623,18 +2396,45 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { + ("locations-channel-connections-create", Some(opt)) => { + call_result = self._projects_locations_channel_connections_create(opt, dry_run, &mut err).await; + }, + ("locations-channel-connections-delete", Some(opt)) => { + call_result = self._projects_locations_channel_connections_delete(opt, dry_run, &mut err).await; + }, + ("locations-channel-connections-get", Some(opt)) => { + call_result = self._projects_locations_channel_connections_get(opt, dry_run, &mut err).await; + }, ("locations-channel-connections-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_channel_connections_get_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-channel-connections-list", Some(opt)) => { + call_result = self._projects_locations_channel_connections_list(opt, dry_run, &mut err).await; + }, ("locations-channel-connections-set-iam-policy", Some(opt)) => { call_result = 
self._projects_locations_channel_connections_set_iam_policy(opt, dry_run, &mut err).await; }, ("locations-channel-connections-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_channel_connections_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-channels-create", Some(opt)) => { + call_result = self._projects_locations_channels_create(opt, dry_run, &mut err).await; + }, + ("locations-channels-delete", Some(opt)) => { + call_result = self._projects_locations_channels_delete(opt, dry_run, &mut err).await; + }, + ("locations-channels-get", Some(opt)) => { + call_result = self._projects_locations_channels_get(opt, dry_run, &mut err).await; + }, ("locations-channels-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_channels_get_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-channels-list", Some(opt)) => { + call_result = self._projects_locations_channels_list(opt, dry_run, &mut err).await; + }, + ("locations-channels-patch", Some(opt)) => { + call_result = self._projects_locations_channels_patch(opt, dry_run, &mut err).await; + }, ("locations-channels-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_channels_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -1644,6 +2444,9 @@ where ("locations-get", Some(opt)) => { call_result = self._projects_locations_get(opt, dry_run, &mut err).await; }, + ("locations-get-google-channel-config", Some(opt)) => { + call_result = self._projects_locations_get_google_channel_config(opt, dry_run, &mut err).await; + }, ("locations-list", Some(opt)) => { call_result = self._projects_locations_list(opt, dry_run, &mut err).await; }, @@ -1689,6 +2492,9 @@ where ("locations-triggers-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_triggers_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-update-google-channel-config", Some(opt)) => { + call_result = 
self._projects_locations_update_google_channel_config(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -1768,14 +2574,108 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-channel-connections-get-iam-policy', 'locations-channel-connections-set-iam-policy', 'locations-channel-connections-test-iam-permissions', 'locations-channels-get-iam-policy', 'locations-channels-set-iam-policy', 'locations-channels-test-iam-permissions', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-providers-get', 'locations-providers-list', 'locations-triggers-create', 'locations-triggers-delete', 'locations-triggers-get', 'locations-triggers-get-iam-policy', 'locations-triggers-list', 'locations-triggers-patch', 'locations-triggers-set-iam-policy' and 'locations-triggers-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-channel-connections-create', 'locations-channel-connections-delete', 'locations-channel-connections-get', 'locations-channel-connections-get-iam-policy', 'locations-channel-connections-list', 'locations-channel-connections-set-iam-policy', 'locations-channel-connections-test-iam-permissions', 'locations-channels-create', 'locations-channels-delete', 'locations-channels-get', 'locations-channels-get-iam-policy', 'locations-channels-list', 'locations-channels-patch', 'locations-channels-set-iam-policy', 'locations-channels-test-iam-permissions', 'locations-get', 'locations-get-google-channel-config', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-providers-get', 'locations-providers-list', 'locations-triggers-create', 'locations-triggers-delete', 'locations-triggers-get', 
'locations-triggers-get-iam-policy', 'locations-triggers-list', 'locations-triggers-patch', 'locations-triggers-set-iam-policy', 'locations-triggers-test-iam-permissions' and 'locations-update-google-channel-config'", vec![ + ("locations-channel-connections-create", + Some(r##"Create a new ChannelConnection in a particular project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channel-connections-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent collection in which to add this channel connection."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-channel-connections-delete", + Some(r##"Delete a single ChannelConnection."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channel-connections-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The name of the channel connection to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-channel-connections-get", + Some(r##"Get a single ChannelConnection."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channel-connections-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the channel connection to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-channel-connections-get-iam-policy", Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channel-connections-get-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-channel-connections-list", + Some(r##"List channel connections."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channel-connections-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent collection from which to list channel connections."##), Some(true), Some(false)), @@ -1797,7 +2697,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1825,7 +2725,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1841,6 +2741,78 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-channels-create", + Some(r##"Create a new channel in a particular project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channels-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent collection in which to add this channel."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-channels-delete", + Some(r##"Delete a single channel."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channels-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The name of the channel to be deleted."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-channels-get", + Some(r##"Get a single Channel."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channels-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the channel to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1853,7 +2825,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1863,6 +2835,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-channels-list", + Some(r##"List channels."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channels-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent collection to list channels on."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-channels-patch", + Some(r##"Update a single channel."##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-channels-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the channel. Must be unique within the location on the project and must be in `projects/{project}/locations/{location}/channels/{channel_id}` format."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1875,7 +2897,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1903,7 +2925,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1941,6 +2963,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-get-google-channel-config", + Some(r##"Get a GoogleChannelConfig"##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-get-google-channel-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the config to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2185,7 +3229,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2257,7 +3301,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2285,7 +3329,35 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-update-google-channel-config", + Some(r##"Update a single GoogleChannelConfig"##), + "Details at http://byron.github.io/google-apis-rs/google_eventarc1_cli/projects_locations-update-google-channel-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the config. 
Must be in the format of, `projects/{project}/locations/{location}/googleChannelConfig`."##), Some(true), Some(false)), @@ -2313,8 +3385,8 @@ async fn main() { let mut app = App::new("eventarc1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") - .about("") + .version("5.0.2+20230113") + .about("Build event-driven applications on Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_eventarc1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/eventarc1/Cargo.toml b/gen/eventarc1/Cargo.toml index 54eb1623e8..a324f06761 100644 --- a/gen/eventarc1/Cargo.toml +++ b/gen/eventarc1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-eventarc1" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Eventarc (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/eventarc1" homepage = "https://cloud.google.com/eventarc" -documentation = "https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-eventarc1/5.0.2+20230113" license = "MIT" keywords = ["eventarc", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/eventarc1/README.md b/gen/eventarc1/README.md index 77f397ef8a..c971673c0c 100644 --- a/gen/eventarc1/README.md +++ b/gen/eventarc1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-eventarc1` library allows access to all features of the *Google Eventarc* service. -This documentation was generated from *Eventarc* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *eventarc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Eventarc* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *eventarc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Eventarc* *v1* API can be found at the [official documentation site](https://cloud.google.com/eventarc). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/Eventarc) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/Eventarc) ... * projects - * [*locations channel connections create*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelConnectionCreateCall), [*locations channel connections delete*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelConnectionDeleteCall), [*locations channel connections get*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelConnectionGetCall), [*locations channel connections get iam policy*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelConnectionGetIamPolicyCall), [*locations channel connections list*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelConnectionListCall), [*locations channel connections set iam policy*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelConnectionSetIamPolicyCall), [*locations channel connections test iam permissions*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelConnectionTestIamPermissionCall), [*locations channels 
create*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelCreateCall), [*locations channels delete*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelDeleteCall), [*locations channels get*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelGetCall), [*locations channels get iam policy*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelGetIamPolicyCall), [*locations channels list*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelListCall), [*locations channels patch*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelPatchCall), [*locations channels set iam policy*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelSetIamPolicyCall), [*locations channels test iam permissions*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationChannelTestIamPermissionCall), [*locations get*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationGetCall), [*locations get google channel config*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationGetGoogleChannelConfigCall), [*locations list*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationOperationGetCall), 
[*locations operations list*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationOperationListCall), [*locations providers get*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationProviderGetCall), [*locations providers list*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationProviderListCall), [*locations triggers create*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationTriggerCreateCall), [*locations triggers delete*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationTriggerDeleteCall), [*locations triggers get*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationTriggerGetCall), [*locations triggers get iam policy*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationTriggerGetIamPolicyCall), [*locations triggers list*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationTriggerListCall), [*locations triggers patch*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationTriggerPatchCall), [*locations triggers set iam policy*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationTriggerSetIamPolicyCall), [*locations triggers test iam permissions*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationTriggerTestIamPermissionCall) and [*locations update google channel config*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/api::ProjectLocationUpdateGoogleChannelConfigCall) + * [*locations channel connections create*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelConnectionCreateCall), [*locations channel connections 
delete*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelConnectionDeleteCall), [*locations channel connections get*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelConnectionGetCall), [*locations channel connections get iam policy*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelConnectionGetIamPolicyCall), [*locations channel connections list*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelConnectionListCall), [*locations channel connections set iam policy*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelConnectionSetIamPolicyCall), [*locations channel connections test iam permissions*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelConnectionTestIamPermissionCall), [*locations channels create*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelCreateCall), [*locations channels delete*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelDeleteCall), [*locations channels get*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelGetCall), [*locations channels get iam policy*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelGetIamPolicyCall), [*locations channels list*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelListCall), [*locations channels patch*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelPatchCall), [*locations channels set iam policy*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelSetIamPolicyCall), [*locations channels test iam 
permissions*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationChannelTestIamPermissionCall), [*locations get*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationGetCall), [*locations get google channel config*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationGetGoogleChannelConfigCall), [*locations list*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationOperationListCall), [*locations providers get*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationProviderGetCall), [*locations providers list*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationProviderListCall), [*locations triggers create*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationTriggerCreateCall), [*locations triggers delete*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationTriggerDeleteCall), [*locations triggers get*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationTriggerGetCall), [*locations triggers get iam policy*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationTriggerGetIamPolicyCall), [*locations triggers 
list*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationTriggerListCall), [*locations triggers patch*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationTriggerPatchCall), [*locations triggers set iam policy*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationTriggerSetIamPolicyCall), [*locations triggers test iam permissions*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationTriggerTestIamPermissionCall) and [*locations update google channel config*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/api::ProjectLocationUpdateGoogleChannelConfigCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/Eventarc)** +* **[Hub](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/Eventarc)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::CallBuilder) -* **[Resources](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::CallBuilder) +* **[Resources](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::Part)** + * **[Parts](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -135,17 +135,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -155,29 +155,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::Delegate) to the -[Method Builder](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::Delegate) to the +[Method Builder](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::RequestValue) and -[decodable](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::RequestValue) and +[decodable](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-eventarc1/5.0.2-beta-1+20230113/google_eventarc1/client::RequestValue) are moved +* [request values](https://docs.rs/google-eventarc1/5.0.2+20230113/google_eventarc1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/eventarc1/src/api.rs b/gen/eventarc1/src/api.rs index 25b1f0803c..c039ebba6e 100644 --- a/gen/eventarc1/src/api.rs +++ b/gen/eventarc1/src/api.rs @@ -128,7 +128,7 @@ impl<'a, S> Eventarc { Eventarc { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://eventarc.googleapis.com/".to_string(), _root_url: "https://eventarc.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> Eventarc { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/eventarc1/src/client.rs b/gen/eventarc1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/eventarc1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/eventarc1/src/lib.rs b/gen/eventarc1/src/lib.rs index b90776a698..af397e43ad 100644 --- a/gen/eventarc1/src/lib.rs +++ b/gen/eventarc1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Eventarc* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *eventarc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Eventarc* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *eventarc:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Eventarc* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/eventarc). diff --git a/gen/factchecktools1_alpha1-cli/Cargo.toml b/gen/factchecktools1_alpha1-cli/Cargo.toml index 6644b60255..fe8c4bd3e8 100644 --- a/gen/factchecktools1_alpha1-cli/Cargo.toml +++ b/gen/factchecktools1_alpha1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-factchecktools1_alpha1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Fact Check Tools (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/factchecktools1_alpha1-cli" @@ -20,13 +20,13 @@ name = "factchecktools1-alpha1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-factchecktools1_alpha1] path = "../factchecktools1_alpha1" -version = "4.0.1+20220305" +version = "5.0.2+20230121" + diff --git a/gen/factchecktools1_alpha1-cli/README.md b/gen/factchecktools1_alpha1-cli/README.md index 674008bc71..6f85e0e830 100644 --- a/gen/factchecktools1_alpha1-cli/README.md +++ 
b/gen/factchecktools1_alpha1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Fact Check Tools* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Fact Check Tools* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash factchecktools1-alpha1 [options] diff --git a/gen/factchecktools1_alpha1-cli/mkdocs.yml b/gen/factchecktools1_alpha1-cli/mkdocs.yml index 4d5a5893d8..8c1bdc2b31 100644 --- a/gen/factchecktools1_alpha1-cli/mkdocs.yml +++ b/gen/factchecktools1_alpha1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Fact Check Tools v4.0.1+20220305 +site_name: Fact Check Tools v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-factchecktools1_alpha1-cli site_description: A complete library to interact with Fact Check Tools (protocol v1alpha1) @@ -7,14 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/factchecktools1_ docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['claims_search.md', 'Claims', 'Search'] -- ['pages_create.md', 'Pages', 'Create'] -- ['pages_delete.md', 'Pages', 'Delete'] -- ['pages_get.md', 'Pages', 'Get'] -- ['pages_list.md', 'Pages', 'List'] -- ['pages_update.md', 'Pages', 'Update'] +nav: +- Home: 'index.md' +- 'Claims': + - 'Search': 'claims_search.md' +- 'Pages': + - 'Create': 'pages_create.md' + - 'Delete': 'pages_delete.md' + - 'Get': 'pages_get.md' + - 'List': 'pages_list.md' + - 'Update': 'pages_update.md' theme: readthedocs diff --git a/gen/factchecktools1_alpha1-cli/src/client.rs b/gen/factchecktools1_alpha1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/factchecktools1_alpha1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, 
ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/factchecktools1_alpha1-cli/src/main.rs b/gen/factchecktools1_alpha1-cli/src/main.rs index 647a24e416..70e5ec03a4 100644 --- a/gen/factchecktools1_alpha1-cli/src/main.rs +++ b/gen/factchecktools1_alpha1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_factchecktools1_alpha1::{api, Error, oauth2}; +use google_factchecktools1_alpha1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -67,13 +66,13 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "offset" => { - call = call.offset(arg_from_str(value.unwrap_or("-0"), err, "offset", "integer")); + call = call.offset( value.map(|v| arg_from_str(v, err, "offset", "int32")).unwrap_or(-0)); }, "max-age-days" => { - call = call.max_age_days(arg_from_str(value.unwrap_or("-0"), err, "max-age-days", "integer")); + call = call.max_age_days( value.map(|v| arg_from_str(v, err, "max-age-days", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -329,13 +328,13 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, "organization" => { call = call.organization(value.unwrap_or("")); }, "offset" => { - call = call.offset(arg_from_str(value.unwrap_or("-0"), err, "offset", "integer")); + call = call.offset( value.map(|v| arg_from_str(v, err, "offset", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -722,7 +721,7 @@ async fn main() { let mut app = App::new("factchecktools1-alpha1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230121") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_factchecktools1_alpha1_cli") .arg(Arg::with_name("url") diff --git a/gen/factchecktools1_alpha1/Cargo.toml b/gen/factchecktools1_alpha1/Cargo.toml index 29d44ddb87..84ee82455c 100644 --- a/gen/factchecktools1_alpha1/Cargo.toml +++ b/gen/factchecktools1_alpha1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-factchecktools1_alpha1" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Fact Check Tools (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/factchecktools1_alpha1" homepage = "https://developers.google.com/fact-check/tools/api/" -documentation = "https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121" license = "MIT" keywords = ["factchecktools", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/factchecktools1_alpha1/README.md b/gen/factchecktools1_alpha1/README.md index 9565c73cce..a99735b268 100644 --- a/gen/factchecktools1_alpha1/README.md +++ b/gen/factchecktools1_alpha1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-factchecktools1_alpha1` library allows access to all features of the *Google Fact Check Tools* service. 
-This documentation was generated from *Fact Check Tools* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *factchecktools:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Fact Check Tools* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *factchecktools:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Fact Check Tools* *v1_alpha1* API can be found at the [official documentation site](https://developers.google.com/fact-check/tools/api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/FactCheckTools) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/FactCheckTools) ... 
* claims - * [*search*](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/api::ClaimSearchCall) + * [*search*](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/api::ClaimSearchCall) * pages - * [*create*](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/api::PageCreateCall), [*delete*](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/api::PageDeleteCall), [*get*](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/api::PageGetCall), [*list*](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/api::PageListCall) and [*update*](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/api::PageUpdateCall) + * [*create*](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/api::PageCreateCall), [*delete*](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/api::PageDeleteCall), [*get*](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/api::PageGetCall), [*list*](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/api::PageListCall) and [*update*](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/api::PageUpdateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/FactCheckTools)** +* **[Hub](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/FactCheckTools)** * a central object to maintain state and allow 
accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::CallBuilder) -* **[Resources](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::CallBuilder) +* **[Resources](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::Part)** + * **[Parts](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::Delegate) to the -[Method Builder](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::Delegate) to the +[Method Builder](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::RequestValue) and -[decodable](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::RequestValue) and +[decodable](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-factchecktools1_alpha1/5.0.2-beta-1+20230121/google_factchecktools1_alpha1/client::RequestValue) are moved +* [request values](https://docs.rs/google-factchecktools1_alpha1/5.0.2+20230121/google_factchecktools1_alpha1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/factchecktools1_alpha1/src/api.rs b/gen/factchecktools1_alpha1/src/api.rs index d4755acdff..86e845a1e7 100644 --- a/gen/factchecktools1_alpha1/src/api.rs +++ b/gen/factchecktools1_alpha1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> FactCheckTools { FactCheckTools { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://factchecktools.googleapis.com/".to_string(), _root_url: "https://factchecktools.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> FactCheckTools { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/factchecktools1_alpha1/src/client.rs b/gen/factchecktools1_alpha1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/factchecktools1_alpha1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/factchecktools1_alpha1/src/lib.rs b/gen/factchecktools1_alpha1/src/lib.rs index 1c8352813b..7233429741 100644 --- a/gen/factchecktools1_alpha1/src/lib.rs +++ b/gen/factchecktools1_alpha1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Fact Check Tools* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *factchecktools:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Fact Check Tools* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *factchecktools:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Fact Check Tools* *v1_alpha1* API can be found at the //! [official documentation site](https://developers.google.com/fact-check/tools/api/). diff --git a/gen/fcm1-cli/Cargo.toml b/gen/fcm1-cli/Cargo.toml index 31a12569c5..749d58729e 100644 --- a/gen/fcm1-cli/Cargo.toml +++ b/gen/fcm1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-fcm1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Cloud Messaging (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/fcm1-cli" @@ -20,13 +20,13 @@ name = "fcm1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-fcm1] path = "../fcm1" -version = "4.0.1+20220228" +version = "5.0.2+20230106" + diff --git a/gen/fcm1-cli/README.md b/gen/fcm1-cli/README.md index 4a50c73042..509441d712 100644 --- a/gen/fcm1-cli/README.md +++ b/gen/fcm1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Firebase Cloud Messaging* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebase Cloud Messaging* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash fcm1 [options] diff --git a/gen/fcm1-cli/mkdocs.yml b/gen/fcm1-cli/mkdocs.yml index 74ad9250e8..ad140575f0 100644 --- a/gen/fcm1-cli/mkdocs.yml +++ b/gen/fcm1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebase Cloud Messaging v4.0.1+20220228 +site_name: Firebase Cloud Messaging v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-fcm1-cli site_description: A complete library to interact with Firebase Cloud Messaging (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/fcm1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_messages-send.md', 'Projects', 'Messages Send'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Messages Send': 'projects_messages-send.md' theme: readthedocs diff --git a/gen/fcm1-cli/src/client.rs b/gen/fcm1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/fcm1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - 
pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/fcm1-cli/src/main.rs b/gen/fcm1-cli/src/main.rs index 482d9f987e..6f568497e1 100644 --- a/gen/fcm1-cli/src/main.rs +++ b/gen/fcm1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_fcm1::{api, Error, oauth2}; +use google_fcm1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -81,6 +80,7 @@ where "message.android.notification.body" => Some(("message.android.notification.body", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "message.android.notification.body-loc-args" => Some(("message.android.notification.bodyLocArgs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "message.android.notification.body-loc-key" => Some(("message.android.notification.bodyLocKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "message.android.notification.bypass-proxy-notification" => Some(("message.android.notification.bypassProxyNotification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "message.android.notification.channel-id" => Some(("message.android.notification.channelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "message.android.notification.click-action" => Some(("message.android.notification.clickAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "message.android.notification.color" => Some(("message.android.notification.color", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), @@ -129,7 +129,7 @@ where "message.webpush.headers" => Some(("message.webpush.headers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["alpha", "analytics-label", "android", "apns", "blue", "body", "body-loc-args", "body-loc-key", "channel-id", "click-action", "collapse-key", "color", "condition", "data", "default-light-settings", "default-sound", "default-vibrate-timings", "direct-boot-ok", "event-time", "fcm-options", "green", "headers", "icon", "image", "light-off-duration", "light-on-duration", "light-settings", "link", "local-only", "message", "name", "notification", "notification-count", "notification-priority", "priority", "red", "restricted-package-name", "sound", "sticky", "tag", "ticker", "title", "title-loc-args", "title-loc-key", "token", "topic", "ttl", "validate-only", "vibrate-timings", "visibility", "webpush"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alpha", "analytics-label", "android", "apns", "blue", "body", "body-loc-args", "body-loc-key", "bypass-proxy-notification", "channel-id", "click-action", "collapse-key", "color", "condition", "data", "default-light-settings", "default-sound", "default-vibrate-timings", "direct-boot-ok", "event-time", "fcm-options", "green", "headers", "icon", "image", "light-off-duration", "light-on-duration", "light-settings", "link", "local-only", "message", "name", "notification", "notification-count", "notification-priority", "priority", "red", "restricted-package-name", "sound", "sticky", "tag", "ticker", "title", "title-loc-args", "title-loc-key", "token", "topic", "ttl", "validate-only", "vibrate-timings", "visibility", "webpush"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None 
} @@ -313,7 +313,7 @@ async fn main() { let mut app = App::new("fcm1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230106") .about("FCM send API that provides a cross-platform messaging solution to reliably deliver messages at no cost.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_fcm1_cli") .arg(Arg::with_name("url") diff --git a/gen/fcm1/src/client.rs b/gen/fcm1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/fcm1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. 
-pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/fcmdata1_beta1-cli/Cargo.toml b/gen/fcmdata1_beta1-cli/Cargo.toml index e8500ff6a5..fecf98b5f7 100644 --- a/gen/fcmdata1_beta1-cli/Cargo.toml +++ b/gen/fcmdata1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-fcmdata1_beta1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Fcmdata (protocol v1beta1)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/fcmdata1_beta1-cli" @@ -20,13 +20,13 @@ name = "fcmdata1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-fcmdata1_beta1] path = "../fcmdata1_beta1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/fcmdata1_beta1-cli/README.md b/gen/fcmdata1_beta1-cli/README.md index 52c9493cc7..896a52a204 100644 --- a/gen/fcmdata1_beta1-cli/README.md +++ b/gen/fcmdata1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Fcmdata* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Fcmdata* API at revision *20230123*. The CLI is at version *5.0.2*. 
```bash fcmdata1-beta1 [options] diff --git a/gen/fcmdata1_beta1-cli/mkdocs.yml b/gen/fcmdata1_beta1-cli/mkdocs.yml index 14c549264d..7a29441ebd 100644 --- a/gen/fcmdata1_beta1-cli/mkdocs.yml +++ b/gen/fcmdata1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Fcmdata v4.0.1+20220305 +site_name: Fcmdata v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-fcmdata1_beta1-cli site_description: A complete library to interact with Fcmdata (protocol v1beta1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/fcmdata1_beta1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_android-apps-delivery-data-list.md', 'Projects', 'Android Apps Delivery Data List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Android Apps Delivery Data List': 'projects_android-apps-delivery-data-list.md' theme: readthedocs diff --git a/gen/fcmdata1_beta1-cli/src/client.rs b/gen/fcmdata1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/fcmdata1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/fcmdata1_beta1-cli/src/main.rs b/gen/fcmdata1_beta1-cli/src/main.rs index b24a8fc8c4..da807a53bc 100644 --- a/gen/fcmdata1_beta1-cli/src/main.rs +++ b/gen/fcmdata1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_fcmdata1_beta1::{api, Error, oauth2}; +use google_fcmdata1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -228,7 +227,7 @@ async fn main() { let mut app = App::new("fcmdata1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("Provides additional information about Firebase Cloud Messaging (FCM) message sends and deliveries.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_fcmdata1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/fcmdata1_beta1/Cargo.toml b/gen/fcmdata1_beta1/Cargo.toml index f22b48b53c..8b0571d088 100644 --- a/gen/fcmdata1_beta1/Cargo.toml +++ b/gen/fcmdata1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-fcmdata1_beta1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = 
"A complete library to interact with Fcmdata (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/fcmdata1_beta1" homepage = "https://firebase.google.com/docs/cloud-messaging" -documentation = "https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123" license = "MIT" keywords = ["fcmdata", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/fcmdata1_beta1/README.md b/gen/fcmdata1_beta1/README.md index 0930766c0a..54d2475b1f 100644 --- a/gen/fcmdata1_beta1/README.md +++ b/gen/fcmdata1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-fcmdata1_beta1` library allows access to all features of the *Google Fcmdata* service. -This documentation was generated from *Fcmdata* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *fcmdata:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Fcmdata* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *fcmdata:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Fcmdata* *v1_beta1* API can be found at the [official documentation site](https://firebase.google.com/docs/cloud-messaging). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/Fcmdata) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/Fcmdata) ... 
* projects - * [*android apps delivery data list*](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/api::ProjectAndroidAppDeliveryDataListCall) + * [*android apps delivery data list*](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/api::ProjectAndroidAppDeliveryDataListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/Fcmdata)** +* **[Hub](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/Fcmdata)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -120,17 +120,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -140,29 +140,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-fcmdata1_beta1/5.0.2-beta-1+20230123/google_fcmdata1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-fcmdata1_beta1/5.0.2+20230123/google_fcmdata1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/fcmdata1_beta1/src/api.rs b/gen/fcmdata1_beta1/src/api.rs index 066ec74447..05480a222c 100644 --- a/gen/fcmdata1_beta1/src/api.rs +++ b/gen/fcmdata1_beta1/src/api.rs @@ -121,7 +121,7 @@ impl<'a, S> Fcmdata { Fcmdata { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://fcmdata.googleapis.com/".to_string(), _root_url: "https://fcmdata.googleapis.com/".to_string(), } @@ -132,7 +132,7 @@ impl<'a, S> Fcmdata { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/fcmdata1_beta1/src/client.rs b/gen/fcmdata1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/fcmdata1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/fcmdata1_beta1/src/lib.rs b/gen/fcmdata1_beta1/src/lib.rs index 5bc727232d..56022e3f99 100644 --- a/gen/fcmdata1_beta1/src/lib.rs +++ b/gen/fcmdata1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Fcmdata* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *fcmdata:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Fcmdata* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *fcmdata:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Fcmdata* *v1_beta1* API can be found at the //! [official documentation site](https://firebase.google.com/docs/cloud-messaging). diff --git a/gen/file1-cli/Cargo.toml b/gen/file1-cli/Cargo.toml index 06d092bc3f..fc97f6f413 100644 --- a/gen/file1-cli/Cargo.toml +++ b/gen/file1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-file1-cli" -version = "4.0.1+20220214" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Filestore (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/file1-cli" @@ -20,13 +20,13 @@ name = "file1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-file1] path = "../file1" -version = "4.0.1+20220214" +version = "5.0.2+20230103" + diff --git a/gen/file1-cli/README.md b/gen/file1-cli/README.md index 8ea96fa9b2..b9c913c448 100644 --- a/gen/file1-cli/README.md +++ b/gen/file1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud 
Filestore* API at revision *20220214*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Filestore* API at revision *20230103*. The CLI is at version *5.0.2*. ```bash file1 [options] diff --git a/gen/file1-cli/mkdocs.yml b/gen/file1-cli/mkdocs.yml index fd2889bf0b..ee4ba97af5 100644 --- a/gen/file1-cli/mkdocs.yml +++ b/gen/file1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Filestore v4.0.1+20220214 +site_name: Cloud Filestore v5.0.2+20230103 site_url: http://byron.github.io/google-apis-rs/google-file1-cli site_description: A complete library to interact with Cloud Filestore (protocol v1) @@ -7,30 +7,31 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/file1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-backups-create.md', 'Projects', 'Locations Backups Create'] -- ['projects_locations-backups-delete.md', 'Projects', 'Locations Backups Delete'] -- ['projects_locations-backups-get.md', 'Projects', 'Locations Backups Get'] -- ['projects_locations-backups-list.md', 'Projects', 'Locations Backups List'] -- ['projects_locations-backups-patch.md', 'Projects', 'Locations Backups Patch'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-patch.md', 'Projects', 'Locations Instances Patch'] -- ['projects_locations-instances-restore.md', 'Projects', 'Locations Instances Restore'] -- ['projects_locations-instances-snapshots-create.md', 'Projects', 'Locations Instances Snapshots Create'] -- ['projects_locations-instances-snapshots-delete.md', 'Projects', 'Locations Instances Snapshots Delete'] -- 
['projects_locations-instances-snapshots-get.md', 'Projects', 'Locations Instances Snapshots Get'] -- ['projects_locations-instances-snapshots-list.md', 'Projects', 'Locations Instances Snapshots List'] -- ['projects_locations-instances-snapshots-patch.md', 'Projects', 'Locations Instances Snapshots Patch'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Backups Create': 'projects_locations-backups-create.md' + - 'Locations Backups Delete': 'projects_locations-backups-delete.md' + - 'Locations Backups Get': 'projects_locations-backups-get.md' + - 'Locations Backups List': 'projects_locations-backups-list.md' + - 'Locations Backups Patch': 'projects_locations-backups-patch.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Patch': 'projects_locations-instances-patch.md' + - 'Locations Instances Restore': 'projects_locations-instances-restore.md' + - 'Locations Instances Snapshots Create': 'projects_locations-instances-snapshots-create.md' + - 'Locations Instances Snapshots Delete': 'projects_locations-instances-snapshots-delete.md' + - 'Locations Instances Snapshots Get': 'projects_locations-instances-snapshots-get.md' + - 'Locations Instances Snapshots List': 'projects_locations-instances-snapshots-list.md' + - 'Locations Instances 
Snapshots Patch': 'projects_locations-instances-snapshots-patch.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/file1-cli/src/client.rs b/gen/file1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/file1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/file1-cli/src/main.rs b/gen/file1-cli/src/main.rs index ea1fa94fa0..71f53d666b 100644 --- a/gen/file1-cli/src/main.rs +++ b/gen/file1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_file1::{api, Error, oauth2}; +use google_file1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -265,7 +264,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -371,7 +370,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -578,7 +577,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -689,7 +688,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), 
err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -794,7 +793,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1137,7 +1136,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1237,7 +1236,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1296,10 +1295,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "include-unrevealed-locations" => { - call = call.include_unrevealed_locations(arg_from_str(value.unwrap_or("false"), err, "include-unrevealed-locations", "boolean")); + call = call.include_unrevealed_locations( value.map(|v| arg_from_str(v, err, "include-unrevealed-locations", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1549,7 +1548,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, 
"page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1760,7 +1759,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The backup's project and location, in the format `projects/{project_number}/locations/{location}`. In Cloud Filestore, backup locations map to GCP regions, for example **us-west1**."##), + Some(r##"Required. The backup's project and location, in the format `projects/{project_number}/locations/{location}`. In Filestore, backup locations map to Google Cloud regions, for example **us-west1**."##), Some(true), Some(false)), @@ -1832,7 +1831,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The project and location for which to retrieve backup information, in the format `projects/{project_number}/locations/{location}`. In Cloud Filestore, backup locations map to GCP regions, for example **us-west1**. To retrieve backup information for all locations, use "-" for the `{location}` value."##), + Some(r##"Required. The project and location for which to retrieve backup information, in the format `projects/{project_number}/locations/{location}`. In Filestore, backup locations map to Google Cloud regions, for example **us-west1**. To retrieve backup information for all locations, use "-" for the `{location}` value."##), Some(true), Some(false)), @@ -1904,7 +1903,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The instance's project and location, in the format `projects/{project_id}/locations/{location}`. In Cloud Filestore, locations map to GCP zones, for example **us-west1-b**."##), + Some(r##"Required. The instance's project and location, in the format `projects/{project_id}/locations/{location}`. 
In Filestore, locations map to Google Cloud zones, for example **us-west1-b**."##), Some(true), Some(false)), @@ -1976,7 +1975,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The project and location for which to retrieve instance information, in the format `projects/{project_id}/locations/{location}`. In Cloud Filestore, locations map to GCP zones, for example **us-west1-b**. To retrieve instance information for all locations, use "-" for the `{location}` value."##), + Some(r##"Required. The project and location for which to retrieve instance information, in the format `projects/{project_id}/locations/{location}`. In Cloud Filestore, locations map to Google Cloud zones, for example **us-west1-b**. To retrieve instance information for all locations, use "-" for the `{location}` value."##), Some(true), Some(false)), @@ -2292,7 +2291,7 @@ async fn main() { let mut app = App::new("file1") .author("Sebastian Thiel ") - .version("4.0.1+20220214") + .version("5.0.2+20230103") .about("The Cloud Filestore API is used for creating and managing cloud file servers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_file1_cli") .arg(Arg::with_name("url") diff --git a/gen/file1/Cargo.toml b/gen/file1/Cargo.toml index 559c278e2f..79fab13bab 100644 --- a/gen/file1/Cargo.toml +++ b/gen/file1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-file1" -version = "5.0.2-beta-1+20230103" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Filestore (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/file1" homepage = "https://cloud.google.com/filestore/" -documentation = "https://docs.rs/google-file1/5.0.2-beta-1+20230103" +documentation = "https://docs.rs/google-file1/5.0.2+20230103" license = "MIT" keywords = ["file", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/file1/README.md 
b/gen/file1/README.md index b64333c9e9..b03185cf09 100644 --- a/gen/file1/README.md +++ b/gen/file1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-file1` library allows access to all features of the *Google Cloud Filestore* service. -This documentation was generated from *Cloud Filestore* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *file:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Filestore* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *file:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Filestore* *v1* API can be found at the [official documentation site](https://cloud.google.com/filestore/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/CloudFilestore) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-file1/5.0.2+20230103/google_file1/CloudFilestore) ... 
* projects - * [*locations backups create*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationBackupCreateCall), [*locations backups delete*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationBackupDeleteCall), [*locations backups get*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationBackupGetCall), [*locations backups list*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationBackupListCall), [*locations backups patch*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationBackupPatchCall), [*locations get*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceDeleteCall), [*locations instances get*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstancePatchCall), [*locations instances restore*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceRestoreCall), [*locations instances snapshots create*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceSnapshotCreateCall), [*locations instances snapshots delete*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceSnapshotDeleteCall), [*locations instances snapshots 
get*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceSnapshotGetCall), [*locations instances snapshots list*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceSnapshotListCall), [*locations instances snapshots patch*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationInstanceSnapshotPatchCall), [*locations list*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/api::ProjectLocationOperationListCall) + * [*locations backups create*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationBackupCreateCall), [*locations backups delete*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationBackupDeleteCall), [*locations backups get*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationBackupGetCall), [*locations backups list*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationBackupListCall), [*locations backups patch*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationBackupPatchCall), [*locations get*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceCreateCall), [*locations instances 
delete*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceDeleteCall), [*locations instances get*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstancePatchCall), [*locations instances restore*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceRestoreCall), [*locations instances snapshots create*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceSnapshotCreateCall), [*locations instances snapshots delete*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceSnapshotDeleteCall), [*locations instances snapshots get*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceSnapshotGetCall), [*locations instances snapshots list*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceSnapshotListCall), [*locations instances snapshots patch*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationInstanceSnapshotPatchCall), [*locations list*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following 
*Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/CloudFilestore)** +* **[Hub](https://docs.rs/google-file1/5.0.2+20230103/google_file1/CloudFilestore)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::CallBuilder) -* **[Resources](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::CallBuilder) +* **[Resources](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::Part)** + * **[Parts](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -135,17 +135,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -155,29 +155,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::Delegate) to the -[Method Builder](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::Delegate) to the +[Method Builder](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::RequestValue) and -[decodable](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::RequestValue) and +[decodable](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-file1/5.0.2-beta-1+20230103/google_file1/client::RequestValue) are moved +* [request values](https://docs.rs/google-file1/5.0.2+20230103/google_file1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/file1/src/api.rs b/gen/file1/src/api.rs index 2a7cfa15ee..a5de299318 100644 --- a/gen/file1/src/api.rs +++ b/gen/file1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudFilestore { CloudFilestore { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://file.googleapis.com/".to_string(), _root_url: "https://file.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudFilestore { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/file1/src/client.rs b/gen/file1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/file1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/file1/src/lib.rs b/gen/file1/src/lib.rs index 98c05633f4..246ac0b184 100644 --- a/gen/file1/src/lib.rs +++ b/gen/file1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Filestore* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *file:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Filestore* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *file:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Filestore* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/filestore/). diff --git a/gen/file1_beta1-cli/Cargo.toml b/gen/file1_beta1-cli/Cargo.toml index 09efd992c3..de1f8d5f44 100644 --- a/gen/file1_beta1-cli/Cargo.toml +++ b/gen/file1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-file1_beta1-cli" -version = "4.0.1+20220214" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Filestore (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/file1_beta1-cli" @@ -20,13 +20,13 @@ name = "file1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-file1_beta1] path = "../file1_beta1" -version = "4.0.1+20220214" +version = "5.0.2+20230103" + diff --git a/gen/file1_beta1-cli/README.md b/gen/file1_beta1-cli/README.md index acc72ccbfa..522b81cf05 100644 --- a/gen/file1_beta1-cli/README.md +++ b/gen/file1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Filestore* API at revision *20220214*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Filestore* API at revision *20230103*. The CLI is at version *5.0.2*. ```bash file1-beta1 [options] @@ -43,6 +43,11 @@ file1-beta1 [options] locations-instances-patch (-r )... [-p ]... [-o ] locations-instances-restore (-r )... [-p ]... [-o ] locations-instances-revert (-r )... [-p ]... [-o ] + locations-instances-shares-create (-r )... [-p ]... [-o ] + locations-instances-shares-delete [-p ]... [-o ] + locations-instances-shares-get [-p ]... [-o ] + locations-instances-shares-list [-p ]... [-o ] + locations-instances-shares-patch (-r )... [-p ]... [-o ] locations-instances-snapshots-create (-r )... [-p ]... [-o ] locations-instances-snapshots-delete [-p ]... [-o ] locations-instances-snapshots-get [-p ]... [-o ] diff --git a/gen/file1_beta1-cli/mkdocs.yml b/gen/file1_beta1-cli/mkdocs.yml index c22795b043..8e480e1c5b 100644 --- a/gen/file1_beta1-cli/mkdocs.yml +++ b/gen/file1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Filestore v4.0.1+20220214 +site_name: Cloud Filestore v5.0.2+20230103 site_url: http://byron.github.io/google-apis-rs/google-file1_beta1-cli site_description: A complete library to interact with Cloud Filestore (protocol v1beta1) @@ -7,31 +7,37 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/file1_beta1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-backups-create.md', 'Projects', 'Locations Backups Create'] -- ['projects_locations-backups-delete.md', 'Projects', 'Locations Backups Delete'] -- ['projects_locations-backups-get.md', 'Projects', 'Locations Backups Get'] -- ['projects_locations-backups-list.md', 'Projects', 'Locations Backups List'] -- ['projects_locations-backups-patch.md', 'Projects', 'Locations Backups Patch'] -- 
['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-patch.md', 'Projects', 'Locations Instances Patch'] -- ['projects_locations-instances-restore.md', 'Projects', 'Locations Instances Restore'] -- ['projects_locations-instances-revert.md', 'Projects', 'Locations Instances Revert'] -- ['projects_locations-instances-snapshots-create.md', 'Projects', 'Locations Instances Snapshots Create'] -- ['projects_locations-instances-snapshots-delete.md', 'Projects', 'Locations Instances Snapshots Delete'] -- ['projects_locations-instances-snapshots-get.md', 'Projects', 'Locations Instances Snapshots Get'] -- ['projects_locations-instances-snapshots-list.md', 'Projects', 'Locations Instances Snapshots List'] -- ['projects_locations-instances-snapshots-patch.md', 'Projects', 'Locations Instances Snapshots Patch'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Backups Create': 'projects_locations-backups-create.md' + - 'Locations Backups Delete': 'projects_locations-backups-delete.md' + - 'Locations Backups Get': 'projects_locations-backups-get.md' + - 'Locations Backups List': 'projects_locations-backups-list.md' + - 'Locations Backups Patch': 'projects_locations-backups-patch.md' + - 
'Locations Get': 'projects_locations-get.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Patch': 'projects_locations-instances-patch.md' + - 'Locations Instances Restore': 'projects_locations-instances-restore.md' + - 'Locations Instances Revert': 'projects_locations-instances-revert.md' + - 'Locations Instances Shares Create': 'projects_locations-instances-shares-create.md' + - 'Locations Instances Shares Delete': 'projects_locations-instances-shares-delete.md' + - 'Locations Instances Shares Get': 'projects_locations-instances-shares-get.md' + - 'Locations Instances Shares List': 'projects_locations-instances-shares-list.md' + - 'Locations Instances Shares Patch': 'projects_locations-instances-shares-patch.md' + - 'Locations Instances Snapshots Create': 'projects_locations-instances-snapshots-create.md' + - 'Locations Instances Snapshots Delete': 'projects_locations-instances-snapshots-delete.md' + - 'Locations Instances Snapshots Get': 'projects_locations-instances-snapshots-get.md' + - 'Locations Instances Snapshots List': 'projects_locations-instances-snapshots-list.md' + - 'Locations Instances Snapshots Patch': 'projects_locations-instances-snapshots-patch.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/file1_beta1-cli/src/client.rs b/gen/file1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- 
a/gen/file1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => 
Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut 
fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/file1_beta1-cli/src/main.rs b/gen/file1_beta1-cli/src/main.rs index 05f9675664..111b66a0f8 100644 --- a/gen/file1_beta1-cli/src/main.rs +++ b/gen/file1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_file1_beta1::{api, Error, oauth2}; +use google_file1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -78,6 +77,7 @@ where "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "download-bytes" => Some(("downloadBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -87,7 +87,7 @@ where "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-bytes" => Some(("storageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "create-time", "description", 
"download-bytes", "labels", "name", "satisfies-pzs", "source-file-share", "source-instance", "source-instance-tier", "state", "storage-bytes"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "create-time", "description", "download-bytes", "kms-key-name", "labels", "name", "satisfies-pzs", "source-file-share", "source-instance", "source-instance-tier", "state", "storage-bytes"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -265,7 +265,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -347,6 +347,7 @@ where "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "download-bytes" => Some(("downloadBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -356,7 +357,7 @@ where "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "storage-bytes" => Some(("storageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "create-time", "description", "download-bytes", 
"labels", "name", "satisfies-pzs", "source-file-share", "source-instance", "source-instance-tier", "state", "storage-bytes"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "create-time", "description", "download-bytes", "kms-key-name", "labels", "name", "satisfies-pzs", "source-file-share", "source-instance", "source-instance-tier", "state", "storage-bytes"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -371,7 +372,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -495,20 +496,25 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "capacity-gb" => Some(("capacityGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "capacity-step-size-gb" => Some(("capacityStepSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "max-capacity-gb" => Some(("maxCapacityGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "max-share-count" => Some(("maxShareCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "multi-share-enabled" => Some(("multiShareEnabled", JsonTypeInfo { 
jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "protocol" => Some(("protocol", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "suspension-reasons" => Some(("suspensionReasons", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "tier" => Some(("tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "kms-key-name", "labels", "max-share-count", "name", "satisfies-pzs", "state", "status-message", "suspension-reasons", "tier"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "capacity-step-size-gb", "create-time", "description", "etag", "kms-key-name", "labels", "max-capacity-gb", "max-share-count", "multi-share-enabled", "name", "protocol", "satisfies-pzs", "state", "status-message", "suspension-reasons", "tier"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -579,7 +585,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -690,7 +696,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| 
arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -768,20 +774,25 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "capacity-gb" => Some(("capacityGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "capacity-step-size-gb" => Some(("capacityStepSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "max-capacity-gb" => Some(("maxCapacityGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "max-share-count" => Some(("maxShareCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "multi-share-enabled" => Some(("multiShareEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "protocol" => Some(("protocol", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "suspension-reasons" => Some(("suspensionReasons", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "tier" => Some(("tier", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "kms-key-name", "labels", "max-share-count", "name", "satisfies-pzs", "state", "status-message", "suspension-reasons", "tier"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "capacity-step-size-gb", "create-time", "description", "etag", "kms-key-name", "labels", "max-capacity-gb", "max-share-count", "multi-share-enabled", "name", "protocol", "satisfies-pzs", "state", "status-message", "suspension-reasons", "tier"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -796,7 +807,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1017,6 +1028,365 @@ where } } + async fn _projects_locations_instances_shares_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "capacity-gb" => Some(("capacityGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "mount-name" => Some(("mountName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "create-time", "description", "labels", "mount-name", "name", "state"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Share = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_shares_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "share-id" => { + call = call.share_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + 
v.extend(["share-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_shares_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_instances_shares_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_shares_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_instances_shares_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, 
output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_shares_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_instances_shares_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + 
Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_instances_shares_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "capacity-gb" => Some(("capacityGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "mount-name" => Some(("mountName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["capacity-gb", "create-time", "description", "labels", "mount-name", "name", "state"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Share = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_shares_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + 
{let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_instances_snapshots_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1225,7 +1595,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1325,7 +1695,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1384,10 +1754,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", 
"integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "include-unrevealed-locations" => { - call = call.include_unrevealed_locations(arg_from_str(value.unwrap_or("false"), err, "include-unrevealed-locations", "boolean")); + call = call.include_unrevealed_locations( value.map(|v| arg_from_str(v, err, "include-unrevealed-locations", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1637,7 +2007,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1735,6 +2105,21 @@ where ("locations-instances-revert", Some(opt)) => { call_result = self._projects_locations_instances_revert(opt, dry_run, &mut err).await; }, + ("locations-instances-shares-create", Some(opt)) => { + call_result = self._projects_locations_instances_shares_create(opt, dry_run, &mut err).await; + }, + ("locations-instances-shares-delete", Some(opt)) => { + call_result = self._projects_locations_instances_shares_delete(opt, dry_run, &mut err).await; + }, + ("locations-instances-shares-get", Some(opt)) => { + call_result = self._projects_locations_instances_shares_get(opt, dry_run, &mut err).await; + }, + ("locations-instances-shares-list", Some(opt)) => { + call_result = self._projects_locations_instances_shares_list(opt, dry_run, &mut err).await; + }, + ("locations-instances-shares-patch", Some(opt)) => { + call_result = self._projects_locations_instances_shares_patch(opt, dry_run, &mut err).await; + }, ("locations-instances-snapshots-create", Some(opt)) => { call_result = self._projects_locations_instances_snapshots_create(opt, dry_run, &mut err).await; }, @@ -1844,14 +2229,14 @@ where async fn main() { let mut exit_status = 0i32; let 
arg_data = [ - ("projects", "methods: 'locations-backups-create', 'locations-backups-delete', 'locations-backups-get', 'locations-backups-list', 'locations-backups-patch', 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-restore', 'locations-instances-revert', 'locations-instances-snapshots-create', 'locations-instances-snapshots-delete', 'locations-instances-snapshots-get', 'locations-instances-snapshots-list', 'locations-instances-snapshots-patch', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-backups-create', 'locations-backups-delete', 'locations-backups-get', 'locations-backups-list', 'locations-backups-patch', 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-restore', 'locations-instances-revert', 'locations-instances-shares-create', 'locations-instances-shares-delete', 'locations-instances-shares-get', 'locations-instances-shares-list', 'locations-instances-shares-patch', 'locations-instances-snapshots-create', 'locations-instances-snapshots-delete', 'locations-instances-snapshots-get', 'locations-instances-snapshots-list', 'locations-instances-snapshots-patch', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ ("locations-backups-create", Some(r##"Creates a backup."##), "Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-backups-create", vec![ (Some(r##"parent"##), None, - Some(r##"Required. The backup's project and location, in the format `projects/{project_id}/locations/{location}`. 
In Cloud Filestore, backup locations map to GCP regions, for example **us-west1**."##), + Some(r##"Required. The backup's project and location, in the format `projects/{project_id}/locations/{location}`. In Filestore, backup locations map to Google Cloud regions, for example **us-west1**."##), Some(true), Some(false)), @@ -1923,7 +2308,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The project and location for which to retrieve backup information, in the format `projects/{project_id}/locations/{location}`. In Cloud Filestore, backup locations map to GCP regions, for example **us-west1**. To retrieve backup information for all locations, use "-" for the `{location}` value."##), + Some(r##"Required. The project and location for which to retrieve backup information, in the format `projects/{project_id}/locations/{location}`. In Filestore, backup locations map to Google Cloud regions, for example **us-west1**. To retrieve backup information for all locations, use "-" for the `{location}` value."##), Some(true), Some(false)), @@ -1995,7 +2380,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The instance's project and location, in the format `projects/{project_id}/locations/{location}`. In Cloud Filestore, locations map to GCP zones, for example **us-west1-b**."##), + Some(r##"Required. The instance's project and location, in the format `projects/{project_id}/locations/{location}`. In Filestore, locations map to Google Cloud zones, for example **us-west1-b**."##), Some(true), Some(false)), @@ -2067,7 +2452,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The project and location for which to retrieve instance information, in the format `projects/{project_id}/locations/{location}`. In Cloud Filestore, locations map to GCP zones, for example **us-west1-b**. To retrieve instance information for all locations, use "-" for the `{location}` value."##), + Some(r##"Required. 
The project and location for which to retrieve instance information, in the format `projects/{project_id}/locations/{location}`. In Cloud Filestore, locations map to Google Cloud zones, for example **us-west1-b**. To retrieve instance information for all locations, use "-" for the `{location}` value."##), Some(true), Some(false)), @@ -2161,6 +2546,128 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-shares-create", + Some(r##"Creates a share."##), + "Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-shares-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The Filestore Instance to create the share for, in the format `projects/{project_id}/locations/{location}/instances/{instance_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-shares-delete", + Some(r##"Deletes a share."##), + "Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-shares-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The share resource name, in the format `projects/{project_id}/locations/{location}/instances/{instance_id}/share/{share_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-shares-get", + Some(r##"Gets the details of a specific share."##), + "Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-shares-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The share resource name, in the format `projects/{project_id}/locations/{location}/instances/{instance_id}/shares/{share_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-shares-list", + Some(r##"Lists all shares for a specified instance."##), + "Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-shares-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The instance for which to retrieve share information, in the format `projects/{project_id}/locations/{location}/instances/{instance_id}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-shares-patch", + Some(r##"Updates the settings of a specific share."##), + "Details at http://byron.github.io/google-apis-rs/google_file1_beta1_cli/projects_locations-instances-shares-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. The resource name of the share, in the format `projects/{project_id}/locations/{location_id}/instances/{instance_id}/shares/{share_id}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2411,7 +2918,7 @@ async fn main() { let mut app = App::new("file1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220214") + .version("5.0.2+20230103") .about("The Cloud Filestore API is used for creating and managing cloud file servers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_file1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/file1_beta1/Cargo.toml b/gen/file1_beta1/Cargo.toml index 0c1fa48552..4370014060 100644 --- a/gen/file1_beta1/Cargo.toml +++ b/gen/file1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-file1_beta1" -version = "5.0.2-beta-1+20230103" 
+version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Filestore (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/file1_beta1" homepage = "https://cloud.google.com/filestore/" -documentation = "https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103" +documentation = "https://docs.rs/google-file1_beta1/5.0.2+20230103" license = "MIT" keywords = ["file", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/file1_beta1/README.md b/gen/file1_beta1/README.md index 9cc7b5e03b..bb7624b3b3 100644 --- a/gen/file1_beta1/README.md +++ b/gen/file1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-file1_beta1` library allows access to all features of the *Google Cloud Filestore* service. -This documentation was generated from *Cloud Filestore* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *file:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Filestore* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *file:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Filestore* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/filestore/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/CloudFilestore) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/CloudFilestore) ... 
* projects - * [*locations backups create*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationBackupCreateCall), [*locations backups delete*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationBackupDeleteCall), [*locations backups get*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationBackupGetCall), [*locations backups list*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationBackupListCall), [*locations backups patch*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationBackupPatchCall), [*locations get*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceDeleteCall), [*locations instances get*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstancePatchCall), [*locations instances restore*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceRestoreCall), [*locations instances revert*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceRevertCall), [*locations instances shares 
create*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceShareCreateCall), [*locations instances shares delete*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceShareDeleteCall), [*locations instances shares get*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceShareGetCall), [*locations instances shares list*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceShareListCall), [*locations instances shares patch*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceSharePatchCall), [*locations instances snapshots create*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotCreateCall), [*locations instances snapshots delete*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotDeleteCall), [*locations instances snapshots get*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotGetCall), [*locations instances snapshots list*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotListCall), [*locations instances snapshots patch*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotPatchCall), [*locations list*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationOperationCancelCall), [*locations operations 
delete*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/api::ProjectLocationOperationListCall) + * [*locations backups create*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationBackupCreateCall), [*locations backups delete*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationBackupDeleteCall), [*locations backups get*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationBackupGetCall), [*locations backups list*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationBackupListCall), [*locations backups patch*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationBackupPatchCall), [*locations get*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceDeleteCall), [*locations instances get*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstancePatchCall), [*locations instances 
restore*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceRestoreCall), [*locations instances revert*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceRevertCall), [*locations instances shares create*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceShareCreateCall), [*locations instances shares delete*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceShareDeleteCall), [*locations instances shares get*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceShareGetCall), [*locations instances shares list*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceShareListCall), [*locations instances shares patch*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceSharePatchCall), [*locations instances snapshots create*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotCreateCall), [*locations instances snapshots delete*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotDeleteCall), [*locations instances snapshots get*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotGetCall), [*locations instances snapshots list*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotListCall), [*locations instances snapshots patch*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationInstanceSnapshotPatchCall), [*locations list*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationListCall), [*locations operations 
cancel*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/CloudFilestore)** +* **[Hub](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/CloudFilestore)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::Part)** + * 
**[Parts](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -139,17 +139,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -159,29 +159,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-file1_beta1/5.0.2-beta-1+20230103/google_file1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-file1_beta1/5.0.2+20230103/google_file1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/file1_beta1/src/api.rs b/gen/file1_beta1/src/api.rs index f3939afadb..33fdd71f48 100644 --- a/gen/file1_beta1/src/api.rs +++ b/gen/file1_beta1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudFilestore { CloudFilestore { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://file.googleapis.com/".to_string(), _root_url: "https://file.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudFilestore { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/file1_beta1/src/client.rs b/gen/file1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/file1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/file1_beta1/src/lib.rs b/gen/file1_beta1/src/lib.rs index b294f93cf5..0a2611f134 100644 --- a/gen/file1_beta1/src/lib.rs +++ b/gen/file1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Filestore* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *file:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Filestore* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *file:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Filestore* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/filestore/). diff --git a/gen/firebase1_beta1-cli/Cargo.toml b/gen/firebase1_beta1-cli/Cargo.toml index 90a3d97d14..6cd802e0e4 100644 --- a/gen/firebase1_beta1-cli/Cargo.toml +++ b/gen/firebase1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebase1_beta1-cli" -version = "4.0.1+20220304" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Management (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebase1_beta1-cli" @@ -20,13 +20,13 @@ name = "firebase1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebase1_beta1] path = "../firebase1_beta1" -version = "4.0.1+20220304" +version = "5.0.2+20230123" + diff --git a/gen/firebase1_beta1-cli/README.md b/gen/firebase1_beta1-cli/README.md index 387c2aebd9..f25d35c3f5 100644 --- a/gen/firebase1_beta1-cli/README.md +++ b/gen/firebase1_beta1-cli/README.md @@ -25,7 +25,7 @@ 
Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebase Management* API at revision *20220304*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebase Management* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash firebase1-beta1 [options] @@ -41,9 +41,11 @@ firebase1-beta1 [options] android-apps-get-config [-p ]... [-o ] android-apps-list [-p ]... [-o ] android-apps-patch (-r )... [-p ]... [-o ] + android-apps-remove (-r )... [-p ]... [-o ] android-apps-sha-create (-r )... [-p ]... [-o ] android-apps-sha-delete [-p ]... [-o ] android-apps-sha-list [-p ]... [-o ] + android-apps-undelete (-r )... [-p ]... [-o ] available-locations-list [-p ]... [-o ] default-location-finalize (-r )... [-p ]... [-o ] get [-p ]... [-o ] @@ -54,6 +56,8 @@ firebase1-beta1 [options] ios-apps-get-config [-p ]... [-o ] ios-apps-list [-p ]... [-o ] ios-apps-patch (-r )... [-p ]... [-o ] + ios-apps-remove (-r )... [-p ]... [-o ] + ios-apps-undelete (-r )... [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] remove-analytics (-r )... [-p ]... [-o ] @@ -63,6 +67,8 @@ firebase1-beta1 [options] web-apps-get-config [-p ]... [-o ] web-apps-list [-p ]... [-o ] web-apps-patch (-r )... [-p ]... [-o ] + web-apps-remove (-r )... [-p ]... [-o ] + web-apps-undelete (-r )... [-p ]... 
[-o ] firebase1-beta1 --help Configuration: diff --git a/gen/firebase1_beta1-cli/mkdocs.yml b/gen/firebase1_beta1-cli/mkdocs.yml index e8389170fb..0e85f552f4 100644 --- a/gen/firebase1_beta1-cli/mkdocs.yml +++ b/gen/firebase1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebase Management v4.0.1+20220304 +site_name: Firebase Management v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-firebase1_beta1-cli site_description: A complete library to interact with Firebase Management (protocol v1beta1) @@ -7,39 +7,48 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebase1_beta1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['available-projects_list.md', 'Available Projects', 'List'] -- ['operations_get.md', 'Operations', 'Get'] -- ['projects_add-firebase.md', 'Projects', 'Add Firebase'] -- ['projects_add-google-analytics.md', 'Projects', 'Add Google Analytics'] -- ['projects_android-apps-create.md', 'Projects', 'Android Apps Create'] -- ['projects_android-apps-get.md', 'Projects', 'Android Apps Get'] -- ['projects_android-apps-get-config.md', 'Projects', 'Android Apps Get Config'] -- ['projects_android-apps-list.md', 'Projects', 'Android Apps List'] -- ['projects_android-apps-patch.md', 'Projects', 'Android Apps Patch'] -- ['projects_android-apps-sha-create.md', 'Projects', 'Android Apps Sha Create'] -- ['projects_android-apps-sha-delete.md', 'Projects', 'Android Apps Sha Delete'] -- ['projects_android-apps-sha-list.md', 'Projects', 'Android Apps Sha List'] -- ['projects_available-locations-list.md', 'Projects', 'Available Locations List'] -- ['projects_default-location-finalize.md', 'Projects', 'Default Location Finalize'] -- ['projects_get.md', 'Projects', 'Get'] -- ['projects_get-admin-sdk-config.md', 'Projects', 'Get Admin Sdk Config'] -- ['projects_get-analytics-details.md', 'Projects', 'Get Analytics Details'] -- ['projects_ios-apps-create.md', 'Projects', 'Ios Apps Create'] -- 
['projects_ios-apps-get.md', 'Projects', 'Ios Apps Get'] -- ['projects_ios-apps-get-config.md', 'Projects', 'Ios Apps Get Config'] -- ['projects_ios-apps-list.md', 'Projects', 'Ios Apps List'] -- ['projects_ios-apps-patch.md', 'Projects', 'Ios Apps Patch'] -- ['projects_list.md', 'Projects', 'List'] -- ['projects_patch.md', 'Projects', 'Patch'] -- ['projects_remove-analytics.md', 'Projects', 'Remove Analytics'] -- ['projects_search-apps.md', 'Projects', 'Search Apps'] -- ['projects_web-apps-create.md', 'Projects', 'Web Apps Create'] -- ['projects_web-apps-get.md', 'Projects', 'Web Apps Get'] -- ['projects_web-apps-get-config.md', 'Projects', 'Web Apps Get Config'] -- ['projects_web-apps-list.md', 'Projects', 'Web Apps List'] -- ['projects_web-apps-patch.md', 'Projects', 'Web Apps Patch'] +nav: +- Home: 'index.md' +- 'Available Projects': + - 'List': 'available-projects_list.md' +- 'Operations': + - 'Get': 'operations_get.md' +- 'Projects': + - 'Add Firebase': 'projects_add-firebase.md' + - 'Add Google Analytics': 'projects_add-google-analytics.md' + - 'Android Apps Create': 'projects_android-apps-create.md' + - 'Android Apps Get': 'projects_android-apps-get.md' + - 'Android Apps Get Config': 'projects_android-apps-get-config.md' + - 'Android Apps List': 'projects_android-apps-list.md' + - 'Android Apps Patch': 'projects_android-apps-patch.md' + - 'Android Apps Remove': 'projects_android-apps-remove.md' + - 'Android Apps Sha Create': 'projects_android-apps-sha-create.md' + - 'Android Apps Sha Delete': 'projects_android-apps-sha-delete.md' + - 'Android Apps Sha List': 'projects_android-apps-sha-list.md' + - 'Android Apps Undelete': 'projects_android-apps-undelete.md' + - 'Available Locations List': 'projects_available-locations-list.md' + - 'Default Location Finalize': 'projects_default-location-finalize.md' + - 'Get': 'projects_get.md' + - 'Get Admin Sdk Config': 'projects_get-admin-sdk-config.md' + - 'Get Analytics Details': 'projects_get-analytics-details.md' + - 
'Ios Apps Create': 'projects_ios-apps-create.md' + - 'Ios Apps Get': 'projects_ios-apps-get.md' + - 'Ios Apps Get Config': 'projects_ios-apps-get-config.md' + - 'Ios Apps List': 'projects_ios-apps-list.md' + - 'Ios Apps Patch': 'projects_ios-apps-patch.md' + - 'Ios Apps Remove': 'projects_ios-apps-remove.md' + - 'Ios Apps Undelete': 'projects_ios-apps-undelete.md' + - 'List': 'projects_list.md' + - 'Patch': 'projects_patch.md' + - 'Remove Analytics': 'projects_remove-analytics.md' + - 'Search Apps': 'projects_search-apps.md' + - 'Web Apps Create': 'projects_web-apps-create.md' + - 'Web Apps Get': 'projects_web-apps-get.md' + - 'Web Apps Get Config': 'projects_web-apps-get-config.md' + - 'Web Apps List': 'projects_web-apps-list.md' + - 'Web Apps Patch': 'projects_web-apps-patch.md' + - 'Web Apps Remove': 'projects_web-apps-remove.md' + - 'Web Apps Undelete': 'projects_web-apps-undelete.md' theme: readthedocs diff --git a/gen/firebase1_beta1-cli/src/client.rs b/gen/firebase1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebase1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebase1_beta1-cli/src/main.rs b/gen/firebase1_beta1-cli/src/main.rs index 69eb6aa810..b647d43c01 100644 --- a/gen/firebase1_beta1-cli/src/main.rs +++ b/gen/firebase1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebase1_beta1::{api, Error, oauth2}; +use google_firebase1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -359,11 +358,15 @@ where "api-key-id" => Some(("apiKeyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-id" => Some(("appId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "package-name" => Some(("packageName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha1-hashes" => 
Some(("sha1Hashes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "sha256-hashes" => Some(("sha256Hashes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "display-name", "name", "package-name", "project-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "display-name", "etag", "name", "package-name", "project-id", "sha1-hashes", "sha256-hashes", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -533,11 +536,14 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "show-deleted" => { + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -552,7 +558,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); v } )); } } @@ -612,11 +618,15 @@ where "api-key-id" => Some(("apiKeyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-id" => Some(("appId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "package-name" => Some(("packageName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "sha1-hashes" => Some(("sha1Hashes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "sha256-hashes" => Some(("sha256Hashes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "display-name", "name", "package-name", "project-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "display-name", "etag", "name", "package-name", "project-id", "sha1-hashes", "sha256-hashes", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -631,7 +641,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -680,6 +690,94 @@ where } } + async fn _projects_android_apps_remove(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = 
field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "allow-missing" => Some(("allowMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "immediate" => Some(("immediate", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-missing", "etag", "immediate", "validate-only"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RemoveAndroidAppRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().android_apps_remove(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + 
if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_android_apps_sha_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -871,6 +969,92 @@ where } } + async fn _projects_android_apps_undelete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "validate-only"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::UndeleteAndroidAppRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().android_apps_undelete(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value 
= json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_available_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().available_locations_list(opt.value_of("parent").unwrap_or("")); @@ -881,7 +1065,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1199,11 +1383,13 @@ where "app-store-id" => Some(("appStoreId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bundle-id" => Some(("bundleId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "team-id" => Some(("teamId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "app-store-id", "bundle-id", "display-name", "name", "project-id", "team-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "app-store-id", "bundle-id", "display-name", "etag", "name", "project-id", "state", "team-id"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1373,11 +1559,14 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "show-deleted" => { + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1392,7 +1581,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); v } )); } } @@ -1454,11 +1643,13 @@ where "app-store-id" => Some(("appStoreId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bundle-id" => Some(("bundleId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "team-id" => Some(("teamId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "app-store-id", "bundle-id", 
"display-name", "name", "project-id", "team-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "app-store-id", "bundle-id", "display-name", "etag", "name", "project-id", "state", "team-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1473,7 +1664,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1522,17 +1713,194 @@ where } } + async fn _projects_ios_apps_remove(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "allow-missing" => Some(("allowMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "immediate" => Some(("immediate", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-missing", "etag", "immediate", "validate-only"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RemoveIosAppRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().ios_apps_remove(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_ios_apps_undelete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "validate-only"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::UndeleteIosAppRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().ios_apps_undelete(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().list(); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "show-deleted" => { + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1547,7 +1915,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); v } )); } } @@ -1604,7 +1972,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-number" => Some(("projectNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1614,7 +1984,7 @@ where "resources.storage-bucket" => Some(("resources.storageBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["display-name", "hosting-site", "location-id", "name", "project-id", "project-number", "realtime-database-instance", "resources", "state", "storage-bucket"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "hosting-site", "location-id", "name", "project-id", "project-number", "realtime-database-instance", "resources", "state", "storage-bucket"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1629,7 +1999,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1769,11 +2139,14 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "show-deleted" => { + call = 
call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1791,7 +2164,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token", "show-deleted"].iter().map(|v|*v)); v } )); } } @@ -1852,11 +2225,13 @@ where "app-id" => Some(("appId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-urls" => Some(("appUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "web-id" => Some(("webId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "app-urls", "display-name", "name", "project-id", "web-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "app-urls", "display-name", "etag", "name", "project-id", "state", "web-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| 
v.to_string())))); None } @@ -2026,11 +2401,14 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "show-deleted" => { + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2045,7 +2423,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); v } )); } } @@ -2106,11 +2484,13 @@ where "app-id" => Some(("appId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-urls" => Some(("appUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "project-id" => Some(("projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "web-id" => Some(("webId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", "app-urls", "display-name", "name", "project-id", "web-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-key-id", "app-id", 
"app-urls", "display-name", "etag", "name", "project-id", "state", "web-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2125,7 +2505,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2174,6 +2554,180 @@ where } } + async fn _projects_web_apps_remove(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "allow-missing" => Some(("allowMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "immediate" => Some(("immediate", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-missing", "etag", "immediate", "validate-only"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RemoveWebAppRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().web_apps_remove(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_web_apps_undelete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "validate-only"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::UndeleteWebAppRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().web_apps_undelete(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -2224,6 +2778,9 @@ where ("android-apps-patch", Some(opt)) => { call_result = self._projects_android_apps_patch(opt, dry_run, &mut err).await; }, + ("android-apps-remove", Some(opt)) => { + call_result = self._projects_android_apps_remove(opt, dry_run, &mut err).await; + }, ("android-apps-sha-create", Some(opt)) => { call_result = self._projects_android_apps_sha_create(opt, dry_run, &mut err).await; }, @@ -2233,6 +2790,9 @@ where ("android-apps-sha-list", Some(opt)) => { call_result = self._projects_android_apps_sha_list(opt, dry_run, &mut err).await; }, + ("android-apps-undelete", Some(opt)) => { + call_result = self._projects_android_apps_undelete(opt, dry_run, &mut err).await; + }, ("available-locations-list", Some(opt)) => { call_result = self._projects_available_locations_list(opt, dry_run, &mut err).await; }, @@ -2263,6 +2823,12 @@ where ("ios-apps-patch", Some(opt)) => { call_result = self._projects_ios_apps_patch(opt, dry_run, &mut err).await; }, + ("ios-apps-remove", Some(opt)) => { + call_result = self._projects_ios_apps_remove(opt, dry_run, &mut err).await; + }, + ("ios-apps-undelete", Some(opt)) => { + call_result = self._projects_ios_apps_undelete(opt, dry_run, &mut err).await; + }, ("list", Some(opt)) => { call_result = self._projects_list(opt, dry_run, &mut err).await; }, @@ -2290,6 +2856,12 @@ where ("web-apps-patch", Some(opt)) => { call_result = self._projects_web_apps_patch(opt, dry_run, &mut err).await; }, + ("web-apps-remove", Some(opt)) => { + call_result = self._projects_web_apps_remove(opt, dry_run, &mut err).await; + }, + ("web-apps-undelete", Some(opt)) => { + call_result = 
self._projects_web_apps_undelete(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2413,7 +2985,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'add-firebase', 'add-google-analytics', 'android-apps-create', 'android-apps-get', 'android-apps-get-config', 'android-apps-list', 'android-apps-patch', 'android-apps-sha-create', 'android-apps-sha-delete', 'android-apps-sha-list', 'available-locations-list', 'default-location-finalize', 'get', 'get-admin-sdk-config', 'get-analytics-details', 'ios-apps-create', 'ios-apps-get', 'ios-apps-get-config', 'ios-apps-list', 'ios-apps-patch', 'list', 'patch', 'remove-analytics', 'search-apps', 'web-apps-create', 'web-apps-get', 'web-apps-get-config', 'web-apps-list' and 'web-apps-patch'", vec![ + ("projects", "methods: 'add-firebase', 'add-google-analytics', 'android-apps-create', 'android-apps-get', 'android-apps-get-config', 'android-apps-list', 'android-apps-patch', 'android-apps-remove', 'android-apps-sha-create', 'android-apps-sha-delete', 'android-apps-sha-list', 'android-apps-undelete', 'available-locations-list', 'default-location-finalize', 'get', 'get-admin-sdk-config', 'get-analytics-details', 'ios-apps-create', 'ios-apps-get', 'ios-apps-get-config', 'ios-apps-list', 'ios-apps-patch', 'ios-apps-remove', 'ios-apps-undelete', 'list', 'patch', 'remove-analytics', 'search-apps', 'web-apps-create', 'web-apps-get', 'web-apps-get-config', 'web-apps-list', 'web-apps-patch', 'web-apps-remove' and 'web-apps-undelete'", vec![ ("add-firebase", Some(r##"Adds Firebase resources to the specified existing [Google Cloud Platform (GCP) `Project`] (https://cloud.google.com/resource-manager/reference/rest/v1/projects). Since a FirebaseProject is actually also a GCP `Project`, a `FirebaseProject` has the same underlying GCP identifiers (`projectNumber` and `projectId`). This allows for easy interop with Google APIs. 
The result of this call is an [`Operation`](../../v1beta1/operations). Poll the `Operation` to track the provisioning process by calling GetOperation until [`done`](../../v1beta1/operations#Operation.FIELDS.done) is `true`. When `done` is `true`, the `Operation` has either succeeded or failed. If the `Operation` succeeded, its [`response`](../../v1beta1/operations#Operation.FIELDS.response) is set to a FirebaseProject; if the `Operation` failed, its [`error`](../../v1beta1/operations#Operation.FIELDS.error) is set to a google.rpc.Status. The `Operation` is automatically deleted after completion, so there is no need to call DeleteOperation. This method does not modify any billing account information on the underlying GCP `Project`. To call `AddFirebase`, a project member or service account must have the following permissions (the IAM roles of Editor and Owner contain these permissions): `firebase.projects.update`, `resourcemanager.projects.get`, `serviceusage.services.enable`, and `serviceusage.services.get`."##), "Details at http://byron.github.io/google-apis-rs/google_firebase1_beta1_cli/projects_add-firebase", @@ -2586,6 +3158,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("android-apps-remove", + Some(r##"Removes the specified AndroidApp from the FirebaseProject."##), + "Details at http://byron.github.io/google-apis-rs/google_firebase1_beta1_cli/projects_android-apps-remove", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the AndroidApp, in the format: projects/ PROJECT_IDENTIFIER/androidApps/APP_ID Since an APP_ID is a unique identifier, the Unique Resource from Sub-Collection access pattern may be used here, in the format: projects/-/androidApps/APP_ID Refer to the AndroidApp [name](../projects.androidApps#AndroidApp.FIELDS.name) field for details about PROJECT_IDENTIFIER and APP_ID values."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2658,6 +3258,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("android-apps-undelete", + Some(r##"Restores the specified AndroidApp to the FirebaseProject."##), + "Details at http://byron.github.io/google-apis-rs/google_firebase1_beta1_cli/projects_android-apps-undelete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the AndroidApp, in the format: projects/ PROJECT_IDENTIFIER/androidApps/APP_ID Since an APP_ID is a unique identifier, the Unique Resource from Sub-Collection access pattern may be used here, in the format: projects/-/androidApps/APP_ID Refer to the AndroidApp [name](../projects.androidApps#AndroidApp.FIELDS.name) field for details about PROJECT_IDENTIFIER and APP_ID values."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2896,6 +3524,62 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("ios-apps-remove", + Some(r##"Removes the specified IosApp from the FirebaseProject."##), + "Details at http://byron.github.io/google-apis-rs/google_firebase1_beta1_cli/projects_ios-apps-remove", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the IosApp, in the format: projects/ PROJECT_IDENTIFIER/iosApps/APP_ID Since an APP_ID is a unique identifier, the Unique Resource from Sub-Collection access pattern may be used here, in the format: projects/-/iosApps/APP_ID Refer to the IosApp [name](../projects.iosApps#IosApp.FIELDS.name) field for details about PROJECT_IDENTIFIER and APP_ID values."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("ios-apps-undelete", + Some(r##"Restores the specified IosApp to the FirebaseProject."##), + "Details at http://byron.github.io/google-apis-rs/google_firebase1_beta1_cli/projects_ios-apps-undelete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the IosApp, in the format: projects/ PROJECT_IDENTIFIER/iosApps/APP_ID Since an APP_ID is a unique identifier, the Unique Resource from Sub-Collection access pattern may be used here, in the format: projects/-/iosApps/APP_ID Refer to the IosApp [name](../projects.iosApps#IosApp.FIELDS.name) field for details about PROJECT_IDENTIFIER and APP_ID values."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3112,6 +3796,62 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("web-apps-remove", + Some(r##"Removes the specified WebApp from the FirebaseProject."##), + "Details at http://byron.github.io/google-apis-rs/google_firebase1_beta1_cli/projects_web-apps-remove", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the WebApp, in the format: projects/ PROJECT_IDENTIFIER/webApps/APP_ID Since an APP_ID is a unique identifier, the Unique Resource from Sub-Collection access pattern may be used here, in the format: projects/-/webApps/APP_ID Refer to the WebApp [name](../projects.webApps#WebApp.FIELDS.name) field for details about PROJECT_IDENTIFIER and APP_ID values."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("web-apps-undelete", + Some(r##"Restores the specified WebApp to the FirebaseProject."##), + "Details at http://byron.github.io/google-apis-rs/google_firebase1_beta1_cli/projects_web-apps-undelete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the WebApp, in the format: projects/ PROJECT_IDENTIFIER/webApps/APP_ID Since an APP_ID is a unique identifier, the Unique Resource from Sub-Collection access pattern may be used here, in the format: projects/-/webApps/APP_ID Refer to the WebApp [name](../projects.webApps#WebApp.FIELDS.name) field for details about PROJECT_IDENTIFIER and APP_ID values."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3124,7 +3864,7 @@ async fn main() { let mut app = App::new("firebase1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220304") + .version("5.0.2+20230123") .about("The Firebase Management API enables programmatic setup and management of Firebase projects, including a project's Firebase resources and Firebase apps.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebase1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/firebase1_beta1/Cargo.toml b/gen/firebase1_beta1/Cargo.toml index bcde26dd61..7695671da5 100644 --- a/gen/firebase1_beta1/Cargo.toml +++ b/gen/firebase1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firebase1_beta1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Management (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebase1_beta1" homepage = "https://firebase.google.com" -documentation = "https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123" +documentation = 
"https://docs.rs/google-firebase1_beta1/5.0.2+20230123" license = "MIT" keywords = ["firebase", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firebase1_beta1/README.md b/gen/firebase1_beta1/README.md index 43b6eb6939..f8cb0fee25 100644 --- a/gen/firebase1_beta1/README.md +++ b/gen/firebase1_beta1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-firebase1_beta1` library allows access to all features of the *Google Firebase Management* service. -This documentation was generated from *Firebase Management* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebase:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firebase Management* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebase:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firebase Management* *v1_beta1* API can be found at the [official documentation site](https://firebase.google.com). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/FirebaseManagement) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/FirebaseManagement) ... 
* available projects - * [*list*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::AvailableProjectListCall) -* [operations](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::Operation) - * [*get*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::OperationGetCall) + * [*list*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::AvailableProjectListCall) +* [operations](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::Operation) + * [*get*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::OperationGetCall) * projects - * [*add firebase*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAddFirebaseCall), [*add google analytics*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAddGoogleAnalyticCall), [*android apps create*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppCreateCall), [*android apps get*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppGetCall), [*android apps get config*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppGetConfigCall), [*android apps list*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppListCall), [*android apps patch*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppPatchCall), [*android apps remove*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppRemoveCall), [*android apps sha 
create*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppShaCreateCall), [*android apps sha delete*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppShaDeleteCall), [*android apps sha list*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppShaListCall), [*android apps undelete*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAndroidAppUndeleteCall), [*available locations list*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectAvailableLocationListCall), [*default location finalize*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectDefaultLocationFinalizeCall), [*get*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectGetCall), [*get admin sdk config*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectGetAdminSdkConfigCall), [*get analytics details*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectGetAnalyticsDetailCall), [*ios apps create*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectIosAppCreateCall), [*ios apps get*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectIosAppGetCall), [*ios apps get config*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectIosAppGetConfigCall), [*ios apps list*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectIosAppListCall), [*ios apps patch*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectIosAppPatchCall), [*ios apps 
remove*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectIosAppRemoveCall), [*ios apps undelete*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectIosAppUndeleteCall), [*list*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectListCall), [*patch*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectPatchCall), [*remove analytics*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectRemoveAnalyticCall), [*search apps*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectSearchAppCall), [*web apps create*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectWebAppCreateCall), [*web apps get*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectWebAppGetCall), [*web apps get config*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectWebAppGetConfigCall), [*web apps list*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectWebAppListCall), [*web apps patch*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectWebAppPatchCall), [*web apps remove*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectWebAppRemoveCall) and [*web apps undelete*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/api::ProjectWebAppUndeleteCall) + * [*add firebase*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAddFirebaseCall), [*add google analytics*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAddGoogleAnalyticCall), [*android apps 
create*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppCreateCall), [*android apps get*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppGetCall), [*android apps get config*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppGetConfigCall), [*android apps list*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppListCall), [*android apps patch*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppPatchCall), [*android apps remove*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppRemoveCall), [*android apps sha create*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppShaCreateCall), [*android apps sha delete*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppShaDeleteCall), [*android apps sha list*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppShaListCall), [*android apps undelete*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAndroidAppUndeleteCall), [*available locations list*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectAvailableLocationListCall), [*default location finalize*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectDefaultLocationFinalizeCall), [*get*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectGetCall), [*get admin sdk config*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectGetAdminSdkConfigCall), [*get analytics 
details*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectGetAnalyticsDetailCall), [*ios apps create*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectIosAppCreateCall), [*ios apps get*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectIosAppGetCall), [*ios apps get config*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectIosAppGetConfigCall), [*ios apps list*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectIosAppListCall), [*ios apps patch*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectIosAppPatchCall), [*ios apps remove*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectIosAppRemoveCall), [*ios apps undelete*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectIosAppUndeleteCall), [*list*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectListCall), [*patch*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectPatchCall), [*remove analytics*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectRemoveAnalyticCall), [*search apps*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectSearchAppCall), [*web apps create*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectWebAppCreateCall), [*web apps get*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectWebAppGetCall), [*web apps get config*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectWebAppGetConfigCall), [*web apps list*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectWebAppListCall), 
[*web apps patch*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectWebAppPatchCall), [*web apps remove*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectWebAppRemoveCall) and [*web apps undelete*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/api::ProjectWebAppUndeleteCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/FirebaseManagement)** +* **[Hub](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/FirebaseManagement)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::Part)** * a 
collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -140,17 +140,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -160,29 +160,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebase1_beta1/5.0.2-beta-1+20230123/google_firebase1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebase1_beta1/5.0.2+20230123/google_firebase1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/firebase1_beta1/src/api.rs b/gen/firebase1_beta1/src/api.rs index a3c31bbd07..b46f625bc7 100644 --- a/gen/firebase1_beta1/src/api.rs +++ b/gen/firebase1_beta1/src/api.rs @@ -137,7 +137,7 @@ impl<'a, S> FirebaseManagement { FirebaseManagement { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebase.googleapis.com/".to_string(), _root_url: "https://firebase.googleapis.com/".to_string(), } @@ -154,7 +154,7 @@ impl<'a, S> FirebaseManagement { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebase1_beta1/src/client.rs b/gen/firebase1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebase1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebase1_beta1/src/lib.rs b/gen/firebase1_beta1/src/lib.rs index def045c8d2..5a62e85143 100644 --- a/gen/firebase1_beta1/src/lib.rs +++ b/gen/firebase1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebase Management* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebase:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebase Management* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebase:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebase Management* *v1_beta1* API can be found at the //! [official documentation site](https://firebase.google.com). diff --git a/gen/firebaseappcheck1_beta-cli/Cargo.toml b/gen/firebaseappcheck1_beta-cli/Cargo.toml index f15cd6016c..60a65d376c 100644 --- a/gen/firebaseappcheck1_beta-cli/Cargo.toml +++ b/gen/firebaseappcheck1_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebaseappcheck1_beta-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230116" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebaseappcheck (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseappcheck1_beta-cli" @@ -20,13 +20,13 @@ name = "firebaseappcheck1-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebaseappcheck1_beta] path = "../firebaseappcheck1_beta" -version = "4.0.1+20220225" +version = "5.0.2+20230116" + diff --git a/gen/firebaseappcheck1_beta-cli/README.md b/gen/firebaseappcheck1_beta-cli/README.md index a3d2eefb4f..7d1af77cfc 100644 --- 
a/gen/firebaseappcheck1_beta-cli/README.md +++ b/gen/firebaseappcheck1_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebaseappcheck* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebaseappcheck* API at revision *20230116*. The CLI is at version *5.0.2*. ```bash firebaseappcheck1-beta [options] @@ -48,16 +48,25 @@ firebaseappcheck1-beta [options] apps-exchange-custom-token (-r )... [-p ]... [-o ] apps-exchange-debug-token (-r )... [-p ]... [-o ] apps-exchange-device-check-token (-r )... [-p ]... [-o ] + apps-exchange-play-integrity-token (-r )... [-p ]... [-o ] apps-exchange-recaptcha-enterprise-token (-r )... [-p ]... [-o ] apps-exchange-recaptcha-token (-r )... [-p ]... [-o ] + apps-exchange-recaptcha-v3-token (-r )... [-p ]... [-o ] apps-exchange-safety-net-token (-r )... [-p ]... [-o ] apps-generate-app-attest-challenge (-r )... [-p ]... [-o ] + apps-generate-play-integrity-challenge (-r )... [-p ]... [-o ] + apps-play-integrity-config-batch-get [-p ]... [-o ] + apps-play-integrity-config-get [-p ]... [-o ] + apps-play-integrity-config-patch (-r )... [-p ]... [-o ] apps-recaptcha-config-batch-get [-p ]... [-o ] apps-recaptcha-config-get [-p ]... [-o ] apps-recaptcha-config-patch (-r )... [-p ]... [-o ] apps-recaptcha-enterprise-config-batch-get [-p ]... [-o ] apps-recaptcha-enterprise-config-get [-p ]... [-o ] apps-recaptcha-enterprise-config-patch (-r )... [-p ]... [-o ] + apps-recaptcha-v3-config-batch-get [-p ]... [-o ] + apps-recaptcha-v3-config-get [-p ]... [-o ] + apps-recaptcha-v3-config-patch (-r )... [-p ]... [-o ] apps-safety-net-config-batch-get [-p ]... [-o ] apps-safety-net-config-get [-p ]... [-o ] apps-safety-net-config-patch (-r )... [-p ]... 
[-o ] diff --git a/gen/firebaseappcheck1_beta-cli/mkdocs.yml b/gen/firebaseappcheck1_beta-cli/mkdocs.yml index b6ff10702e..a117d5e612 100644 --- a/gen/firebaseappcheck1_beta-cli/mkdocs.yml +++ b/gen/firebaseappcheck1_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebaseappcheck v4.0.1+20220225 +site_name: Firebaseappcheck v5.0.2+20230116 site_url: http://byron.github.io/google-apis-rs/google-firebaseappcheck1_beta-cli site_description: A complete library to interact with Firebaseappcheck (protocol v1beta) @@ -7,42 +7,53 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseappcheck docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['jwks_get.md', 'Jwks', 'Get'] -- ['projects_apps-app-attest-config-batch-get.md', 'Projects', 'Apps App Attest Config Batch Get'] -- ['projects_apps-app-attest-config-get.md', 'Projects', 'Apps App Attest Config Get'] -- ['projects_apps-app-attest-config-patch.md', 'Projects', 'Apps App Attest Config Patch'] -- ['projects_apps-debug-tokens-create.md', 'Projects', 'Apps Debug Tokens Create'] -- ['projects_apps-debug-tokens-delete.md', 'Projects', 'Apps Debug Tokens Delete'] -- ['projects_apps-debug-tokens-get.md', 'Projects', 'Apps Debug Tokens Get'] -- ['projects_apps-debug-tokens-list.md', 'Projects', 'Apps Debug Tokens List'] -- ['projects_apps-debug-tokens-patch.md', 'Projects', 'Apps Debug Tokens Patch'] -- ['projects_apps-device-check-config-batch-get.md', 'Projects', 'Apps Device Check Config Batch Get'] -- ['projects_apps-device-check-config-get.md', 'Projects', 'Apps Device Check Config Get'] -- ['projects_apps-device-check-config-patch.md', 'Projects', 'Apps Device Check Config Patch'] -- ['projects_apps-exchange-app-attest-assertion.md', 'Projects', 'Apps Exchange App Attest Assertion'] -- ['projects_apps-exchange-app-attest-attestation.md', 'Projects', 'Apps Exchange App Attest Attestation'] -- ['projects_apps-exchange-custom-token.md', 'Projects', 'Apps Exchange Custom Token'] -- 
['projects_apps-exchange-debug-token.md', 'Projects', 'Apps Exchange Debug Token'] -- ['projects_apps-exchange-device-check-token.md', 'Projects', 'Apps Exchange Device Check Token'] -- ['projects_apps-exchange-recaptcha-enterprise-token.md', 'Projects', 'Apps Exchange Recaptcha Enterprise Token'] -- ['projects_apps-exchange-recaptcha-token.md', 'Projects', 'Apps Exchange Recaptcha Token'] -- ['projects_apps-exchange-safety-net-token.md', 'Projects', 'Apps Exchange Safety Net Token'] -- ['projects_apps-generate-app-attest-challenge.md', 'Projects', 'Apps Generate App Attest Challenge'] -- ['projects_apps-recaptcha-config-batch-get.md', 'Projects', 'Apps Recaptcha Config Batch Get'] -- ['projects_apps-recaptcha-config-get.md', 'Projects', 'Apps Recaptcha Config Get'] -- ['projects_apps-recaptcha-config-patch.md', 'Projects', 'Apps Recaptcha Config Patch'] -- ['projects_apps-recaptcha-enterprise-config-batch-get.md', 'Projects', 'Apps Recaptcha Enterprise Config Batch Get'] -- ['projects_apps-recaptcha-enterprise-config-get.md', 'Projects', 'Apps Recaptcha Enterprise Config Get'] -- ['projects_apps-recaptcha-enterprise-config-patch.md', 'Projects', 'Apps Recaptcha Enterprise Config Patch'] -- ['projects_apps-safety-net-config-batch-get.md', 'Projects', 'Apps Safety Net Config Batch Get'] -- ['projects_apps-safety-net-config-get.md', 'Projects', 'Apps Safety Net Config Get'] -- ['projects_apps-safety-net-config-patch.md', 'Projects', 'Apps Safety Net Config Patch'] -- ['projects_services-batch-update.md', 'Projects', 'Services Batch Update'] -- ['projects_services-get.md', 'Projects', 'Services Get'] -- ['projects_services-list.md', 'Projects', 'Services List'] -- ['projects_services-patch.md', 'Projects', 'Services Patch'] +nav: +- Home: 'index.md' +- 'Jwks': + - 'Get': 'jwks_get.md' +- 'Projects': + - 'Apps App Attest Config Batch Get': 'projects_apps-app-attest-config-batch-get.md' + - 'Apps App Attest Config Get': 'projects_apps-app-attest-config-get.md' + - 'Apps 
App Attest Config Patch': 'projects_apps-app-attest-config-patch.md' + - 'Apps Debug Tokens Create': 'projects_apps-debug-tokens-create.md' + - 'Apps Debug Tokens Delete': 'projects_apps-debug-tokens-delete.md' + - 'Apps Debug Tokens Get': 'projects_apps-debug-tokens-get.md' + - 'Apps Debug Tokens List': 'projects_apps-debug-tokens-list.md' + - 'Apps Debug Tokens Patch': 'projects_apps-debug-tokens-patch.md' + - 'Apps Device Check Config Batch Get': 'projects_apps-device-check-config-batch-get.md' + - 'Apps Device Check Config Get': 'projects_apps-device-check-config-get.md' + - 'Apps Device Check Config Patch': 'projects_apps-device-check-config-patch.md' + - 'Apps Exchange App Attest Assertion': 'projects_apps-exchange-app-attest-assertion.md' + - 'Apps Exchange App Attest Attestation': 'projects_apps-exchange-app-attest-attestation.md' + - 'Apps Exchange Custom Token': 'projects_apps-exchange-custom-token.md' + - 'Apps Exchange Debug Token': 'projects_apps-exchange-debug-token.md' + - 'Apps Exchange Device Check Token': 'projects_apps-exchange-device-check-token.md' + - 'Apps Exchange Play Integrity Token': 'projects_apps-exchange-play-integrity-token.md' + - 'Apps Exchange Recaptcha Enterprise Token': 'projects_apps-exchange-recaptcha-enterprise-token.md' + - 'Apps Exchange Recaptcha Token': 'projects_apps-exchange-recaptcha-token.md' + - 'Apps Exchange Recaptcha V3 Token': 'projects_apps-exchange-recaptcha-v3-token.md' + - 'Apps Exchange Safety Net Token': 'projects_apps-exchange-safety-net-token.md' + - 'Apps Generate App Attest Challenge': 'projects_apps-generate-app-attest-challenge.md' + - 'Apps Generate Play Integrity Challenge': 'projects_apps-generate-play-integrity-challenge.md' + - 'Apps Play Integrity Config Batch Get': 'projects_apps-play-integrity-config-batch-get.md' + - 'Apps Play Integrity Config Get': 'projects_apps-play-integrity-config-get.md' + - 'Apps Play Integrity Config Patch': 'projects_apps-play-integrity-config-patch.md' + - 'Apps 
Recaptcha Config Batch Get': 'projects_apps-recaptcha-config-batch-get.md' + - 'Apps Recaptcha Config Get': 'projects_apps-recaptcha-config-get.md' + - 'Apps Recaptcha Config Patch': 'projects_apps-recaptcha-config-patch.md' + - 'Apps Recaptcha Enterprise Config Batch Get': 'projects_apps-recaptcha-enterprise-config-batch-get.md' + - 'Apps Recaptcha Enterprise Config Get': 'projects_apps-recaptcha-enterprise-config-get.md' + - 'Apps Recaptcha Enterprise Config Patch': 'projects_apps-recaptcha-enterprise-config-patch.md' + - 'Apps Recaptcha V3 Config Batch Get': 'projects_apps-recaptcha-v3-config-batch-get.md' + - 'Apps Recaptcha V3 Config Get': 'projects_apps-recaptcha-v3-config-get.md' + - 'Apps Recaptcha V3 Config Patch': 'projects_apps-recaptcha-v3-config-patch.md' + - 'Apps Safety Net Config Batch Get': 'projects_apps-safety-net-config-batch-get.md' + - 'Apps Safety Net Config Get': 'projects_apps-safety-net-config-get.md' + - 'Apps Safety Net Config Patch': 'projects_apps-safety-net-config-patch.md' + - 'Services Batch Update': 'projects_services-batch-update.md' + - 'Services Get': 'projects_services-get.md' + - 'Services List': 'projects_services-list.md' + - 'Services Patch': 'projects_services-patch.md' theme: readthedocs diff --git a/gen/firebaseappcheck1_beta-cli/src/client.rs b/gen/firebaseappcheck1_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebaseappcheck1_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - 
-pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebaseappcheck1_beta-cli/src/main.rs b/gen/firebaseappcheck1_beta-cli/src/main.rs index bb6d60b260..42d3ce4478 100644 --- a/gen/firebaseappcheck1_beta-cli/src/main.rs +++ b/gen/firebaseappcheck1_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebaseappcheck1_beta::{api, Error, oauth2}; +use google_firebaseappcheck1_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -252,7 +251,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -502,7 +501,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -593,7 +592,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -794,7 +793,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1272,6 +1271,91 @@ where } } + async fn _projects_apps_exchange_play_integrity_token(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "play-integrity-token" => Some(("playIntegrityToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["play-integrity-token"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleFirebaseAppcheckV1betaExchangePlayIntegrityTokenRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().apps_exchange_play_integrity_token(request, opt.value_of("app").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_apps_exchange_recaptcha_enterprise_token(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1442,6 +1526,91 @@ where } } + async fn _projects_apps_exchange_recaptcha_v3_token(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "recaptcha-v3-token" => Some(("recaptchaV3Token", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["recaptcha-v3-token"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleFirebaseAppcheckV1betaExchangeRecaptchaV3TokenRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().apps_exchange_recaptcha_v3_token(request, opt.value_of("app").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_apps_exchange_safety_net_token(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1611,6 +1780,288 @@ where } } + async fn _projects_apps_generate_play_integrity_challenge(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleFirebaseAppcheckV1betaGeneratePlayIntegrityChallengeRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().apps_generate_play_integrity_challenge(request, opt.value_of("app").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_apps_play_integrity_config_batch_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().apps_play_integrity_config_batch_get(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "names" => { + call = call.add_names(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["names"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_apps_play_integrity_config_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call 
= self.hub.projects().apps_play_integrity_config_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_apps_play_integrity_config_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let 
Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "token-ttl" => Some(("tokenTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["name", "token-ttl"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleFirebaseAppcheckV1betaPlayIntegrityConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().apps_play_integrity_config_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_apps_recaptcha_config_batch_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().apps_recaptcha_config_batch_get(opt.value_of("parent").unwrap_or("")); @@ -1762,7 +2213,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1961,7 +2412,207 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_apps_recaptcha_v3_config_batch_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().apps_recaptcha_v3_config_batch_get(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "names" => { + call = call.add_names(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["names"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope 
in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_apps_recaptcha_v3_config_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().apps_recaptcha_v3_config_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_apps_recaptcha_v3_config_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "site-secret" => Some(("siteSecret", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "site-secret-set" => Some(("siteSecretSet", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "token-ttl" => Some(("tokenTtl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["name", "site-secret", "site-secret-set", "token-ttl"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleFirebaseAppcheckV1betaRecaptchaV3Config = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().apps_recaptcha_v3_config_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2159,7 +2810,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2355,7 +3006,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, 
err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2445,7 +3096,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2560,18 +3211,36 @@ where ("apps-exchange-device-check-token", Some(opt)) => { call_result = self._projects_apps_exchange_device_check_token(opt, dry_run, &mut err).await; }, + ("apps-exchange-play-integrity-token", Some(opt)) => { + call_result = self._projects_apps_exchange_play_integrity_token(opt, dry_run, &mut err).await; + }, ("apps-exchange-recaptcha-enterprise-token", Some(opt)) => { call_result = self._projects_apps_exchange_recaptcha_enterprise_token(opt, dry_run, &mut err).await; }, ("apps-exchange-recaptcha-token", Some(opt)) => { call_result = self._projects_apps_exchange_recaptcha_token(opt, dry_run, &mut err).await; }, + ("apps-exchange-recaptcha-v3-token", Some(opt)) => { + call_result = self._projects_apps_exchange_recaptcha_v3_token(opt, dry_run, &mut err).await; + }, ("apps-exchange-safety-net-token", Some(opt)) => { call_result = self._projects_apps_exchange_safety_net_token(opt, dry_run, &mut err).await; }, ("apps-generate-app-attest-challenge", Some(opt)) => { call_result = self._projects_apps_generate_app_attest_challenge(opt, dry_run, &mut err).await; }, + ("apps-generate-play-integrity-challenge", Some(opt)) => { + call_result = self._projects_apps_generate_play_integrity_challenge(opt, dry_run, &mut err).await; + }, + ("apps-play-integrity-config-batch-get", Some(opt)) => { + call_result = self._projects_apps_play_integrity_config_batch_get(opt, dry_run, &mut err).await; + }, + ("apps-play-integrity-config-get", Some(opt)) => { + call_result = self._projects_apps_play_integrity_config_get(opt, dry_run, &mut err).await; + }, + 
("apps-play-integrity-config-patch", Some(opt)) => { + call_result = self._projects_apps_play_integrity_config_patch(opt, dry_run, &mut err).await; + }, ("apps-recaptcha-config-batch-get", Some(opt)) => { call_result = self._projects_apps_recaptcha_config_batch_get(opt, dry_run, &mut err).await; }, @@ -2590,6 +3259,15 @@ where ("apps-recaptcha-enterprise-config-patch", Some(opt)) => { call_result = self._projects_apps_recaptcha_enterprise_config_patch(opt, dry_run, &mut err).await; }, + ("apps-recaptcha-v3-config-batch-get", Some(opt)) => { + call_result = self._projects_apps_recaptcha_v3_config_batch_get(opt, dry_run, &mut err).await; + }, + ("apps-recaptcha-v3-config-get", Some(opt)) => { + call_result = self._projects_apps_recaptcha_v3_config_get(opt, dry_run, &mut err).await; + }, + ("apps-recaptcha-v3-config-patch", Some(opt)) => { + call_result = self._projects_apps_recaptcha_v3_config_patch(opt, dry_run, &mut err).await; + }, ("apps-safety-net-config-batch-get", Some(opt)) => { call_result = self._projects_apps_safety_net_config_batch_get(opt, dry_run, &mut err).await; }, @@ -2715,7 +3393,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'apps-app-attest-config-batch-get', 'apps-app-attest-config-get', 'apps-app-attest-config-patch', 'apps-debug-tokens-create', 'apps-debug-tokens-delete', 'apps-debug-tokens-get', 'apps-debug-tokens-list', 'apps-debug-tokens-patch', 'apps-device-check-config-batch-get', 'apps-device-check-config-get', 'apps-device-check-config-patch', 'apps-exchange-app-attest-assertion', 'apps-exchange-app-attest-attestation', 'apps-exchange-custom-token', 'apps-exchange-debug-token', 'apps-exchange-device-check-token', 'apps-exchange-recaptcha-enterprise-token', 'apps-exchange-recaptcha-token', 'apps-exchange-safety-net-token', 'apps-generate-app-attest-challenge', 'apps-recaptcha-config-batch-get', 'apps-recaptcha-config-get', 'apps-recaptcha-config-patch', 'apps-recaptcha-enterprise-config-batch-get', 
'apps-recaptcha-enterprise-config-get', 'apps-recaptcha-enterprise-config-patch', 'apps-safety-net-config-batch-get', 'apps-safety-net-config-get', 'apps-safety-net-config-patch', 'services-batch-update', 'services-get', 'services-list' and 'services-patch'", vec![ + ("projects", "methods: 'apps-app-attest-config-batch-get', 'apps-app-attest-config-get', 'apps-app-attest-config-patch', 'apps-debug-tokens-create', 'apps-debug-tokens-delete', 'apps-debug-tokens-get', 'apps-debug-tokens-list', 'apps-debug-tokens-patch', 'apps-device-check-config-batch-get', 'apps-device-check-config-get', 'apps-device-check-config-patch', 'apps-exchange-app-attest-assertion', 'apps-exchange-app-attest-attestation', 'apps-exchange-custom-token', 'apps-exchange-debug-token', 'apps-exchange-device-check-token', 'apps-exchange-play-integrity-token', 'apps-exchange-recaptcha-enterprise-token', 'apps-exchange-recaptcha-token', 'apps-exchange-recaptcha-v3-token', 'apps-exchange-safety-net-token', 'apps-generate-app-attest-challenge', 'apps-generate-play-integrity-challenge', 'apps-play-integrity-config-batch-get', 'apps-play-integrity-config-get', 'apps-play-integrity-config-patch', 'apps-recaptcha-config-batch-get', 'apps-recaptcha-config-get', 'apps-recaptcha-config-patch', 'apps-recaptcha-enterprise-config-batch-get', 'apps-recaptcha-enterprise-config-get', 'apps-recaptcha-enterprise-config-patch', 'apps-recaptcha-v3-config-batch-get', 'apps-recaptcha-v3-config-get', 'apps-recaptcha-v3-config-patch', 'apps-safety-net-config-batch-get', 'apps-safety-net-config-get', 'apps-safety-net-config-patch', 'services-batch-update', 'services-get', 'services-list' and 'services-patch'", vec![ ("apps-app-attest-config-batch-get", Some(r##"Atomically gets the AppAttestConfigs for the specified list of apps."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-app-attest-config-batch-get", @@ -2983,7 +3661,7 @@ async fn main() { Some(false)), ]), 
("apps-exchange-app-attest-assertion", - Some(r##"Accepts an App Attest assertion and an artifact previously obtained from ExchangeAppAttestAttestation and verifies those with Apple. If valid, returns an App Check token encapsulated in an AttestationTokenResponse."##), + Some(r##"Accepts an App Attest assertion and an artifact previously obtained from ExchangeAppAttestAttestation and verifies those with Apple. If valid, returns an AppCheckToken."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-app-attest-assertion", vec![ (Some(r##"app"##), @@ -3011,7 +3689,7 @@ async fn main() { Some(false)), ]), ("apps-exchange-app-attest-attestation", - Some(r##"Accepts an App Attest CBOR attestation and verifies it with Apple using your preconfigured team and bundle IDs. If valid, returns an attestation artifact that can later be exchanged for an AttestationTokenResponse using ExchangeAppAttestAssertion. For convenience and performance, this method's response object will also contain an App Check token encapsulated in an AttestationTokenResponse (if the verification is successful)."##), + Some(r##"Accepts an App Attest CBOR attestation and verifies it with Apple using your preconfigured team and bundle IDs. If valid, returns an attestation artifact that can later be exchanged for an AppCheckToken using ExchangeAppAttestAssertion. For convenience and performance, this method's response object will also contain an AppCheckToken (if the verification is successful)."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-app-attest-attestation", vec![ (Some(r##"app"##), @@ -3039,7 +3717,7 @@ async fn main() { Some(false)), ]), ("apps-exchange-custom-token", - Some(r##"Validates a custom token signed using your project's Admin SDK service account credentials. 
If valid, returns an App Check token encapsulated in an AttestationTokenResponse."##), + Some(r##"Validates a custom token signed using your project's Admin SDK service account credentials. If valid, returns an AppCheckToken."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-custom-token", vec![ (Some(r##"app"##), @@ -3067,7 +3745,7 @@ async fn main() { Some(false)), ]), ("apps-exchange-debug-token", - Some(r##"Validates a debug token secret that you have previously created using CreateDebugToken. If valid, returns an App Check token encapsulated in an AttestationTokenResponse. Note that a restrictive quota is enforced on this method to prevent accidental exposure of the app to abuse."##), + Some(r##"Validates a debug token secret that you have previously created using CreateDebugToken. If valid, returns an AppCheckToken. Note that a restrictive quota is enforced on this method to prevent accidental exposure of the app to abuse."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-debug-token", vec![ (Some(r##"app"##), @@ -3095,7 +3773,7 @@ async fn main() { Some(false)), ]), ("apps-exchange-device-check-token", - Some(r##"Accepts a [`device_token`](https://developer.apple.com/documentation/devicecheck/dcdevice) issued by DeviceCheck, and attempts to validate it with Apple. If valid, returns an App Check token encapsulated in an AttestationTokenResponse."##), + Some(r##"Accepts a [`device_token`](https://developer.apple.com/documentation/devicecheck/dcdevice) issued by DeviceCheck, and attempts to validate it with Apple. 
If valid, returns an AppCheckToken."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-device-check-token", vec![ (Some(r##"app"##), @@ -3116,6 +3794,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-exchange-play-integrity-token", + Some(r##"Validates an [integrity verdict response token from Play Integrity](https://developer.android.com/google/play/integrity/verdict#decrypt-verify). If valid, returns an AppCheckToken."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-play-integrity-token", + vec![ + (Some(r##"app"##), + None, + Some(r##"Required. The relative resource name of the Android app, in the format: ``` projects/{project_number}/apps/{app_id} ``` If necessary, the `project_number` element can be replaced with the project ID of the Firebase project. Learn more about using project identifiers in Google's [AIP 2510](https://google.aip.dev/cloud/2510) standard."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3123,7 +3829,7 @@ async fn main() { Some(false)), ]), ("apps-exchange-recaptcha-enterprise-token", - Some(r##"Validates a [reCAPTCHA Enterprise response token](https://cloud.google.com/recaptcha-enterprise/docs/create-assessment#retrieve_token). 
If valid, returns an App Check token encapsulated in an AttestationTokenResponse."##), + Some(r##"Validates a [reCAPTCHA Enterprise response token](https://cloud.google.com/recaptcha-enterprise/docs/create-assessment#retrieve_token). If valid, returns an App Check token AppCheckToken."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-recaptcha-enterprise-token", vec![ (Some(r##"app"##), @@ -3151,7 +3857,7 @@ async fn main() { Some(false)), ]), ("apps-exchange-recaptcha-token", - Some(r##"Validates a [reCAPTCHA v3 response token](https://developers.google.com/recaptcha/docs/v3). If valid, returns an App Check token encapsulated in an AttestationTokenResponse."##), + Some(r##"Validates a [reCAPTCHA v3 response token](https://developers.google.com/recaptcha/docs/v3). If valid, returns an AppCheckToken."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-recaptcha-token", vec![ (Some(r##"app"##), @@ -3172,6 +3878,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-exchange-recaptcha-v3-token", + Some(r##"Validates a [reCAPTCHA v3 response token](https://developers.google.com/recaptcha/docs/v3). If valid, returns an AppCheckToken."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-recaptcha-v3-token", + vec![ + (Some(r##"app"##), + None, + Some(r##"Required. The relative resource name of the web app, in the format: ``` projects/{project_number}/apps/{app_id} ``` If necessary, the `project_number` element can be replaced with the project ID of the Firebase project. 
Learn more about using project identifiers in Google's [AIP 2510](https://google.aip.dev/cloud/2510) standard."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3179,7 +3913,7 @@ async fn main() { Some(false)), ]), ("apps-exchange-safety-net-token", - Some(r##"Validates a [SafetyNet token](https://developer.android.com/training/safetynet/attestation#request-attestation-step). If valid, returns an App Check token encapsulated in an AttestationTokenResponse."##), + Some(r##"Validates a [SafetyNet token](https://developer.android.com/training/safetynet/attestation#request-attestation-step). If valid, returns an AppCheckToken."##), "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-exchange-safety-net-token", vec![ (Some(r##"app"##), @@ -3228,6 +3962,106 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-generate-play-integrity-challenge", + Some(r##"Generates a challenge that protects the integrity of an immediately following integrity verdict request to the Play Integrity API. The next call to ExchangePlayIntegrityToken using the resulting integrity token will verify the presence and validity of the challenge. A challenge should not be reused for multiple calls."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-generate-play-integrity-challenge", + vec![ + (Some(r##"app"##), + None, + Some(r##"Required. 
The relative resource name of the app, in the format: ``` projects/{project_number}/apps/{app_id} ``` If necessary, the `project_number` element can be replaced with the project ID of the Firebase project. Learn more about using project identifiers in Google's [AIP 2510](https://google.aip.dev/cloud/2510) standard."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-play-integrity-config-batch-get", + Some(r##"Atomically gets the PlayIntegrityConfigs for the specified list of apps."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-play-integrity-config-batch-get", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent project name shared by all PlayIntegrityConfigs being retrieved, in the format ``` projects/{project_number} ``` The parent collection in the `name` field of any resource being retrieved must match this field, or the entire batch fails."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-play-integrity-config-get", + Some(r##"Gets the PlayIntegrityConfig for the specified app."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-play-integrity-config-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The relative resource name of the PlayIntegrityConfig, in the format: ``` projects/{project_number}/apps/{app_id}/playIntegrityConfig ```"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-play-integrity-config-patch", + Some(r##"Updates the PlayIntegrityConfig for the specified app. While this configuration is incomplete or invalid, the app will be unable to exchange Play Integrity tokens for App Check tokens."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-play-integrity-config-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The relative resource name of the Play Integrity configuration object, in the format: ``` projects/{project_number}/apps/{app_id}/playIntegrityConfig ```"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3372,6 +4206,78 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-recaptcha-v3-config-batch-get", + Some(r##"Atomically gets the RecaptchaV3Configs for the specified list of apps. For security reasons, the `site_secret` field is never populated in the response."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-recaptcha-v3-config-batch-get", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent project name shared by all RecaptchaV3Configs being retrieved, in the format ``` projects/{project_number} ``` The parent collection in the `name` field of any resource being retrieved must match this field, or the entire batch fails."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-recaptcha-v3-config-get", + Some(r##"Gets the RecaptchaV3Config for the specified app. 
For security reasons, the `site_secret` field is never populated in the response."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-recaptcha-v3-config-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The relative resource name of the RecaptchaV3Config, in the format: ``` projects/{project_number}/apps/{app_id}/recaptchaV3Config ```"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("apps-recaptcha-v3-config-patch", + Some(r##"Updates the RecaptchaV3Config for the specified app. While this configuration is incomplete or invalid, the app will be unable to exchange reCAPTCHA V3 tokens for App Check tokens. For security reasons, the `site_secret` field is never populated in the response."##), + "Details at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli/projects_apps-recaptcha-v3-config-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The relative resource name of the reCAPTCHA v3 configuration object, in the format: ``` projects/{project_number}/apps/{app_id}/recaptchaV3Config ```"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3556,7 +4462,7 @@ async fn main() { let mut app = App::new("firebaseappcheck1-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230116") .about("Firebase App Check works alongside other Firebase services to help protect your backend resources from abuse, such as billing fraud or phishing.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebaseappcheck1_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/firebaseappcheck1_beta/Cargo.toml b/gen/firebaseappcheck1_beta/Cargo.toml index 20ba69f35d..50a922767e 100644 --- a/gen/firebaseappcheck1_beta/Cargo.toml +++ b/gen/firebaseappcheck1_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firebaseappcheck1_beta" -version = "5.0.2-beta-1+20230116" +version = "5.0.2+20230116" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebaseappcheck (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseappcheck1_beta" homepage = "https://firebase.google.com/docs/app-check" -documentation = "https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116" +documentation = "https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116" license = "MIT" keywords = ["firebaseappcheck", "google", "protocol", "web", "api"] autobins = false diff --git 
a/gen/firebaseappcheck1_beta/README.md b/gen/firebaseappcheck1_beta/README.md index 1b82655105..14a400ed54 100644 --- a/gen/firebaseappcheck1_beta/README.md +++ b/gen/firebaseappcheck1_beta/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-firebaseappcheck1_beta` library allows access to all features of the *Google Firebaseappcheck* service. -This documentation was generated from *Firebaseappcheck* crate version *5.0.2-beta-1+20230116*, where *20230116* is the exact revision of the *firebaseappcheck:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firebaseappcheck* crate version *5.0.2+20230116*, where *20230116* is the exact revision of the *firebaseappcheck:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firebaseappcheck* *v1_beta* API can be found at the [official documentation site](https://firebase.google.com/docs/app-check). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/Firebaseappcheck) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/Firebaseappcheck) ... 
* jwks - * [*get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::JwkGetCall) + * [*get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::JwkGetCall) * projects - * [*apps app attest config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppAppAttestConfigBatchGetCall), [*apps app attest config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppAppAttestConfigGetCall), [*apps app attest config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppAppAttestConfigPatchCall), [*apps debug tokens create*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenCreateCall), [*apps debug tokens delete*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenDeleteCall), [*apps debug tokens get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenGetCall), [*apps debug tokens list*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenListCall), [*apps debug tokens patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenPatchCall), [*apps device check config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppDeviceCheckConfigBatchGetCall), [*apps device check config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppDeviceCheckConfigGetCall), [*apps device check config 
patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppDeviceCheckConfigPatchCall), [*apps exchange app attest assertion*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeAppAttestAssertionCall), [*apps exchange app attest attestation*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeAppAttestAttestationCall), [*apps exchange custom token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeCustomTokenCall), [*apps exchange debug token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeDebugTokenCall), [*apps exchange device check token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeDeviceCheckTokenCall), [*apps exchange play integrity token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangePlayIntegrityTokenCall), [*apps exchange recaptcha enterprise token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeRecaptchaEnterpriseTokenCall), [*apps exchange recaptcha token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeRecaptchaTokenCall), [*apps exchange recaptcha v3 token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeRecaptchaV3TokenCall), [*apps exchange safety net token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeSafetyNetTokenCall), [*apps generate app attest 
challenge*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppGenerateAppAttestChallengeCall), [*apps generate play integrity challenge*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppGeneratePlayIntegrityChallengeCall), [*apps play integrity config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppPlayIntegrityConfigBatchGetCall), [*apps play integrity config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppPlayIntegrityConfigGetCall), [*apps play integrity config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppPlayIntegrityConfigPatchCall), [*apps recaptcha config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaConfigBatchGetCall), [*apps recaptcha config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaConfigGetCall), [*apps recaptcha config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaConfigPatchCall), [*apps recaptcha enterprise config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaEnterpriseConfigBatchGetCall), [*apps recaptcha enterprise config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaEnterpriseConfigGetCall), [*apps recaptcha enterprise config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaEnterpriseConfigPatchCall), 
[*apps recaptcha v3 config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaV3ConfigBatchGetCall), [*apps recaptcha v3 config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaV3ConfigGetCall), [*apps recaptcha v3 config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaV3ConfigPatchCall), [*apps safety net config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppSafetyNetConfigBatchGetCall), [*apps safety net config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppSafetyNetConfigGetCall), [*apps safety net config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectAppSafetyNetConfigPatchCall), [*services batch update*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectServiceBatchUpdateCall), [*services get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectServiceGetCall), [*services list*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectServiceListCall) and [*services patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/api::ProjectServicePatchCall) + * [*apps app attest config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppAppAttestConfigBatchGetCall), [*apps app attest config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppAppAttestConfigGetCall), 
[*apps app attest config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppAppAttestConfigPatchCall), [*apps debug tokens create*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenCreateCall), [*apps debug tokens delete*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenDeleteCall), [*apps debug tokens get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenGetCall), [*apps debug tokens list*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenListCall), [*apps debug tokens patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppDebugTokenPatchCall), [*apps device check config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppDeviceCheckConfigBatchGetCall), [*apps device check config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppDeviceCheckConfigGetCall), [*apps device check config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppDeviceCheckConfigPatchCall), [*apps exchange app attest assertion*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeAppAttestAssertionCall), [*apps exchange app attest attestation*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeAppAttestAttestationCall), [*apps exchange custom token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeCustomTokenCall), [*apps exchange 
debug token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeDebugTokenCall), [*apps exchange device check token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeDeviceCheckTokenCall), [*apps exchange play integrity token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangePlayIntegrityTokenCall), [*apps exchange recaptcha enterprise token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeRecaptchaEnterpriseTokenCall), [*apps exchange recaptcha token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeRecaptchaTokenCall), [*apps exchange recaptcha v3 token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeRecaptchaV3TokenCall), [*apps exchange safety net token*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppExchangeSafetyNetTokenCall), [*apps generate app attest challenge*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppGenerateAppAttestChallengeCall), [*apps generate play integrity challenge*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppGeneratePlayIntegrityChallengeCall), [*apps play integrity config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppPlayIntegrityConfigBatchGetCall), [*apps play integrity config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppPlayIntegrityConfigGetCall), [*apps play integrity config 
patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppPlayIntegrityConfigPatchCall), [*apps recaptcha config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaConfigBatchGetCall), [*apps recaptcha config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaConfigGetCall), [*apps recaptcha config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaConfigPatchCall), [*apps recaptcha enterprise config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaEnterpriseConfigBatchGetCall), [*apps recaptcha enterprise config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaEnterpriseConfigGetCall), [*apps recaptcha enterprise config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaEnterpriseConfigPatchCall), [*apps recaptcha v3 config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaV3ConfigBatchGetCall), [*apps recaptcha v3 config get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaV3ConfigGetCall), [*apps recaptcha v3 config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppRecaptchaV3ConfigPatchCall), [*apps safety net config batch get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppSafetyNetConfigBatchGetCall), [*apps safety net config 
get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppSafetyNetConfigGetCall), [*apps safety net config patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectAppSafetyNetConfigPatchCall), [*services batch update*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectServiceBatchUpdateCall), [*services get*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectServiceGetCall), [*services list*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectServiceListCall) and [*services patch*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/api::ProjectServicePatchCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/Firebaseappcheck)** +* **[Hub](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/Firebaseappcheck)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::MethodsBuilder) which in turn + allow 
access to individual [*Call Builders*](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::Part)** + * **[Parts](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::RequestValue) and -[decodable](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::RequestValue) and +[decodable](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid.
-Most optionals are are considered [Parts](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebaseappcheck1_beta/5.0.2-beta-1+20230116/google_firebaseappcheck1_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebaseappcheck1_beta/5.0.2+20230116/google_firebaseappcheck1_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/firebaseappcheck1_beta/src/api.rs b/gen/firebaseappcheck1_beta/src/api.rs index 5ca840b9da..727691257f 100644 --- a/gen/firebaseappcheck1_beta/src/api.rs +++ b/gen/firebaseappcheck1_beta/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Firebaseappcheck { Firebaseappcheck { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebaseappcheck.googleapis.com/".to_string(), _root_url: "https://firebaseappcheck.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> Firebaseappcheck { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebaseappcheck1_beta/src/client.rs b/gen/firebaseappcheck1_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebaseappcheck1_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - 
-/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebaseappcheck1_beta/src/lib.rs b/gen/firebaseappcheck1_beta/src/lib.rs index 74d29bb13e..75a672ff80 100644 --- a/gen/firebaseappcheck1_beta/src/lib.rs +++ b/gen/firebaseappcheck1_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebaseappcheck* crate version *5.0.2-beta-1+20230116*, where *20230116* is the exact revision of the *firebaseappcheck:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebaseappcheck* crate version *5.0.2+20230116*, where *20230116* is the exact revision of the *firebaseappcheck:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebaseappcheck* *v1_beta* API can be found at the //! [official documentation site](https://firebase.google.com/docs/app-check). diff --git a/gen/firebasedatabase1_beta-cli/Cargo.toml b/gen/firebasedatabase1_beta-cli/Cargo.toml index 9acc111bd7..1e59bd32fc 100644 --- a/gen/firebasedatabase1_beta-cli/Cargo.toml +++ b/gen/firebasedatabase1_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebasedatabase1_beta-cli" -version = "4.0.1+20220304" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Realtime Database (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasedatabase1_beta-cli" @@ -20,13 +20,13 @@ name = "firebasedatabase1-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebasedatabase1_beta] path = "../firebasedatabase1_beta" -version = "4.0.1+20220304" +version = "5.0.2+20230123" + diff --git a/gen/firebasedatabase1_beta-cli/README.md b/gen/firebasedatabase1_beta-cli/README.md index 
0c8f67c0d5..dc2a2ead91 100644 --- a/gen/firebasedatabase1_beta-cli/README.md +++ b/gen/firebasedatabase1_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebase Realtime Database* API at revision *20220304*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebase Realtime Database* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash firebasedatabase1-beta [options] @@ -36,6 +36,7 @@ firebasedatabase1-beta [options] locations-instances-get [-p ]... [-o ] locations-instances-list [-p ]... [-o ] locations-instances-reenable (-r )... [-p ]... [-o ] + locations-instances-undelete (-r )... [-p ]... [-o ] firebasedatabase1-beta --help Configuration: diff --git a/gen/firebasedatabase1_beta-cli/mkdocs.yml b/gen/firebasedatabase1_beta-cli/mkdocs.yml index f286a167cc..474154a1b5 100644 --- a/gen/firebasedatabase1_beta-cli/mkdocs.yml +++ b/gen/firebasedatabase1_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebase Realtime Database v4.0.1+20220304 +site_name: Firebase Realtime Database v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-firebasedatabase1_beta-cli site_description: A complete library to interact with Firebase Realtime Database (protocol v1beta) @@ -7,14 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebasedatabase docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- ['projects_locations-instances-disable.md', 'Projects', 'Locations Instances Disable'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- 
['projects_locations-instances-reenable.md', 'Projects', 'Locations Instances Reenable'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Disable': 'projects_locations-instances-disable.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Reenable': 'projects_locations-instances-reenable.md' + - 'Locations Instances Undelete': 'projects_locations-instances-undelete.md' theme: readthedocs diff --git a/gen/firebasedatabase1_beta-cli/src/client.rs b/gen/firebasedatabase1_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebasedatabase1_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebasedatabase1_beta-cli/src/main.rs b/gen/firebasedatabase1_beta-cli/src/main.rs index 697093438b..8f76c6b94f 100644 --- a/gen/firebasedatabase1_beta-cli/src/main.rs +++ b/gen/firebasedatabase1_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebasedatabase1_beta::{api, Error, oauth2}; +use google_firebasedatabase1_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -95,7 +94,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "database-id" => { call = call.database_id(value.unwrap_or("")); @@ -341,11 +340,14 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "show-deleted" => { + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -360,7 +362,7 @@ where 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); v } )); } } @@ -478,6 +480,90 @@ where } } + async fn _projects_locations_instances_undelete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::UndeleteDatabaseInstanceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_undelete(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + 
async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -503,6 +589,9 @@ where ("locations-instances-reenable", Some(opt)) => { call_result = self._projects_locations_instances_reenable(opt, dry_run, &mut err).await; }, + ("locations-instances-undelete", Some(opt)) => { + call_result = self._projects_locations_instances_undelete(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -582,14 +671,14 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-instances-create', 'locations-instances-delete', 'locations-instances-disable', 'locations-instances-get', 'locations-instances-list' and 'locations-instances-reenable'", vec![ + ("projects", "methods: 'locations-instances-create', 'locations-instances-delete', 'locations-instances-disable', 'locations-instances-get', 'locations-instances-list', 'locations-instances-reenable' and 'locations-instances-undelete'", vec![ ("locations-instances-create", Some(r##"Requests that a new DatabaseInstance be created. The state of a successfully created DatabaseInstance is ACTIVE. Only available for projects on the Blaze plan. Projects can be upgraded using the Cloud Billing API https://cloud.google.com/billing/reference/rest/v1/projects/updateBillingInfo. Note that it might take a few minutes for billing enablement state to propagate to Firebase systems."##), "Details at http://byron.github.io/google-apis-rs/google_firebasedatabase1_beta_cli/projects_locations-instances-create", vec![ (Some(r##"parent"##), None, - Some(r##"The parent project for which to create a database instance, in the form: `projects/{project-number}/locations/{location-id}`."##), + Some(r##"Required. 
The parent project for which to create a database instance, in the form: `projects/{project-number}/locations/{location-id}`."##), Some(true), Some(false)), @@ -612,12 +701,12 @@ async fn main() { Some(false)), ]), ("locations-instances-delete", - Some(r##"Marks a DatabaseInstance to be deleted. The DatabaseInstance will be purged within 30 days. The default database cannot be deleted. IDs for deleted database instances may never be recovered or re-used. The Database may only be deleted if it is already in a DISABLED state."##), + Some(r##"Marks a DatabaseInstance to be deleted. The DatabaseInstance will be set to the DELETED state for 20 days, and will be purged within 30 days. The default database cannot be deleted. IDs for deleted database instances may never be recovered or re-used. The Database may only be deleted if it is already in a DISABLED state."##), "Details at http://byron.github.io/google-apis-rs/google_firebasedatabase1_beta_cli/projects_locations-instances-delete", vec![ (Some(r##"name"##), None, - Some(r##"The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`"##), + Some(r##"Required. The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`"##), Some(true), Some(false)), @@ -639,7 +728,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`"##), + Some(r##"Required. 
The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`"##), Some(true), Some(false)), @@ -667,7 +756,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`. `database-id` is a globally unique identifier across all parent collections. For convenience, this method allows you to supply `-` as a wildcard character in place of specific collections under `projects` and `locations`. The resulting wildcarding form of the method is: `projects/-/locations/-/instances/{database-id}`."##), + Some(r##"Required. The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`. `database-id` is a globally unique identifier across all parent collections. For convenience, this method allows you to supply `-` as a wildcard character in place of specific collections under `projects` and `locations`. The resulting wildcarding form of the method is: `projects/-/locations/-/instances/{database-id}`."##), Some(true), Some(false)), @@ -689,7 +778,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The parent project for which to list database instances, in the form: `projects/{project-number}/locations/{location-id}` To list across all locations, use a parent in the form: `projects/{project-number}/locations/-`"##), + Some(r##"Required. 
The parent project for which to list database instances, in the form: `projects/{project-number}/locations/{location-id}` To list across all locations, use a parent in the form: `projects/{project-number}/locations/-`"##), Some(true), Some(false)), @@ -711,7 +800,35 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`"##), + Some(r##"Required. The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-undelete", + Some(r##"Restores a DatabaseInstance that was previously marked to be deleted. After the delete method is used, DatabaseInstances are set to the DELETED state for 20 days, and will be purged within 30 days. Databases in the DELETED state can be undeleted without losing any data. This method may only be used on a DatabaseInstance in the DELETED state. Purged DatabaseInstances may not be recovered."##), + "Details at http://byron.github.io/google-apis-rs/google_firebasedatabase1_beta_cli/projects_locations-instances-undelete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The fully qualified resource name of the database instance, in the form: `projects/{project-number}/locations/{location-id}/instances/{database-id}`"##), Some(true), Some(false)), @@ -739,8 +856,8 @@ async fn main() { let mut app = App::new("firebasedatabase1-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220304") - .about("The Firebase Realtime Database Management API enables programmatic provisioning and management of Realtime Database instances.") + .version("5.0.2+20230123") + .about("The Firebase Realtime Database API enables programmatic provisioning and management of Realtime Database instances.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebasedatabase1_beta_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/firebasedatabase1_beta/Cargo.toml b/gen/firebasedatabase1_beta/Cargo.toml index 0980c7fa7e..cf61423266 100644 --- a/gen/firebasedatabase1_beta/Cargo.toml +++ b/gen/firebasedatabase1_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firebasedatabase1_beta" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Realtime Database (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasedatabase1_beta" homepage = "https://firebase.google.com/docs/reference/rest/database/database-management/rest/" -documentation = "https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123" license = "MIT" keywords = ["firebasedatabase", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firebasedatabase1_beta/README.md b/gen/firebasedatabase1_beta/README.md index 17f33156c2..58164a5ea7 100644 --- a/gen/firebasedatabase1_beta/README.md +++ b/gen/firebasedatabase1_beta/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-firebasedatabase1_beta` library allows access to all features of the *Google Firebase Realtime Database* service. -This documentation was generated from *Firebase Realtime Database* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebasedatabase:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firebase Realtime Database* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebasedatabase:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firebase Realtime Database* *v1_beta* API can be found at the [official documentation site](https://firebase.google.com/docs/reference/rest/database/database-management/rest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/FirebaseRealtimeDatabase) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/FirebaseRealtimeDatabase) ... 
* projects - * [*locations instances create*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceDeleteCall), [*locations instances disable*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceDisableCall), [*locations instances get*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceListCall), [*locations instances reenable*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceReenableCall) and [*locations instances undelete*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceUndeleteCall) + * [*locations instances create*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceDeleteCall), [*locations instances disable*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceDisableCall), [*locations instances get*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceGetCall), [*locations instances 
list*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceListCall), [*locations instances reenable*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceReenableCall) and [*locations instances undelete*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/api::ProjectLocationInstanceUndeleteCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/FirebaseRealtimeDatabase)** +* **[Hub](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/FirebaseRealtimeDatabase)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::Part)** + * **[Parts](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::RequestValue) and -[decodable](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::RequestValue) and +[decodable](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebasedatabase1_beta/5.0.2-beta-1+20230123/google_firebasedatabase1_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebasedatabase1_beta/5.0.2+20230123/google_firebasedatabase1_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/firebasedatabase1_beta/src/api.rs b/gen/firebasedatabase1_beta/src/api.rs index e666fbb4ab..d447204479 100644 --- a/gen/firebasedatabase1_beta/src/api.rs +++ b/gen/firebasedatabase1_beta/src/api.rs @@ -139,7 +139,7 @@ impl<'a, S> FirebaseRealtimeDatabase { FirebaseRealtimeDatabase { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebasedatabase.googleapis.com/".to_string(), _root_url: "https://firebasedatabase.googleapis.com/".to_string(), } @@ -150,7 +150,7 @@ impl<'a, S> FirebaseRealtimeDatabase { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebasedatabase1_beta/src/client.rs b/gen/firebasedatabase1_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebasedatabase1_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebasedatabase1_beta/src/lib.rs b/gen/firebasedatabase1_beta/src/lib.rs index 4aebb784a7..30f9c5f669 100644 --- a/gen/firebasedatabase1_beta/src/lib.rs +++ b/gen/firebasedatabase1_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebase Realtime Database* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebasedatabase:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebase Realtime Database* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebasedatabase:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebase Realtime Database* *v1_beta* API can be found at the //! [official documentation site](https://firebase.google.com/docs/reference/rest/database/database-management/rest/). diff --git a/gen/firebasedynamiclinks1-cli/Cargo.toml b/gen/firebasedynamiclinks1-cli/Cargo.toml index 953dbe1d52..f237b6936b 100644 --- a/gen/firebasedynamiclinks1-cli/Cargo.toml +++ b/gen/firebasedynamiclinks1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebasedynamiclinks1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Dynamic Links (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasedynamiclinks1-cli" @@ -20,13 +20,13 @@ name = "firebasedynamiclinks1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebasedynamiclinks1] path = "../firebasedynamiclinks1" -version = "4.0.1+20220228" +version = "5.0.2+20230123" + diff --git a/gen/firebasedynamiclinks1-cli/README.md 
b/gen/firebasedynamiclinks1-cli/README.md index d6b98a90fd..d402b57eb3 100644 --- a/gen/firebasedynamiclinks1-cli/README.md +++ b/gen/firebasedynamiclinks1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebase Dynamic Links* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebase Dynamic Links* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash firebasedynamiclinks1 [options] diff --git a/gen/firebasedynamiclinks1-cli/mkdocs.yml b/gen/firebasedynamiclinks1-cli/mkdocs.yml index 1355ba0866..1bc8ce6373 100644 --- a/gen/firebasedynamiclinks1-cli/mkdocs.yml +++ b/gen/firebasedynamiclinks1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebase Dynamic Links v4.0.1+20220228 +site_name: Firebase Dynamic Links v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-firebasedynamiclinks1-cli site_description: A complete library to interact with Firebase Dynamic Links (protocol v1) @@ -7,13 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebasedynamicl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['managed-short-links_create.md', 'Managed Short Links', 'Create'] -- ['methods_get-link-stats.md', 'Methods', 'Get Link Stats'] -- ['methods_install-attribution.md', 'Methods', 'Install Attribution'] -- ['methods_reopen-attribution.md', 'Methods', 'Reopen Attribution'] -- ['short-links_create.md', 'Short Links', 'Create'] +nav: +- Home: 'index.md' +- 'Managed Short Links': + - 'Create': 'managed-short-links_create.md' +- 'Methods': + - 'Get Link Stats': 'methods_get-link-stats.md' + - 'Install Attribution': 'methods_install-attribution.md' + - 'Reopen Attribution': 'methods_reopen-attribution.md' +- 'Short Links': + - 'Create': 'short-links_create.md' theme: readthedocs diff --git a/gen/firebasedynamiclinks1-cli/src/client.rs 
b/gen/firebasedynamiclinks1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebasedynamiclinks1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebasedynamiclinks1-cli/src/main.rs b/gen/firebasedynamiclinks1-cli/src/main.rs index 7f3f2bb8c7..dff7a3965e 100644 --- a/gen/firebasedynamiclinks1-cli/src/main.rs +++ b/gen/firebasedynamiclinks1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebasedynamiclinks1::{api, Error, oauth2}; +use google_firebasedynamiclinks1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -179,7 +178,7 @@ where call = call.sdk_version(value.unwrap_or("")); }, "duration-days" => { - call = call.duration_days(value.unwrap_or("")); + call = call.duration_days( value.map(|v| arg_from_str(v, err, "duration-days", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -770,7 +769,7 @@ async fn main() { let mut app = App::new("firebasedynamiclinks1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230123") .about("Programmatically creates and manages Firebase Dynamic Links.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebasedynamiclinks1_cli") .arg(Arg::with_name("url") diff --git a/gen/firebasedynamiclinks1/Cargo.toml b/gen/firebasedynamiclinks1/Cargo.toml index cdf2f6abd0..02bbbf5ab2 100644 --- a/gen/firebasedynamiclinks1/Cargo.toml +++ b/gen/firebasedynamiclinks1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firebasedynamiclinks1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = 
["Sebastian Thiel "] description = "A complete library to interact with Firebase Dynamic Links (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasedynamiclinks1" homepage = "https://firebase.google.com/docs/dynamic-links/" -documentation = "https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123" license = "MIT" keywords = ["firebasedynamiclinks", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firebasedynamiclinks1/README.md b/gen/firebasedynamiclinks1/README.md index d69fb3b777..20dfc0b0af 100644 --- a/gen/firebasedynamiclinks1/README.md +++ b/gen/firebasedynamiclinks1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-firebasedynamiclinks1` library allows access to all features of the *Google Firebase Dynamic Links* service. -This documentation was generated from *Firebase Dynamic Links* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebasedynamiclinks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firebase Dynamic Links* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebasedynamiclinks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firebase Dynamic Links* *v1* API can be found at the [official documentation site](https://firebase.google.com/docs/dynamic-links/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/FirebaseDynamicLinks) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/FirebaseDynamicLinks) ... 
-* [managed short links](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/api::ManagedShortLink) - * [*create*](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/api::ManagedShortLinkCreateCall) +* [managed short links](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/api::ManagedShortLink) + * [*create*](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/api::ManagedShortLinkCreateCall) * short links - * [*create*](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/api::ShortLinkCreateCall) + * [*create*](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/api::ShortLinkCreateCall) Other activities are ... -* [get link stats](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/api::MethodGetLinkStatCall) -* [install attribution](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/api::MethodInstallAttributionCall) -* [reopen attribution](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/api::MethodReopenAttributionCall) +* [get link stats](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/api::MethodGetLinkStatCall) +* [install attribution](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/api::MethodInstallAttributionCall) +* [reopen attribution](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/api::MethodReopenAttributionCall) @@ -30,17 +30,17 @@ Other activities are ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/FirebaseDynamicLinks)** +* **[Hub](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/FirebaseDynamicLinks)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::CallBuilder) +* **[Resources](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::Part)** + * **[Parts](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::CallBuilder)** * operations to apply to *Resources* 
All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::RequestValue) and -[decodable](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::RequestValue) and +[decodable](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebasedynamiclinks1/5.0.2-beta-1+20230123/google_firebasedynamiclinks1/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebasedynamiclinks1/5.0.2+20230123/google_firebasedynamiclinks1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/firebasedynamiclinks1/src/api.rs b/gen/firebasedynamiclinks1/src/api.rs index 10aa6b11ff..083b6873d8 100644 --- a/gen/firebasedynamiclinks1/src/api.rs +++ b/gen/firebasedynamiclinks1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> FirebaseDynamicLinks { FirebaseDynamicLinks { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebasedynamiclinks.googleapis.com/".to_string(), _root_url: "https://firebasedynamiclinks.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> FirebaseDynamicLinks { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebasedynamiclinks1/src/client.rs b/gen/firebasedynamiclinks1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebasedynamiclinks1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebasedynamiclinks1/src/lib.rs b/gen/firebasedynamiclinks1/src/lib.rs index f7f4a58557..5a5a125633 100644 --- a/gen/firebasedynamiclinks1/src/lib.rs +++ b/gen/firebasedynamiclinks1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebase Dynamic Links* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebasedynamiclinks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebase Dynamic Links* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebasedynamiclinks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebase Dynamic Links* *v1* API can be found at the //! [official documentation site](https://firebase.google.com/docs/dynamic-links/). diff --git a/gen/firebasehosting1-cli/Cargo.toml b/gen/firebasehosting1-cli/Cargo.toml index f7ab2e182d..39b645689e 100644 --- a/gen/firebasehosting1-cli/Cargo.toml +++ b/gen/firebasehosting1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebasehosting1-cli" -version = "4.0.1+20220212" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Hosting (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasehosting1-cli" @@ -20,13 +20,13 @@ name = "firebasehosting1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebasehosting1] path = "../firebasehosting1" -version = "4.0.1+20220212" +version = "5.0.2+20230123" + diff --git a/gen/firebasehosting1-cli/README.md b/gen/firebasehosting1-cli/README.md index bc9ef7eee9..62c4928f03 100644 --- a/gen/firebasehosting1-cli/README.md +++ 
b/gen/firebasehosting1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebase Hosting* API at revision *20220212*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebase Hosting* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash firebasehosting1 [options] diff --git a/gen/firebasehosting1-cli/mkdocs.yml b/gen/firebasehosting1-cli/mkdocs.yml index 129eaddb2c..30623057af 100644 --- a/gen/firebasehosting1-cli/mkdocs.yml +++ b/gen/firebasehosting1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebase Hosting v4.0.1+20220212 +site_name: Firebase Hosting v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-firebasehosting1-cli site_description: A complete library to interact with Firebase Hosting (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebasehosting1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_list.md', 'Operations', 'List'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'List': 'operations_list.md' theme: readthedocs diff --git a/gen/firebasehosting1-cli/src/client.rs b/gen/firebasehosting1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebasehosting1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; 
-use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebasehosting1-cli/src/main.rs b/gen/firebasehosting1-cli/src/main.rs index 54daa162d9..5055e73634 100644 --- a/gen/firebasehosting1-cli/src/main.rs +++ b/gen/firebasehosting1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebasehosting1::{api, Error, oauth2}; +use google_firebasehosting1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -191,7 +190,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -414,7 +413,7 @@ async fn main() { let mut app = App::new("firebasehosting1") .author("Sebastian Thiel ") - .version("4.0.1+20220212") + .version("5.0.2+20230123") .about("The Firebase Hosting REST API enables programmatic and customizable management and deployments to your Firebase-hosted sites. 
Use this REST API to create and manage channels and sites as well as to deploy new or updated hosting configurations and content files.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebasehosting1_cli") .arg(Arg::with_name("folder") diff --git a/gen/firebasehosting1/Cargo.toml b/gen/firebasehosting1/Cargo.toml index d26ad401b7..ffbb55a0bd 100644 --- a/gen/firebasehosting1/Cargo.toml +++ b/gen/firebasehosting1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firebasehosting1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Hosting (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasehosting1" homepage = "https://firebase.google.com/docs/hosting/" -documentation = "https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-firebasehosting1/5.0.2+20230123" license = "MIT" keywords = ["firebasehosting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firebasehosting1/README.md b/gen/firebasehosting1/README.md index c92f3fb21a..f4f5209a98 100644 --- a/gen/firebasehosting1/README.md +++ b/gen/firebasehosting1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-firebasehosting1` library allows access to all features of the *Google Firebase Hosting* service. -This documentation was generated from *Firebase Hosting* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebasehosting:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firebase Hosting* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebasehosting:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Firebase Hosting* *v1* API can be found at the [official documentation site](https://firebase.google.com/docs/hosting/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/FirebaseHosting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/FirebaseHosting) ... -* [operations](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/api::Operation) - * [*cancel*](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/api::OperationCancelCall), [*delete*](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/api::OperationDeleteCall) and [*list*](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/api::OperationListCall) +* [operations](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/api::Operation) + * [*cancel*](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/api::OperationCancelCall), [*delete*](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/api::OperationDeleteCall) and [*list*](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/api::OperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/FirebaseHosting)** +* **[Hub](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/FirebaseHosting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::CallBuilder) +* **[Resources](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::Part)** + * **[Parts](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -123,17 +123,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -143,29 +143,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::RequestValue) and -[decodable](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::RequestValue) and +[decodable](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebasehosting1/5.0.2-beta-1+20230123/google_firebasehosting1/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebasehosting1/5.0.2+20230123/google_firebasehosting1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/firebasehosting1/src/api.rs b/gen/firebasehosting1/src/api.rs index 9e53584cfc..6f3e8c86db 100644 --- a/gen/firebasehosting1/src/api.rs +++ b/gen/firebasehosting1/src/api.rs @@ -100,7 +100,7 @@ impl<'a, S> FirebaseHosting { FirebaseHosting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebasehosting.googleapis.com/".to_string(), _root_url: "https://firebasehosting.googleapis.com/".to_string(), } @@ -111,7 +111,7 @@ impl<'a, S> FirebaseHosting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebasehosting1/src/client.rs b/gen/firebasehosting1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebasehosting1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebasehosting1/src/lib.rs b/gen/firebasehosting1/src/lib.rs index 2a3129c7e6..0a33e523e5 100644 --- a/gen/firebasehosting1/src/lib.rs +++ b/gen/firebasehosting1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebase Hosting* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebasehosting:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebase Hosting* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebasehosting:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebase Hosting* *v1* API can be found at the //! [official documentation site](https://firebase.google.com/docs/hosting/). diff --git a/gen/firebasehosting1_beta1-cli/Cargo.toml b/gen/firebasehosting1_beta1-cli/Cargo.toml index 26c1736ea7..5a145e3509 100644 --- a/gen/firebasehosting1_beta1-cli/Cargo.toml +++ b/gen/firebasehosting1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebasehosting1_beta1-cli" -version = "4.0.1+20220212" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Hosting (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasehosting1_beta1-cli" @@ -20,13 +20,13 @@ name = "firebasehosting1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebasehosting1_beta1] path = "../firebasehosting1_beta1" -version = "4.0.1+20220212" +version = "5.0.2+20230123" + diff --git a/gen/firebasehosting1_beta1-cli/README.md b/gen/firebasehosting1_beta1-cli/README.md index 53aa424312..bf6bde777a 100644 --- 
a/gen/firebasehosting1_beta1-cli/README.md +++ b/gen/firebasehosting1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebase Hosting* API at revision *20220212*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebase Hosting* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash firebasehosting1-beta1 [options] diff --git a/gen/firebasehosting1_beta1-cli/mkdocs.yml b/gen/firebasehosting1_beta1-cli/mkdocs.yml index 99955d8065..0ad4d6fdd4 100644 --- a/gen/firebasehosting1_beta1-cli/mkdocs.yml +++ b/gen/firebasehosting1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebase Hosting v4.0.1+20220212 +site_name: Firebase Hosting v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-firebasehosting1_beta1-cli site_description: A complete library to interact with Firebase Hosting (protocol v1beta1) @@ -7,60 +7,62 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebasehosting1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_sites-channels-create.md', 'Projects', 'Sites Channels Create'] -- ['projects_sites-channels-delete.md', 'Projects', 'Sites Channels Delete'] -- ['projects_sites-channels-get.md', 'Projects', 'Sites Channels Get'] -- ['projects_sites-channels-list.md', 'Projects', 'Sites Channels List'] -- ['projects_sites-channels-patch.md', 'Projects', 'Sites Channels Patch'] -- ['projects_sites-channels-releases-create.md', 'Projects', 'Sites Channels Releases Create'] -- ['projects_sites-channels-releases-list.md', 'Projects', 'Sites Channels Releases List'] -- ['projects_sites-create.md', 'Projects', 'Sites Create'] -- ['projects_sites-delete.md', 'Projects', 'Sites Delete'] -- ['projects_sites-domains-create.md', 'Projects', 'Sites Domains Create'] -- 
['projects_sites-domains-delete.md', 'Projects', 'Sites Domains Delete'] -- ['projects_sites-domains-get.md', 'Projects', 'Sites Domains Get'] -- ['projects_sites-domains-list.md', 'Projects', 'Sites Domains List'] -- ['projects_sites-domains-update.md', 'Projects', 'Sites Domains Update'] -- ['projects_sites-get.md', 'Projects', 'Sites Get'] -- ['projects_sites-get-config.md', 'Projects', 'Sites Get Config'] -- ['projects_sites-list.md', 'Projects', 'Sites List'] -- ['projects_sites-patch.md', 'Projects', 'Sites Patch'] -- ['projects_sites-releases-create.md', 'Projects', 'Sites Releases Create'] -- ['projects_sites-releases-list.md', 'Projects', 'Sites Releases List'] -- ['projects_sites-update-config.md', 'Projects', 'Sites Update Config'] -- ['projects_sites-versions-clone.md', 'Projects', 'Sites Versions Clone'] -- ['projects_sites-versions-create.md', 'Projects', 'Sites Versions Create'] -- ['projects_sites-versions-delete.md', 'Projects', 'Sites Versions Delete'] -- ['projects_sites-versions-files-list.md', 'Projects', 'Sites Versions Files List'] -- ['projects_sites-versions-list.md', 'Projects', 'Sites Versions List'] -- ['projects_sites-versions-patch.md', 'Projects', 'Sites Versions Patch'] -- ['projects_sites-versions-populate-files.md', 'Projects', 'Sites Versions Populate Files'] -- ['sites_channels-create.md', 'Sites', 'Channels Create'] -- ['sites_channels-delete.md', 'Sites', 'Channels Delete'] -- ['sites_channels-get.md', 'Sites', 'Channels Get'] -- ['sites_channels-list.md', 'Sites', 'Channels List'] -- ['sites_channels-patch.md', 'Sites', 'Channels Patch'] -- ['sites_channels-releases-create.md', 'Sites', 'Channels Releases Create'] -- ['sites_channels-releases-list.md', 'Sites', 'Channels Releases List'] -- ['sites_domains-create.md', 'Sites', 'Domains Create'] -- ['sites_domains-delete.md', 'Sites', 'Domains Delete'] -- ['sites_domains-get.md', 'Sites', 'Domains Get'] -- ['sites_domains-list.md', 'Sites', 'Domains List'] -- 
['sites_domains-update.md', 'Sites', 'Domains Update'] -- ['sites_get-config.md', 'Sites', 'Get Config'] -- ['sites_releases-create.md', 'Sites', 'Releases Create'] -- ['sites_releases-list.md', 'Sites', 'Releases List'] -- ['sites_update-config.md', 'Sites', 'Update Config'] -- ['sites_versions-clone.md', 'Sites', 'Versions Clone'] -- ['sites_versions-create.md', 'Sites', 'Versions Create'] -- ['sites_versions-delete.md', 'Sites', 'Versions Delete'] -- ['sites_versions-files-list.md', 'Sites', 'Versions Files List'] -- ['sites_versions-list.md', 'Sites', 'Versions List'] -- ['sites_versions-patch.md', 'Sites', 'Versions Patch'] -- ['sites_versions-populate-files.md', 'Sites', 'Versions Populate Files'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Operations Get': 'projects_operations-get.md' + - 'Sites Channels Create': 'projects_sites-channels-create.md' + - 'Sites Channels Delete': 'projects_sites-channels-delete.md' + - 'Sites Channels Get': 'projects_sites-channels-get.md' + - 'Sites Channels List': 'projects_sites-channels-list.md' + - 'Sites Channels Patch': 'projects_sites-channels-patch.md' + - 'Sites Channels Releases Create': 'projects_sites-channels-releases-create.md' + - 'Sites Channels Releases List': 'projects_sites-channels-releases-list.md' + - 'Sites Create': 'projects_sites-create.md' + - 'Sites Delete': 'projects_sites-delete.md' + - 'Sites Domains Create': 'projects_sites-domains-create.md' + - 'Sites Domains Delete': 'projects_sites-domains-delete.md' + - 'Sites Domains Get': 'projects_sites-domains-get.md' + - 'Sites Domains List': 'projects_sites-domains-list.md' + - 'Sites Domains Update': 'projects_sites-domains-update.md' + - 'Sites Get': 'projects_sites-get.md' + - 'Sites Get Config': 'projects_sites-get-config.md' + - 'Sites List': 'projects_sites-list.md' + - 'Sites Patch': 'projects_sites-patch.md' + - 'Sites Releases Create': 'projects_sites-releases-create.md' + - 'Sites Releases List': 'projects_sites-releases-list.md' + - 'Sites 
Update Config': 'projects_sites-update-config.md' + - 'Sites Versions Clone': 'projects_sites-versions-clone.md' + - 'Sites Versions Create': 'projects_sites-versions-create.md' + - 'Sites Versions Delete': 'projects_sites-versions-delete.md' + - 'Sites Versions Files List': 'projects_sites-versions-files-list.md' + - 'Sites Versions List': 'projects_sites-versions-list.md' + - 'Sites Versions Patch': 'projects_sites-versions-patch.md' + - 'Sites Versions Populate Files': 'projects_sites-versions-populate-files.md' +- 'Sites': + - 'Channels Create': 'sites_channels-create.md' + - 'Channels Delete': 'sites_channels-delete.md' + - 'Channels Get': 'sites_channels-get.md' + - 'Channels List': 'sites_channels-list.md' + - 'Channels Patch': 'sites_channels-patch.md' + - 'Channels Releases Create': 'sites_channels-releases-create.md' + - 'Channels Releases List': 'sites_channels-releases-list.md' + - 'Domains Create': 'sites_domains-create.md' + - 'Domains Delete': 'sites_domains-delete.md' + - 'Domains Get': 'sites_domains-get.md' + - 'Domains List': 'sites_domains-list.md' + - 'Domains Update': 'sites_domains-update.md' + - 'Get Config': 'sites_get-config.md' + - 'Releases Create': 'sites_releases-create.md' + - 'Releases List': 'sites_releases-list.md' + - 'Update Config': 'sites_update-config.md' + - 'Versions Clone': 'sites_versions-clone.md' + - 'Versions Create': 'sites_versions-create.md' + - 'Versions Delete': 'sites_versions-delete.md' + - 'Versions Files List': 'sites_versions-files-list.md' + - 'Versions List': 'sites_versions-list.md' + - 'Versions Patch': 'sites_versions-patch.md' + - 'Versions Populate Files': 'sites_versions-populate-files.md' theme: readthedocs diff --git a/gen/firebasehosting1_beta1-cli/src/client.rs b/gen/firebasehosting1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebasehosting1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use 
clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebasehosting1_beta1-cli/src/main.rs b/gen/firebasehosting1_beta1-cli/src/main.rs index dd7cc46e46..bd50b3a631 100644 --- a/gen/firebasehosting1_beta1-cli/src/main.rs +++ b/gen/firebasehosting1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebasehosting1_beta1::{api, Error, oauth2}; +use google_firebasehosting1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -337,7 +336,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -457,7 +456,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -628,7 +627,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1036,7 +1035,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1299,7 +1298,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1392,7 +1391,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1563,7 +1562,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1653,7 +1652,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1850,7 +1849,7 @@ where call = call.version_id(value.unwrap_or("")); }, "size-bytes" => { - call = call.size_bytes(value.unwrap_or("")); + call = call.size_bytes( value.map(|v| arg_from_str(v, err, "size-bytes", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1964,7 +1963,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2023,7 +2022,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2132,7 +2131,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2500,7 +2499,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2620,7 +2619,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2791,7 +2790,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3054,7 +3053,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, 
"page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3377,7 +3376,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3467,7 +3466,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3664,7 +3663,7 @@ where call = call.version_id(value.unwrap_or("")); }, "size-bytes" => { - call = call.size_bytes(value.unwrap_or("")); + call = call.size_bytes( value.map(|v| arg_from_str(v, err, "size-bytes", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3778,7 +3777,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3837,7 +3836,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3946,7 +3945,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4769,7 +4768,7 @@ async 
fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Output only. The fully-qualified resource name of the Hosting site, in the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/projects/api/reference/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its [`ProjectId`](https://firebase.google.com/docs/projects/api/reference/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id). Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510)."##), + Some(r##"Output only. The fully-qualified resource name of the Hosting site, in the format: projects/PROJECT_IDENTIFIER/sites/SITE_ID PROJECT_IDENTIFIER: the Firebase project's [`ProjectNumber`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_number) ***(recommended)*** or its [`ProjectId`](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects#FirebaseProject.FIELDS.project_id). Learn more about using project identifiers in Google's [AIP 2510 standard](https://google.aip.dev/cloud/2510)."##), Some(true), Some(false)), @@ -5628,7 +5627,7 @@ async fn main() { let mut app = App::new("firebasehosting1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220212") + .version("5.0.2+20230123") .about("The Firebase Hosting REST API enables programmatic and customizable management and deployments to your Firebase-hosted sites. 
Use this REST API to create and manage channels and sites as well as to deploy new or updated hosting configurations and content files.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebasehosting1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/firebasehosting1_beta1/Cargo.toml b/gen/firebasehosting1_beta1/Cargo.toml index fe162d4626..740f3757f8 100644 --- a/gen/firebasehosting1_beta1/Cargo.toml +++ b/gen/firebasehosting1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firebasehosting1_beta1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Hosting (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasehosting1_beta1" homepage = "https://firebase.google.com/docs/hosting/" -documentation = "https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123" license = "MIT" keywords = ["firebasehosting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firebasehosting1_beta1/README.md b/gen/firebasehosting1_beta1/README.md index 034a91ad40..bf2d24fa13 100644 --- a/gen/firebasehosting1_beta1/README.md +++ b/gen/firebasehosting1_beta1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-firebasehosting1_beta1` library allows access to all features of the *Google Firebase Hosting* service. -This documentation was generated from *Firebase Hosting* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebasehosting:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Firebase Hosting* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebasehosting:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firebase Hosting* *v1_beta1* API can be found at the [official documentation site](https://firebase.google.com/docs/hosting/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/FirebaseHosting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/FirebaseHosting) ... * projects - * [*operations get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectOperationGetCall), [*sites channels create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelCreateCall), [*sites channels delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelDeleteCall), [*sites channels get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelGetCall), [*sites channels list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelListCall), [*sites channels patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelPatchCall), [*sites channels releases create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelReleaseCreateCall), [*sites channels releases 
list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelReleaseListCall), [*sites create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteCreateCall), [*sites delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteDeleteCall), [*sites domains create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainCreateCall), [*sites domains delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainDeleteCall), [*sites domains get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainGetCall), [*sites domains list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainListCall), [*sites domains update*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainUpdateCall), [*sites get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteGetCall), [*sites get config*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteGetConfigCall), [*sites list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteListCall), [*sites patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSitePatchCall), [*sites releases create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteReleaseCreateCall), [*sites releases 
list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteReleaseListCall), [*sites update config*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteUpdateConfigCall), [*sites versions clone*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionCloneCall), [*sites versions create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionCreateCall), [*sites versions delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionDeleteCall), [*sites versions files list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionFileListCall), [*sites versions list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionListCall), [*sites versions patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionPatchCall) and [*sites versions populate files*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionPopulateFileCall) -* [sites](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::Site) - * [*channels create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteChannelCreateCall), [*channels delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteChannelDeleteCall), [*channels get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteChannelGetCall), [*channels 
list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteChannelListCall), [*channels patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteChannelPatchCall), [*channels releases create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteChannelReleaseCreateCall), [*channels releases list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteChannelReleaseListCall), [*domains create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteDomainCreateCall), [*domains delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteDomainDeleteCall), [*domains get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteDomainGetCall), [*domains list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteDomainListCall), [*domains update*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteDomainUpdateCall), [*get config*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteGetConfigCall), [*releases create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteReleaseCreateCall), [*releases list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteReleaseListCall), [*update config*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteUpdateConfigCall), [*versions 
clone*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteVersionCloneCall), [*versions create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteVersionCreateCall), [*versions delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteVersionDeleteCall), [*versions files list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteVersionFileListCall), [*versions list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteVersionListCall), [*versions patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteVersionPatchCall) and [*versions populate files*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/api::SiteVersionPopulateFileCall) + * [*operations get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectOperationGetCall), [*sites channels create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelCreateCall), [*sites channels delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelDeleteCall), [*sites channels get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelGetCall), [*sites channels list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelListCall), [*sites channels patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelPatchCall), [*sites channels releases 
create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelReleaseCreateCall), [*sites channels releases list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteChannelReleaseListCall), [*sites create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteCreateCall), [*sites delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteDeleteCall), [*sites domains create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainCreateCall), [*sites domains delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainDeleteCall), [*sites domains get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainGetCall), [*sites domains list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainListCall), [*sites domains update*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteDomainUpdateCall), [*sites get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteGetCall), [*sites get config*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteGetConfigCall), [*sites list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteListCall), [*sites patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSitePatchCall), [*sites releases 
create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteReleaseCreateCall), [*sites releases list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteReleaseListCall), [*sites update config*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteUpdateConfigCall), [*sites versions clone*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionCloneCall), [*sites versions create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionCreateCall), [*sites versions delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionDeleteCall), [*sites versions files list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionFileListCall), [*sites versions list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionListCall), [*sites versions patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionPatchCall) and [*sites versions populate files*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::ProjectSiteVersionPopulateFileCall) +* [sites](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::Site) + * [*channels create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteChannelCreateCall), [*channels delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteChannelDeleteCall), [*channels 
get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteChannelGetCall), [*channels list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteChannelListCall), [*channels patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteChannelPatchCall), [*channels releases create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteChannelReleaseCreateCall), [*channels releases list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteChannelReleaseListCall), [*domains create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteDomainCreateCall), [*domains delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteDomainDeleteCall), [*domains get*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteDomainGetCall), [*domains list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteDomainListCall), [*domains update*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteDomainUpdateCall), [*get config*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteGetConfigCall), [*releases create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteReleaseCreateCall), [*releases list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteReleaseListCall), [*update config*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteUpdateConfigCall), [*versions 
clone*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteVersionCloneCall), [*versions create*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteVersionCreateCall), [*versions delete*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteVersionDeleteCall), [*versions files list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteVersionFileListCall), [*versions list*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteVersionListCall), [*versions patch*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteVersionPatchCall) and [*versions populate files*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/api::SiteVersionPopulateFileCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/FirebaseHosting)** +* **[Hub](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/FirebaseHosting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -148,17 +148,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -168,29 +168,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebasehosting1_beta1/5.0.2-beta-1+20230123/google_firebasehosting1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebasehosting1_beta1/5.0.2+20230123/google_firebasehosting1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/firebasehosting1_beta1/src/api.rs b/gen/firebasehosting1_beta1/src/api.rs index 07120f477c..d62ec6f033 100644 --- a/gen/firebasehosting1_beta1/src/api.rs +++ b/gen/firebasehosting1_beta1/src/api.rs @@ -134,7 +134,7 @@ impl<'a, S> FirebaseHosting { FirebaseHosting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebasehosting.googleapis.com/".to_string(), _root_url: "https://firebasehosting.googleapis.com/".to_string(), } @@ -148,7 +148,7 @@ impl<'a, S> FirebaseHosting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebasehosting1_beta1/src/client.rs b/gen/firebasehosting1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebasehosting1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebasehosting1_beta1/src/lib.rs b/gen/firebasehosting1_beta1/src/lib.rs index 3cb338b3aa..4548df267d 100644 --- a/gen/firebasehosting1_beta1/src/lib.rs +++ b/gen/firebasehosting1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebase Hosting* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebasehosting:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebase Hosting* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebasehosting:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebase Hosting* *v1_beta1* API can be found at the //! [official documentation site](https://firebase.google.com/docs/hosting/). diff --git a/gen/firebaseml1-cli/Cargo.toml b/gen/firebaseml1-cli/Cargo.toml index 720cdb43b4..158c8d3211 100644 --- a/gen/firebaseml1-cli/Cargo.toml +++ b/gen/firebaseml1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebaseml1-cli" -version = "4.0.1+20220302" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase ML (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseml1-cli" @@ -20,13 +20,13 @@ name = "firebaseml1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebaseml1] path = "../firebaseml1" -version = "4.0.1+20220302" +version = "5.0.2+20230123" + diff --git a/gen/firebaseml1-cli/README.md b/gen/firebaseml1-cli/README.md index 897239f134..4341628886 100644 --- a/gen/firebaseml1-cli/README.md +++ b/gen/firebaseml1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebase ML* API at revision *20220302*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebase ML* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash firebaseml1 [options] diff --git a/gen/firebaseml1-cli/mkdocs.yml b/gen/firebaseml1-cli/mkdocs.yml index fa0deef8ba..a8e93213af 100644 --- a/gen/firebaseml1-cli/mkdocs.yml +++ b/gen/firebaseml1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebase ML v4.0.1+20220302 +site_name: Firebase ML v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-firebaseml1-cli site_description: A complete library to interact with Firebase ML (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseml1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_list.md', 'Operations', 'List'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'List': 'operations_list.md' theme: readthedocs diff --git a/gen/firebaseml1-cli/src/client.rs b/gen/firebaseml1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebaseml1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, 
- Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebaseml1-cli/src/main.rs b/gen/firebaseml1-cli/src/main.rs index 3f3580dc18..e3440f7090 100644 --- a/gen/firebaseml1-cli/src/main.rs +++ b/gen/firebaseml1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebaseml1::{api, Error, oauth2}; +use google_firebaseml1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -197,7 +196,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -423,7 +422,7 @@ async fn main() { let mut app = App::new("firebaseml1") .author("Sebastian Thiel ") - .version("4.0.1+20220302") + .version("5.0.2+20230123") .about("Access custom machine learning models hosted via Firebase ML.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebaseml1_cli") .arg(Arg::with_name("url") diff --git a/gen/firebaseml1/Cargo.toml b/gen/firebaseml1/Cargo.toml index 7ab8f47ef0..2d729d33ed 100644 --- a/gen/firebaseml1/Cargo.toml +++ b/gen/firebaseml1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firebaseml1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase ML 
(protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseml1" homepage = "https://firebase.google.com" -documentation = "https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-firebaseml1/5.0.2+20230123" license = "MIT" keywords = ["firebaseml", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firebaseml1/README.md b/gen/firebaseml1/README.md index 5a60389f58..03942414f8 100644 --- a/gen/firebaseml1/README.md +++ b/gen/firebaseml1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-firebaseml1` library allows access to all features of the *Google Firebase ML* service. -This documentation was generated from *Firebase ML* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebaseml:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firebase ML* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebaseml:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firebase ML* *v1* API can be found at the [official documentation site](https://firebase.google.com). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/FirebaseML) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/FirebaseML) ... 
-* [operations](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/api::Operation) - * [*cancel*](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/api::OperationCancelCall), [*delete*](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/api::OperationDeleteCall) and [*list*](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/api::OperationListCall) +* [operations](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/api::Operation) + * [*cancel*](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/api::OperationCancelCall), [*delete*](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/api::OperationDeleteCall) and [*list*](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/api::OperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/FirebaseML)** +* **[Hub](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/FirebaseML)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::Part)** + * **[Parts](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -123,17 +123,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -143,29 +143,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::RequestValue) and -[decodable](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::RequestValue) and +[decodable](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebaseml1/5.0.2-beta-1+20230123/google_firebaseml1/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebaseml1/5.0.2+20230123/google_firebaseml1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/firebaseml1/src/api.rs b/gen/firebaseml1/src/api.rs index 976827a60d..bf24ec40ea 100644 --- a/gen/firebaseml1/src/api.rs +++ b/gen/firebaseml1/src/api.rs @@ -122,7 +122,7 @@ impl<'a, S> FirebaseML { FirebaseML { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebaseml.googleapis.com/".to_string(), _root_url: "https://firebaseml.googleapis.com/".to_string(), } @@ -133,7 +133,7 @@ impl<'a, S> FirebaseML { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebaseml1/src/client.rs b/gen/firebaseml1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebaseml1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebaseml1/src/lib.rs b/gen/firebaseml1/src/lib.rs index 3207449e51..55546f5088 100644 --- a/gen/firebaseml1/src/lib.rs +++ b/gen/firebaseml1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebase ML* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *firebaseml:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebase ML* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *firebaseml:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebase ML* *v1* API can be found at the //! [official documentation site](https://firebase.google.com). diff --git a/gen/firebaseremoteconfig1-cli/Cargo.toml b/gen/firebaseremoteconfig1-cli/Cargo.toml index 9ab292868d..b97d25717b 100644 --- a/gen/firebaseremoteconfig1-cli/Cargo.toml +++ b/gen/firebaseremoteconfig1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebaseremoteconfig1-cli" -version = "4.0.1+20171129" +version = "5.0.2+20171129" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Remote Config (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseremoteconfig1-cli" @@ -20,13 +20,13 @@ name = "firebaseremoteconfig1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebaseremoteconfig1] path = "../firebaseremoteconfig1" -version = "4.0.1+20171129" +version = "5.0.2+20171129" + diff --git a/gen/firebaseremoteconfig1-cli/README.md b/gen/firebaseremoteconfig1-cli/README.md index cdd8e3f9e5..bf92b75cb7 100644 --- a/gen/firebaseremoteconfig1-cli/README.md +++ 
b/gen/firebaseremoteconfig1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebase Remote Config* API at revision *20171129*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebase Remote Config* API at revision *20171129*. The CLI is at version *5.0.2*. ```bash firebaseremoteconfig1 [options] diff --git a/gen/firebaseremoteconfig1-cli/mkdocs.yml b/gen/firebaseremoteconfig1-cli/mkdocs.yml index 8e837dc21e..0fe5905a22 100644 --- a/gen/firebaseremoteconfig1-cli/mkdocs.yml +++ b/gen/firebaseremoteconfig1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebase Remote Config v4.0.1+20171129 +site_name: Firebase Remote Config v5.0.2+20171129 site_url: http://byron.github.io/google-apis-rs/google-firebaseremoteconfig1-cli site_description: A complete library to interact with Firebase Remote Config (protocol v1) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseremoteco docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_get-remote-config.md', 'Projects', 'Get Remote Config'] -- ['projects_update-remote-config.md', 'Projects', 'Update Remote Config'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Get Remote Config': 'projects_get-remote-config.md' + - 'Update Remote Config': 'projects_update-remote-config.md' theme: readthedocs diff --git a/gen/firebaseremoteconfig1-cli/src/client.rs b/gen/firebaseremoteconfig1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebaseremoteconfig1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; 
-use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebaseremoteconfig1-cli/src/main.rs b/gen/firebaseremoteconfig1-cli/src/main.rs index 6adf6333c5..7a8ed9fa66 100644 --- a/gen/firebaseremoteconfig1-cli/src/main.rs +++ b/gen/firebaseremoteconfig1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebaseremoteconfig1::{api, Error, oauth2}; +use google_firebaseremoteconfig1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -139,7 +138,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -356,7 +355,7 @@ async fn main() { let mut app = App::new("firebaseremoteconfig1") .author("Sebastian Thiel ") - .version("4.0.1+20171129") + .version("5.0.2+20171129") .about("Firebase Remote Config API allows the 3P clients to manage Remote Config conditions and parameters for Firebase applications.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebaseremoteconfig1_cli") .arg(Arg::with_name("folder") diff --git a/gen/firebaseremoteconfig1/Cargo.toml b/gen/firebaseremoteconfig1/Cargo.toml index b91a6584ce..ec35eac034 100644 --- a/gen/firebaseremoteconfig1/Cargo.toml +++ b/gen/firebaseremoteconfig1/Cargo.toml @@ 
-4,12 +4,12 @@ [package] name = "google-firebaseremoteconfig1" -version = "5.0.2-beta-1+20171129" +version = "5.0.2+20171129" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebase Remote Config (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebaseremoteconfig1" homepage = "https://firebase.google.com/docs/remote-config/" -documentation = "https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129" +documentation = "https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129" license = "MIT" keywords = ["firebaseremoteconfig", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firebaseremoteconfig1/README.md b/gen/firebaseremoteconfig1/README.md index a0a98ebc57..91e912226e 100644 --- a/gen/firebaseremoteconfig1/README.md +++ b/gen/firebaseremoteconfig1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-firebaseremoteconfig1` library allows access to all features of the *Google Firebase Remote Config* service. -This documentation was generated from *Firebase Remote Config* crate version *5.0.2-beta-1+20171129*, where *20171129* is the exact revision of the *firebaseremoteconfig:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firebase Remote Config* crate version *5.0.2+20171129*, where *20171129* is the exact revision of the *firebaseremoteconfig:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firebase Remote Config* *v1* API can be found at the [official documentation site](https://firebase.google.com/docs/remote-config/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/FirebaseRemoteConfig) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/FirebaseRemoteConfig) ... * projects - * [*get remote config*](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/api::ProjectGetRemoteConfigCall) and [*update remote config*](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/api::ProjectUpdateRemoteConfigCall) + * [*get remote config*](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/api::ProjectGetRemoteConfigCall) and [*update remote config*](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/api::ProjectUpdateRemoteConfigCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/FirebaseRemoteConfig)** +* **[Hub](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/FirebaseRemoteConfig)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::CallBuilder) +* **[Resources](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::Part)** + * **[Parts](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::RequestValue) and -[decodable](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::RequestValue) and +[decodable](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebaseremoteconfig1/5.0.2-beta-1+20171129/google_firebaseremoteconfig1/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebaseremoteconfig1/5.0.2+20171129/google_firebaseremoteconfig1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/firebaseremoteconfig1/src/api.rs b/gen/firebaseremoteconfig1/src/api.rs index 6a45356358..0e0288a081 100644 --- a/gen/firebaseremoteconfig1/src/api.rs +++ b/gen/firebaseremoteconfig1/src/api.rs @@ -104,7 +104,7 @@ impl<'a, S> FirebaseRemoteConfig { FirebaseRemoteConfig { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebaseremoteconfig.googleapis.com/".to_string(), _root_url: "https://firebaseremoteconfig.googleapis.com/".to_string(), } @@ -115,7 +115,7 @@ impl<'a, S> FirebaseRemoteConfig { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebaseremoteconfig1/src/client.rs b/gen/firebaseremoteconfig1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebaseremoteconfig1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - 
After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebaseremoteconfig1/src/lib.rs b/gen/firebaseremoteconfig1/src/lib.rs index 5bc8726437..ec13dd9245 100644 --- a/gen/firebaseremoteconfig1/src/lib.rs +++ b/gen/firebaseremoteconfig1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebase Remote Config* crate version *5.0.2-beta-1+20171129*, where *20171129* is the exact revision of the *firebaseremoteconfig:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebase Remote Config* crate version *5.0.2+20171129*, where *20171129* is the exact revision of the *firebaseremoteconfig:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebase Remote Config* *v1* API can be found at the //! [official documentation site](https://firebase.google.com/docs/remote-config/). diff --git a/gen/firebasestorage1_beta-cli/Cargo.toml b/gen/firebasestorage1_beta-cli/Cargo.toml index 42692378ea..1dad894f31 100644 --- a/gen/firebasestorage1_beta-cli/Cargo.toml +++ b/gen/firebasestorage1_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firebasestorage1_beta-cli" -version = "4.0.1+20220218" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebasestorage (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasestorage1_beta-cli" @@ -20,13 +20,13 @@ name = "firebasestorage1-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firebasestorage1_beta] path = "../firebasestorage1_beta" -version = "4.0.1+20220218" +version = "5.0.2+20230106" + diff --git a/gen/firebasestorage1_beta-cli/README.md b/gen/firebasestorage1_beta-cli/README.md index 
b8f6d4d08c..432b866068 100644 --- a/gen/firebasestorage1_beta-cli/README.md +++ b/gen/firebasestorage1_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firebasestorage* API at revision *20220218*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firebasestorage* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash firebasestorage1-beta [options] diff --git a/gen/firebasestorage1_beta-cli/mkdocs.yml b/gen/firebasestorage1_beta-cli/mkdocs.yml index c42710d50e..898c5cbc1d 100644 --- a/gen/firebasestorage1_beta-cli/mkdocs.yml +++ b/gen/firebasestorage1_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firebasestorage v4.0.1+20220218 +site_name: Firebasestorage v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-firebasestorage1_beta-cli site_description: A complete library to interact with Firebasestorage (protocol v1beta) @@ -7,12 +7,13 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firebasestorage1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_buckets-add-firebase.md', 'Projects', 'Buckets Add Firebase'] -- ['projects_buckets-get.md', 'Projects', 'Buckets Get'] -- ['projects_buckets-list.md', 'Projects', 'Buckets List'] -- ['projects_buckets-remove-firebase.md', 'Projects', 'Buckets Remove Firebase'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Buckets Add Firebase': 'projects_buckets-add-firebase.md' + - 'Buckets Get': 'projects_buckets-get.md' + - 'Buckets List': 'projects_buckets-list.md' + - 'Buckets Remove Firebase': 'projects_buckets-remove-firebase.md' theme: readthedocs diff --git a/gen/firebasestorage1_beta-cli/src/client.rs b/gen/firebasestorage1_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firebasestorage1_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 
'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - 
-arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firebasestorage1_beta-cli/src/main.rs b/gen/firebasestorage1_beta-cli/src/main.rs index f41808ef8c..6ec2d35da8 100644 --- a/gen/firebasestorage1_beta-cli/src/main.rs +++ b/gen/firebasestorage1_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firebasestorage1_beta::{api, Error, oauth2}; +use google_firebasestorage1_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -197,7 +196,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -535,7 +534,7 @@ async fn main() { let mut app = App::new("firebasestorage1-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220218") + .version("5.0.2+20230106") .about("The Cloud Storage for Firebase API enables programmatic management of Cloud Storage buckets for use in Firebase projects") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firebasestorage1_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/firebasestorage1_beta/Cargo.toml b/gen/firebasestorage1_beta/Cargo.toml index 917dc791d9..8571588bd2 100644 --- a/gen/firebasestorage1_beta/Cargo.toml +++ b/gen/firebasestorage1_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = 
"google-firebasestorage1_beta" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firebasestorage (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firebasestorage1_beta" homepage = "https://firebase.google.com/docs/storage" -documentation = "https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106" license = "MIT" keywords = ["firebasestorage", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firebasestorage1_beta/README.md b/gen/firebasestorage1_beta/README.md index 1b0975a64f..6be126e120 100644 --- a/gen/firebasestorage1_beta/README.md +++ b/gen/firebasestorage1_beta/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-firebasestorage1_beta` library allows access to all features of the *Google Firebasestorage* service. -This documentation was generated from *Firebasestorage* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *firebasestorage:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firebasestorage* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *firebasestorage:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firebasestorage* *v1_beta* API can be found at the [official documentation site](https://firebase.google.com/docs/storage). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/Firebasestorage) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/Firebasestorage) ... * projects - * [*buckets add firebase*](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/api::ProjectBucketAddFirebaseCall), [*buckets get*](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/api::ProjectBucketGetCall), [*buckets list*](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/api::ProjectBucketListCall) and [*buckets remove firebase*](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/api::ProjectBucketRemoveFirebaseCall) + * [*buckets add firebase*](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/api::ProjectBucketAddFirebaseCall), [*buckets get*](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/api::ProjectBucketGetCall), [*buckets list*](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/api::ProjectBucketListCall) and [*buckets remove firebase*](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/api::ProjectBucketRemoveFirebaseCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/Firebasestorage)** +* **[Hub](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/Firebasestorage)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::Part)** + * **[Parts](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::RequestValue) and -[decodable](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::RequestValue) and +[decodable](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firebasestorage1_beta/5.0.2-beta-1+20230106/google_firebasestorage1_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-firebasestorage1_beta/5.0.2+20230106/google_firebasestorage1_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/firebasestorage1_beta/src/api.rs b/gen/firebasestorage1_beta/src/api.rs index 17d33620d6..d28f42f655 100644 --- a/gen/firebasestorage1_beta/src/api.rs +++ b/gen/firebasestorage1_beta/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Firebasestorage { Firebasestorage { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firebasestorage.googleapis.com/".to_string(), _root_url: "https://firebasestorage.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Firebasestorage { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firebasestorage1_beta/src/client.rs b/gen/firebasestorage1_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firebasestorage1_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firebasestorage1_beta/src/lib.rs b/gen/firebasestorage1_beta/src/lib.rs index ea47278d69..bcd797af57 100644 --- a/gen/firebasestorage1_beta/src/lib.rs +++ b/gen/firebasestorage1_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firebasestorage* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *firebasestorage:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firebasestorage* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *firebasestorage:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firebasestorage* *v1_beta* API can be found at the //! [official documentation site](https://firebase.google.com/docs/storage). diff --git a/gen/firestore1-cli/Cargo.toml b/gen/firestore1-cli/Cargo.toml index cc75071969..830996f59d 100644 --- a/gen/firestore1-cli/Cargo.toml +++ b/gen/firestore1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firestore1-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firestore (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firestore1-cli" @@ -20,13 +20,13 @@ name = "firestore1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firestore1] path = "../firestore1" -version = "4.0.1+20220221" +version = "5.0.2+20230118" + diff --git a/gen/firestore1-cli/README.md b/gen/firestore1-cli/README.md index 3634e2de67..7140390379 100644 --- a/gen/firestore1-cli/README.md +++ b/gen/firestore1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firestore* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firestore* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash firestore1 [options] @@ -37,6 +37,8 @@ firestore1 [options] databases-collection-groups-indexes-delete [-p ]... [-o ] databases-collection-groups-indexes-get [-p ]... [-o ] databases-collection-groups-indexes-list [-p ]... [-o ] + databases-create (-r )... [-p ]... [-o ] + databases-delete [-p ]... [-o ] databases-documents-batch-get (-r )... [-p ]... [-o ] databases-documents-batch-write (-r )... [-p ]... [-o ] databases-documents-begin-transaction (-r )... [-p ]... [-o ] @@ -46,10 +48,12 @@ firestore1 [options] databases-documents-get [-p ]... [-o ] databases-documents-list [-p ]... [-o ] databases-documents-list-collection-ids (-r )... [-p ]... [-o ] + databases-documents-list-documents [-p ]... [-o ] databases-documents-listen (-r )... [-p ]... [-o ] databases-documents-partition-query (-r )... [-p ]... [-o ] databases-documents-patch (-r )... [-p ]... [-o ] databases-documents-rollback (-r )... [-p ]... [-o ] + databases-documents-run-aggregation-query (-r )... [-p ]... [-o ] databases-documents-run-query (-r )... [-p ]... [-o ] databases-documents-write (-r )... [-p ]... [-o ] databases-export-documents (-r )... [-p ]... 
[-o ] diff --git a/gen/firestore1-cli/mkdocs.yml b/gen/firestore1-cli/mkdocs.yml index 96195a07ec..ab80c3b0f2 100644 --- a/gen/firestore1-cli/mkdocs.yml +++ b/gen/firestore1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firestore v4.0.1+20220221 +site_name: Firestore v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-firestore1-cli site_description: A complete library to interact with Firestore (protocol v1) @@ -7,41 +7,46 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firestore1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_databases-collection-groups-fields-get.md', 'Projects', 'Databases Collection Groups Fields Get'] -- ['projects_databases-collection-groups-fields-list.md', 'Projects', 'Databases Collection Groups Fields List'] -- ['projects_databases-collection-groups-fields-patch.md', 'Projects', 'Databases Collection Groups Fields Patch'] -- ['projects_databases-collection-groups-indexes-create.md', 'Projects', 'Databases Collection Groups Indexes Create'] -- ['projects_databases-collection-groups-indexes-delete.md', 'Projects', 'Databases Collection Groups Indexes Delete'] -- ['projects_databases-collection-groups-indexes-get.md', 'Projects', 'Databases Collection Groups Indexes Get'] -- ['projects_databases-collection-groups-indexes-list.md', 'Projects', 'Databases Collection Groups Indexes List'] -- ['projects_databases-documents-batch-get.md', 'Projects', 'Databases Documents Batch Get'] -- ['projects_databases-documents-batch-write.md', 'Projects', 'Databases Documents Batch Write'] -- ['projects_databases-documents-begin-transaction.md', 'Projects', 'Databases Documents Begin Transaction'] -- ['projects_databases-documents-commit.md', 'Projects', 'Databases Documents Commit'] -- ['projects_databases-documents-create-document.md', 'Projects', 'Databases Documents Create Document'] -- ['projects_databases-documents-delete.md', 'Projects', 'Databases Documents Delete'] -- 
['projects_databases-documents-get.md', 'Projects', 'Databases Documents Get'] -- ['projects_databases-documents-list.md', 'Projects', 'Databases Documents List'] -- ['projects_databases-documents-list-collection-ids.md', 'Projects', 'Databases Documents List Collection Ids'] -- ['projects_databases-documents-listen.md', 'Projects', 'Databases Documents Listen'] -- ['projects_databases-documents-partition-query.md', 'Projects', 'Databases Documents Partition Query'] -- ['projects_databases-documents-patch.md', 'Projects', 'Databases Documents Patch'] -- ['projects_databases-documents-rollback.md', 'Projects', 'Databases Documents Rollback'] -- ['projects_databases-documents-run-query.md', 'Projects', 'Databases Documents Run Query'] -- ['projects_databases-documents-write.md', 'Projects', 'Databases Documents Write'] -- ['projects_databases-export-documents.md', 'Projects', 'Databases Export Documents'] -- ['projects_databases-get.md', 'Projects', 'Databases Get'] -- ['projects_databases-import-documents.md', 'Projects', 'Databases Import Documents'] -- ['projects_databases-list.md', 'Projects', 'Databases List'] -- ['projects_databases-operations-cancel.md', 'Projects', 'Databases Operations Cancel'] -- ['projects_databases-operations-delete.md', 'Projects', 'Databases Operations Delete'] -- ['projects_databases-operations-get.md', 'Projects', 'Databases Operations Get'] -- ['projects_databases-operations-list.md', 'Projects', 'Databases Operations List'] -- ['projects_databases-patch.md', 'Projects', 'Databases Patch'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Databases Collection Groups Fields Get': 'projects_databases-collection-groups-fields-get.md' + - 'Databases Collection Groups Fields List': 'projects_databases-collection-groups-fields-list.md' + - 'Databases Collection Groups Fields Patch': 
'projects_databases-collection-groups-fields-patch.md' + - 'Databases Collection Groups Indexes Create': 'projects_databases-collection-groups-indexes-create.md' + - 'Databases Collection Groups Indexes Delete': 'projects_databases-collection-groups-indexes-delete.md' + - 'Databases Collection Groups Indexes Get': 'projects_databases-collection-groups-indexes-get.md' + - 'Databases Collection Groups Indexes List': 'projects_databases-collection-groups-indexes-list.md' + - 'Databases Create': 'projects_databases-create.md' + - 'Databases Delete': 'projects_databases-delete.md' + - 'Databases Documents Batch Get': 'projects_databases-documents-batch-get.md' + - 'Databases Documents Batch Write': 'projects_databases-documents-batch-write.md' + - 'Databases Documents Begin Transaction': 'projects_databases-documents-begin-transaction.md' + - 'Databases Documents Commit': 'projects_databases-documents-commit.md' + - 'Databases Documents Create Document': 'projects_databases-documents-create-document.md' + - 'Databases Documents Delete': 'projects_databases-documents-delete.md' + - 'Databases Documents Get': 'projects_databases-documents-get.md' + - 'Databases Documents List': 'projects_databases-documents-list.md' + - 'Databases Documents List Collection Ids': 'projects_databases-documents-list-collection-ids.md' + - 'Databases Documents List Documents': 'projects_databases-documents-list-documents.md' + - 'Databases Documents Listen': 'projects_databases-documents-listen.md' + - 'Databases Documents Partition Query': 'projects_databases-documents-partition-query.md' + - 'Databases Documents Patch': 'projects_databases-documents-patch.md' + - 'Databases Documents Rollback': 'projects_databases-documents-rollback.md' + - 'Databases Documents Run Aggregation Query': 'projects_databases-documents-run-aggregation-query.md' + - 'Databases Documents Run Query': 'projects_databases-documents-run-query.md' + - 'Databases Documents Write': 'projects_databases-documents-write.md' 
+ - 'Databases Export Documents': 'projects_databases-export-documents.md' + - 'Databases Get': 'projects_databases-get.md' + - 'Databases Import Documents': 'projects_databases-import-documents.md' + - 'Databases List': 'projects_databases-list.md' + - 'Databases Operations Cancel': 'projects_databases-operations-cancel.md' + - 'Databases Operations Delete': 'projects_databases-operations-delete.md' + - 'Databases Operations Get': 'projects_databases-operations-get.md' + - 'Databases Operations List': 'projects_databases-operations-list.md' + - 'Databases Patch': 'projects_databases-patch.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/firestore1-cli/src/client.rs b/gen/firestore1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firestore1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firestore1-cli/src/main.rs b/gen/firestore1-cli/src/main.rs index 364a7781d1..5d9eceea06 100644 --- a/gen/firestore1-cli/src/main.rs +++ b/gen/firestore1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firestore1::{api, Error, oauth2}; +use google_firestore1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -192,8 +191,9 @@ where "index-config.reverting" => Some(("indexConfig.reverting", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "index-config.uses-ancestor-config" => Some(("indexConfig.usesAncestorConfig", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ttl-config.state" => Some(("ttlConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ancestor-field", "index-config", "name", "reverting", "uses-ancestor-config"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ancestor-field", "index-config", "name", "reverting", 
"state", "ttl-config", "uses-ancestor-config"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -208,7 +208,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -280,11 +280,12 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "api-scope" => Some(("apiScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-scope" => Some(("queryScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["name", "query-scope", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["api-scope", "name", "query-scope", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -458,7 +459,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -510,6 +511,166 @@ where } } + async fn _projects_databases_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "app-engine-integration-mode" => Some(("appEngineIntegrationMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "concurrency-mode" => Some(("concurrencyMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "key-prefix" => Some(("keyPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "location-id" => Some(("locationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-integration-mode", "concurrency-mode", "etag", "key-prefix", "location-id", "name", "type"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleFirestoreAdminV1Database = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().databases_create(request, opt.value_of("parent").unwrap_or("")); + for parg 
in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "database-id" => { + call = call.database_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["database-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_databases_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().databases_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", 
"boolean")).unwrap_or(false)); + }, + "free-id" => { + call = call.free_id( value.map(|v| arg_from_str(v, err, "free-id", "boolean")).unwrap_or(false)); + }, + "etag" => { + call = call.etag(value.unwrap_or("")); + }, + "allow-missing" => { + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["allow-missing", "etag", "free-id", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_databases_documents_batch_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -957,10 +1118,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "current-document-update-time" => { - call = 
call.current_document_update_time(value.unwrap_or("")); + call = call.current_document_update_time( value.map(|v| arg_from_str(v, err, "current-document-update-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "current-document-exists" => { - call = call.current_document_exists(arg_from_str(value.unwrap_or("false"), err, "current-document-exists", "boolean")); + call = call.current_document_exists( value.map(|v| arg_from_str(v, err, "current-document-exists", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1016,10 +1177,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "transaction" => { - call = call.transaction(value.unwrap_or("")); + call = call.transaction( value.map(|v| arg_from_str(v, err, "transaction", "byte")).unwrap_or(b"hello world")); }, "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "mask-field-paths" => { call = call.add_mask_field_paths(value.unwrap_or("")); @@ -1078,19 +1239,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "transaction" => { - call = call.transaction(value.unwrap_or("")); + call = call.transaction( value.map(|v| arg_from_str(v, err, "transaction", "byte")).unwrap_or(b"hello world")); }, "show-missing" => { - call = call.show_missing(arg_from_str(value.unwrap_or("false"), err, "show-missing", "boolean")); + call = call.show_missing( value.map(|v| arg_from_str(v, err, "show-missing", "boolean")).unwrap_or(false)); }, "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( 
value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1170,8 +1331,9 @@ where match &temp_cursor.to_string()[..] { "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-time" => Some(("readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["page-size", "page-token"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["page-size", "page-token", "read-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1231,6 +1393,80 @@ where } } + async fn _projects_databases_documents_list_documents(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().databases_documents_list_documents(opt.value_of("parent").unwrap_or(""), opt.value_of("collection-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "transaction" => { + call = call.transaction( value.map(|v| arg_from_str(v, err, "transaction", "byte")).unwrap_or(b"hello world")); + }, + "show-missing" => { + call = call.show_missing( value.map(|v| arg_from_str(v, err, "show-missing", "boolean")).unwrap_or(false)); + }, + "read-time" => { + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = 
call.order_by(value.unwrap_or("")); + }, + "mask-field-paths" => { + call = call.add_mask_field_paths(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["mask-field-paths", "order-by", "page-size", "page-token", "read-time", "show-missing", "transaction"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_databases_documents_listen(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1368,6 +1604,7 @@ where "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "partition-count" => Some(("partitionCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + 
"read-time" => Some(("readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "structured-query.end-at.before" => Some(("structuredQuery.endAt.before", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "structured-query.limit" => Some(("structuredQuery.limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "structured-query.offset" => Some(("structuredQuery.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -1388,7 +1625,7 @@ where "structured-query.where.unary-filter.field.field-path" => Some(("structuredQuery.where.unaryFilter.field.fieldPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "structured-query.where.unary-filter.op" => Some(("structuredQuery.where.unaryFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["before", "boolean-value", "bytes-value", "composite-filter", "double-value", "end-at", "field", "field-filter", "field-path", "geo-point-value", "integer-value", "latitude", "limit", "longitude", "null-value", "offset", "op", "page-size", "page-token", "partition-count", "reference-value", "start-at", "string-value", "structured-query", "timestamp-value", "unary-filter", "value", "where"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["before", "boolean-value", "bytes-value", "composite-filter", "double-value", "end-at", "field", "field-filter", "field-path", "geo-point-value", "integer-value", "latitude", "limit", "longitude", "null-value", "offset", "op", "page-size", "page-token", "partition-count", "read-time", "reference-value", "start-at", "string-value", "structured-query", "timestamp-value", "unary-filter", "value", "where"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1496,10 +1733,10 @@ where call = 
call.add_mask_field_paths(value.unwrap_or("")); }, "current-document-update-time" => { - call = call.current_document_update_time(value.unwrap_or("")); + call = call.current_document_update_time( value.map(|v| arg_from_str(v, err, "current-document-update-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "current-document-exists" => { - call = call.current_document_exists(arg_from_str(value.unwrap_or("false"), err, "current-document-exists", "boolean")); + call = call.current_document_exists( value.map(|v| arg_from_str(v, err, "current-document-exists", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1633,6 +1870,113 @@ where } } + async fn _projects_databases_documents_run_aggregation_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "new-transaction.read-only.read-time" => Some(("newTransaction.readOnly.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "new-transaction.read-write.retry-transaction" => Some(("newTransaction.readWrite.retryTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-time" => Some(("readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.end-at.before" => Some(("structuredAggregationQuery.structuredQuery.endAt.before", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.limit" => Some(("structuredAggregationQuery.structuredQuery.limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.offset" => Some(("structuredAggregationQuery.structuredQuery.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.start-at.before" => Some(("structuredAggregationQuery.structuredQuery.startAt.before", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.composite-filter.op" => Some(("structuredAggregationQuery.structuredQuery.where.compositeFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.field.field-path" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.field.fieldPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.op" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.boolean-value" => 
Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.booleanValue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.bytes-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.bytesValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.double-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.doubleValue", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.geo-point-value.latitude" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.geoPointValue.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.geo-point-value.longitude" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.geoPointValue.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.integer-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.integerValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.null-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.nullValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.reference-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.referenceValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"structured-aggregation-query.structured-query.where.field-filter.value.string-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.stringValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.timestamp-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.timestampValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.unary-filter.field.field-path" => Some(("structuredAggregationQuery.structuredQuery.where.unaryFilter.field.fieldPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.unary-filter.op" => Some(("structuredAggregationQuery.structuredQuery.where.unaryFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transaction" => Some(("transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["before", "boolean-value", "bytes-value", "composite-filter", "double-value", "end-at", "field", "field-filter", "field-path", "geo-point-value", "integer-value", "latitude", "limit", "longitude", "new-transaction", "null-value", "offset", "op", "read-only", "read-time", "read-write", "reference-value", "retry-transaction", "start-at", "string-value", "structured-aggregation-query", "structured-query", "timestamp-value", "transaction", "unary-filter", "value", "where"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RunAggregationQueryRequest = 
json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().databases_documents_run_aggregation_query(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_databases_documents_run_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1851,9 +2195,10 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "collection-ids" => Some(("collectionIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "namespace-ids" => Some(("namespaceIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "output-uri-prefix" => Some(("outputUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["collection-ids", "output-uri-prefix"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["collection-ids", "namespace-ids", "output-uri-prefix"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1990,8 +2335,9 @@ where match &temp_cursor.to_string()[..] { "collection-ids" => Some(("collectionIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "input-uri-prefix" => Some(("inputUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "namespace-ids" => Some(("namespaceIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["collection-ids", "input-uri-prefix"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["collection-ids", "input-uri-prefix", "namespace-ids"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2301,7 +2647,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2376,13 +2722,15 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "app-engine-integration-mode" => Some(("appEngineIntegrationMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "concurrency-mode" => Some(("concurrencyMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "key-prefix" => Some(("keyPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "location-id" => Some(("locationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["concurrency-mode", "etag", "location-id", "name", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-integration-mode", "concurrency-mode", "etag", "key-prefix", "location-id", "name", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2397,7 +2745,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2508,7 +2856,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2588,6 +2936,12 @@ where ("databases-collection-groups-indexes-list", Some(opt)) => { call_result = self._projects_databases_collection_groups_indexes_list(opt, 
dry_run, &mut err).await; }, + ("databases-create", Some(opt)) => { + call_result = self._projects_databases_create(opt, dry_run, &mut err).await; + }, + ("databases-delete", Some(opt)) => { + call_result = self._projects_databases_delete(opt, dry_run, &mut err).await; + }, ("databases-documents-batch-get", Some(opt)) => { call_result = self._projects_databases_documents_batch_get(opt, dry_run, &mut err).await; }, @@ -2615,6 +2969,9 @@ where ("databases-documents-list-collection-ids", Some(opt)) => { call_result = self._projects_databases_documents_list_collection_ids(opt, dry_run, &mut err).await; }, + ("databases-documents-list-documents", Some(opt)) => { + call_result = self._projects_databases_documents_list_documents(opt, dry_run, &mut err).await; + }, ("databases-documents-listen", Some(opt)) => { call_result = self._projects_databases_documents_listen(opt, dry_run, &mut err).await; }, @@ -2627,6 +2984,9 @@ where ("databases-documents-rollback", Some(opt)) => { call_result = self._projects_databases_documents_rollback(opt, dry_run, &mut err).await; }, + ("databases-documents-run-aggregation-query", Some(opt)) => { + call_result = self._projects_databases_documents_run_aggregation_query(opt, dry_run, &mut err).await; + }, ("databases-documents-run-query", Some(opt)) => { call_result = self._projects_databases_documents_run_query(opt, dry_run, &mut err).await; }, @@ -2745,7 +3105,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'databases-collection-groups-fields-get', 'databases-collection-groups-fields-list', 'databases-collection-groups-fields-patch', 'databases-collection-groups-indexes-create', 'databases-collection-groups-indexes-delete', 'databases-collection-groups-indexes-get', 'databases-collection-groups-indexes-list', 'databases-documents-batch-get', 'databases-documents-batch-write', 'databases-documents-begin-transaction', 'databases-documents-commit', 'databases-documents-create-document', 
'databases-documents-delete', 'databases-documents-get', 'databases-documents-list', 'databases-documents-list-collection-ids', 'databases-documents-listen', 'databases-documents-partition-query', 'databases-documents-patch', 'databases-documents-rollback', 'databases-documents-run-query', 'databases-documents-write', 'databases-export-documents', 'databases-get', 'databases-import-documents', 'databases-list', 'databases-operations-cancel', 'databases-operations-delete', 'databases-operations-get', 'databases-operations-list', 'databases-patch', 'locations-get' and 'locations-list'", vec![ + ("projects", "methods: 'databases-collection-groups-fields-get', 'databases-collection-groups-fields-list', 'databases-collection-groups-fields-patch', 'databases-collection-groups-indexes-create', 'databases-collection-groups-indexes-delete', 'databases-collection-groups-indexes-get', 'databases-collection-groups-indexes-list', 'databases-create', 'databases-delete', 'databases-documents-batch-get', 'databases-documents-batch-write', 'databases-documents-begin-transaction', 'databases-documents-commit', 'databases-documents-create-document', 'databases-documents-delete', 'databases-documents-get', 'databases-documents-list', 'databases-documents-list-collection-ids', 'databases-documents-list-documents', 'databases-documents-listen', 'databases-documents-partition-query', 'databases-documents-patch', 'databases-documents-rollback', 'databases-documents-run-aggregation-query', 'databases-documents-run-query', 'databases-documents-write', 'databases-export-documents', 'databases-get', 'databases-import-documents', 'databases-list', 'databases-operations-cancel', 'databases-operations-delete', 'databases-operations-get', 'databases-operations-list', 'databases-patch', 'locations-get' and 'locations-list'", vec![ ("databases-collection-groups-fields-get", Some(r##"Gets the metadata and configuration for a Field."##), "Details at 
http://byron.github.io/google-apis-rs/google_firestore1_cli/projects_databases-collection-groups-fields-get", @@ -2906,6 +3266,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("databases-create", + Some(r##"Create a database."##), + "Details at http://byron.github.io/google-apis-rs/google_firestore1_cli/projects_databases-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. A parent name of the form `projects/{project_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("databases-delete", + Some(r##"Deletes a database."##), + "Details at http://byron.github.io/google-apis-rs/google_firestore1_cli/projects_databases-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the form `projects/{project_id}/databases/{database_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3114,7 +3524,7 @@ async fn main() { (Some(r##"collection-id"##), None, - Some(r##"Required. The collection ID, relative to `parent`, to list. For example: `chatrooms` or `messages`."##), + Some(r##"Optional. The collection ID, relative to `parent`, to list. For example: `chatrooms` or `messages`. 
This is optional, and when not provided, Firestore will list documents from all collections under the provided `parent`."##), Some(true), Some(false)), @@ -3152,6 +3562,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("databases-documents-list-documents", + Some(r##"Lists documents."##), + "Details at http://byron.github.io/google-apis-rs/google_firestore1_cli/projects_databases-documents-list-documents", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource name. In the format: `projects/{project_id}/databases/{database_id}/documents` or `projects/{project_id}/databases/{database_id}/documents/{document_path}`. For example: `projects/my-project/databases/my-database/documents` or `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`"##), + Some(true), + Some(false)), + + (Some(r##"collection-id"##), + None, + Some(r##"Optional. The collection ID, relative to `parent`, to list. For example: `chatrooms` or `messages`. This is optional, and when not provided, Firestore will list documents from all collections under the provided `parent`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3264,6 +3702,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("databases-documents-run-aggregation-query", + Some(r##"Runs an aggregation query. 
Rather than producing Document results like Firestore.RunQuery, this API allows running an aggregation to produce a series of AggregationResult server-side. High-Level Example: ``` -- Return the number of documents in table given a filter. SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); ```"##), + "Details at http://byron.github.io/google-apis-rs/google_firestore1_cli/projects_databases-documents-run-aggregation-query", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource name. In the format: `projects/{project_id}/databases/{database_id}/documents` or `projects/{project_id}/databases/{database_id}/documents/{document_path}`. For example: `projects/my-project/databases/my-database/documents` or `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3598,7 +4064,7 @@ async fn main() { let mut app = App::new("firestore1") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20230118") .about("Accesses the NoSQL document database built for automatic scaling, high performance, and ease of application development. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firestore1_cli") .arg(Arg::with_name("url") diff --git a/gen/firestore1/Cargo.toml b/gen/firestore1/Cargo.toml index 60df1afaf8..3a6fb49fad 100644 --- a/gen/firestore1/Cargo.toml +++ b/gen/firestore1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firestore1" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firestore (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firestore1" homepage = "https://cloud.google.com/firestore" -documentation = "https://docs.rs/google-firestore1/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-firestore1/5.0.2+20230118" license = "MIT" keywords = ["firestore", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firestore1/README.md b/gen/firestore1/README.md index 54c2215642..12194665d4 100644 --- a/gen/firestore1/README.md +++ b/gen/firestore1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-firestore1` library allows access to all features of the *Google Firestore* service. -This documentation was generated from *Firestore* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *firestore:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firestore* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *firestore:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firestore* *v1* API can be found at the [official documentation site](https://cloud.google.com/firestore). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/Firestore) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/Firestore) ... * projects - * [*databases collection groups fields get*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupFieldGetCall), [*databases collection groups fields list*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupFieldListCall), [*databases collection groups fields patch*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupFieldPatchCall), [*databases collection groups indexes create*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupIndexCreateCall), [*databases collection groups indexes delete*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupIndexDeleteCall), [*databases collection groups indexes get*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupIndexGetCall), [*databases collection groups indexes list*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupIndexListCall), [*databases create*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseCreateCall), [*databases delete*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDeleteCall), [*databases documents batch get*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentBatchGetCall), [*databases documents batch write*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentBatchWriteCall), [*databases documents begin 
transaction*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentBeginTransactionCall), [*databases documents commit*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentCommitCall), [*databases documents create document*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentCreateDocumentCall), [*databases documents delete*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentDeleteCall), [*databases documents get*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentGetCall), [*databases documents list*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentListCall), [*databases documents list collection ids*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentListCollectionIdCall), [*databases documents list documents*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentListDocumentCall), [*databases documents listen*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentListenCall), [*databases documents partition query*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentPartitionQueryCall), [*databases documents patch*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentPatchCall), [*databases documents rollback*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentRollbackCall), [*databases documents run aggregation query*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentRunAggregationQueryCall), [*databases 
documents run query*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentRunQueryCall), [*databases documents write*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseDocumentWriteCall), [*databases export documents*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseExportDocumentCall), [*databases get*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseGetCall), [*databases import documents*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseImportDocumentCall), [*databases list*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseListCall), [*databases operations cancel*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseOperationCancelCall), [*databases operations delete*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseOperationDeleteCall), [*databases operations get*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseOperationGetCall), [*databases operations list*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabaseOperationListCall), [*databases patch*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectDatabasePatchCall), [*locations get*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectLocationGetCall) and [*locations list*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/api::ProjectLocationListCall) + * [*databases collection groups fields get*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupFieldGetCall), [*databases collection groups fields 
list*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupFieldListCall), [*databases collection groups fields patch*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupFieldPatchCall), [*databases collection groups indexes create*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupIndexCreateCall), [*databases collection groups indexes delete*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupIndexDeleteCall), [*databases collection groups indexes get*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupIndexGetCall), [*databases collection groups indexes list*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseCollectionGroupIndexListCall), [*databases create*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseCreateCall), [*databases delete*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDeleteCall), [*databases documents batch get*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentBatchGetCall), [*databases documents batch write*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentBatchWriteCall), [*databases documents begin transaction*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentBeginTransactionCall), [*databases documents commit*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentCommitCall), [*databases documents create document*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentCreateDocumentCall), [*databases documents 
delete*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentDeleteCall), [*databases documents get*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentGetCall), [*databases documents list*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentListCall), [*databases documents list collection ids*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentListCollectionIdCall), [*databases documents list documents*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentListDocumentCall), [*databases documents listen*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentListenCall), [*databases documents partition query*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentPartitionQueryCall), [*databases documents patch*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentPatchCall), [*databases documents rollback*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentRollbackCall), [*databases documents run aggregation query*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentRunAggregationQueryCall), [*databases documents run query*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentRunQueryCall), [*databases documents write*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseDocumentWriteCall), [*databases export documents*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseExportDocumentCall), [*databases get*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseGetCall), [*databases import 
documents*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseImportDocumentCall), [*databases list*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseListCall), [*databases operations cancel*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseOperationCancelCall), [*databases operations delete*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseOperationDeleteCall), [*databases operations get*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseOperationGetCall), [*databases operations list*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabaseOperationListCall), [*databases patch*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectDatabasePatchCall), [*locations get*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectLocationGetCall) and [*locations list*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/api::ProjectLocationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/Firestore)** +* **[Hub](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/Firestore)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::CallBuilder) -* **[Resources](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::CallBuilder) +* **[Resources](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::Part)** + * **[Parts](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::Delegate) to the -[Method Builder](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::Delegate) to the +[Method Builder](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::RequestValue) and -[decodable](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::RequestValue) and +[decodable](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firestore1/5.0.2-beta-1+20230118/google_firestore1/client::RequestValue) are moved +* [request values](https://docs.rs/google-firestore1/5.0.2+20230118/google_firestore1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/firestore1/src/api.rs b/gen/firestore1/src/api.rs index 42da7dce97..6fc43f10f8 100644 --- a/gen/firestore1/src/api.rs +++ b/gen/firestore1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> Firestore { Firestore { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firestore.googleapis.com/".to_string(), _root_url: "https://firestore.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> Firestore { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firestore1/src/client.rs b/gen/firestore1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firestore1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firestore1/src/lib.rs b/gen/firestore1/src/lib.rs index 266149a2de..f1de12424e 100644 --- a/gen/firestore1/src/lib.rs +++ b/gen/firestore1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firestore* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *firestore:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firestore* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *firestore:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firestore* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/firestore). diff --git a/gen/firestore1_beta1-cli/Cargo.toml b/gen/firestore1_beta1-cli/Cargo.toml index dbc4c6bb32..d96ad4fd76 100644 --- a/gen/firestore1_beta1-cli/Cargo.toml +++ b/gen/firestore1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-firestore1_beta1-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firestore (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firestore1_beta1-cli" @@ -20,13 +20,13 @@ name = "firestore1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-firestore1_beta1] path = "../firestore1_beta1" -version = "4.0.1+20220221" +version = "5.0.2+20230118" + diff --git a/gen/firestore1_beta1-cli/README.md b/gen/firestore1_beta1-cli/README.md index 00d593e613..5cf5ea438e 100644 --- a/gen/firestore1_beta1-cli/README.md +++ b/gen/firestore1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code 
[on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Firestore* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *Firestore* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash firestore1-beta1 [options] @@ -39,10 +39,12 @@ firestore1-beta1 [options] databases-documents-get [-p ]... [-o ] databases-documents-list [-p ]... [-o ] databases-documents-list-collection-ids (-r )... [-p ]... [-o ] + databases-documents-list-documents [-p ]... [-o ] databases-documents-listen (-r )... [-p ]... [-o ] databases-documents-partition-query (-r )... [-p ]... [-o ] databases-documents-patch (-r )... [-p ]... [-o ] databases-documents-rollback (-r )... [-p ]... [-o ] + databases-documents-run-aggregation-query (-r )... [-p ]... [-o ] databases-documents-run-query (-r )... [-p ]... [-o ] databases-documents-write (-r )... [-p ]... [-o ] databases-export-documents (-r )... [-p ]... 
[-o ] diff --git a/gen/firestore1_beta1-cli/mkdocs.yml b/gen/firestore1_beta1-cli/mkdocs.yml index ce083b2332..72856c8002 100644 --- a/gen/firestore1_beta1-cli/mkdocs.yml +++ b/gen/firestore1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Firestore v4.0.1+20220221 +site_name: Firestore v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-firestore1_beta1-cli site_description: A complete library to interact with Firestore (protocol v1beta1) @@ -7,29 +7,32 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/firestore1_beta1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_databases-documents-batch-get.md', 'Projects', 'Databases Documents Batch Get'] -- ['projects_databases-documents-batch-write.md', 'Projects', 'Databases Documents Batch Write'] -- ['projects_databases-documents-begin-transaction.md', 'Projects', 'Databases Documents Begin Transaction'] -- ['projects_databases-documents-commit.md', 'Projects', 'Databases Documents Commit'] -- ['projects_databases-documents-create-document.md', 'Projects', 'Databases Documents Create Document'] -- ['projects_databases-documents-delete.md', 'Projects', 'Databases Documents Delete'] -- ['projects_databases-documents-get.md', 'Projects', 'Databases Documents Get'] -- ['projects_databases-documents-list.md', 'Projects', 'Databases Documents List'] -- ['projects_databases-documents-list-collection-ids.md', 'Projects', 'Databases Documents List Collection Ids'] -- ['projects_databases-documents-listen.md', 'Projects', 'Databases Documents Listen'] -- ['projects_databases-documents-partition-query.md', 'Projects', 'Databases Documents Partition Query'] -- ['projects_databases-documents-patch.md', 'Projects', 'Databases Documents Patch'] -- ['projects_databases-documents-rollback.md', 'Projects', 'Databases Documents Rollback'] -- ['projects_databases-documents-run-query.md', 'Projects', 'Databases Documents Run Query'] -- 
['projects_databases-documents-write.md', 'Projects', 'Databases Documents Write'] -- ['projects_databases-export-documents.md', 'Projects', 'Databases Export Documents'] -- ['projects_databases-import-documents.md', 'Projects', 'Databases Import Documents'] -- ['projects_databases-indexes-create.md', 'Projects', 'Databases Indexes Create'] -- ['projects_databases-indexes-delete.md', 'Projects', 'Databases Indexes Delete'] -- ['projects_databases-indexes-get.md', 'Projects', 'Databases Indexes Get'] -- ['projects_databases-indexes-list.md', 'Projects', 'Databases Indexes List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Databases Documents Batch Get': 'projects_databases-documents-batch-get.md' + - 'Databases Documents Batch Write': 'projects_databases-documents-batch-write.md' + - 'Databases Documents Begin Transaction': 'projects_databases-documents-begin-transaction.md' + - 'Databases Documents Commit': 'projects_databases-documents-commit.md' + - 'Databases Documents Create Document': 'projects_databases-documents-create-document.md' + - 'Databases Documents Delete': 'projects_databases-documents-delete.md' + - 'Databases Documents Get': 'projects_databases-documents-get.md' + - 'Databases Documents List': 'projects_databases-documents-list.md' + - 'Databases Documents List Collection Ids': 'projects_databases-documents-list-collection-ids.md' + - 'Databases Documents List Documents': 'projects_databases-documents-list-documents.md' + - 'Databases Documents Listen': 'projects_databases-documents-listen.md' + - 'Databases Documents Partition Query': 'projects_databases-documents-partition-query.md' + - 'Databases Documents Patch': 'projects_databases-documents-patch.md' + - 'Databases Documents Rollback': 'projects_databases-documents-rollback.md' + - 'Databases Documents Run Aggregation Query': 'projects_databases-documents-run-aggregation-query.md' + - 'Databases Documents Run Query': 'projects_databases-documents-run-query.md' + - 'Databases Documents 
Write': 'projects_databases-documents-write.md' + - 'Databases Export Documents': 'projects_databases-export-documents.md' + - 'Databases Import Documents': 'projects_databases-import-documents.md' + - 'Databases Indexes Create': 'projects_databases-indexes-create.md' + - 'Databases Indexes Delete': 'projects_databases-indexes-delete.md' + - 'Databases Indexes Get': 'projects_databases-indexes-get.md' + - 'Databases Indexes List': 'projects_databases-indexes-list.md' theme: readthedocs diff --git a/gen/firestore1_beta1-cli/src/client.rs b/gen/firestore1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/firestore1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/firestore1_beta1-cli/src/main.rs b/gen/firestore1_beta1-cli/src/main.rs index 53c6b2a3c2..0c24e4c154 100644 --- a/gen/firestore1_beta1-cli/src/main.rs +++ b/gen/firestore1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_firestore1_beta1::{api, Error, oauth2}; +use google_firestore1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -498,10 +497,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "current-document-update-time" => { - call = call.current_document_update_time(value.unwrap_or("")); + call = call.current_document_update_time( value.map(|v| arg_from_str(v, err, "current-document-update-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "current-document-exists" => { - call = call.current_document_exists(arg_from_str(value.unwrap_or("false"), err, "current-document-exists", "boolean")); + call = call.current_document_exists( value.map(|v| arg_from_str(v, err, "current-document-exists", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -557,10 +556,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "transaction" => { - call = call.transaction(value.unwrap_or("")); + call = call.transaction( value.map(|v| arg_from_str(v, err, "transaction", "byte")).unwrap_or(b"hello world")); }, "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, 
err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "mask-field-paths" => { call = call.add_mask_field_paths(value.unwrap_or("")); @@ -619,19 +618,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "transaction" => { - call = call.transaction(value.unwrap_or("")); + call = call.transaction( value.map(|v| arg_from_str(v, err, "transaction", "byte")).unwrap_or(b"hello world")); }, "show-missing" => { - call = call.show_missing(arg_from_str(value.unwrap_or("false"), err, "show-missing", "boolean")); + call = call.show_missing( value.map(|v| arg_from_str(v, err, "show-missing", "boolean")).unwrap_or(false)); }, "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -711,8 +710,9 @@ where match &temp_cursor.to_string()[..] 
{ "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-time" => Some(("readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["page-size", "page-token"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["page-size", "page-token", "read-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -772,6 +772,80 @@ where } } + async fn _projects_databases_documents_list_documents(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().databases_documents_list_documents(opt.value_of("parent").unwrap_or(""), opt.value_of("collection-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "transaction" => { + call = call.transaction( value.map(|v| arg_from_str(v, err, "transaction", "byte")).unwrap_or(b"hello world")); + }, + "show-missing" => { + call = call.show_missing( value.map(|v| arg_from_str(v, err, "show-missing", "boolean")).unwrap_or(false)); + }, + "read-time" => { + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "mask-field-paths" => { + call = call.add_mask_field_paths(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = 
true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["mask-field-paths", "order-by", "page-size", "page-token", "read-time", "show-missing", "transaction"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_databases_documents_listen(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -909,6 +983,7 @@ where "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "partition-count" => Some(("partitionCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "read-time" => Some(("readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "structured-query.end-at.before" => Some(("structuredQuery.endAt.before", JsonTypeInfo { jtype: JsonType::Boolean, ctype: 
ComplexType::Pod })), "structured-query.limit" => Some(("structuredQuery.limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "structured-query.offset" => Some(("structuredQuery.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -929,7 +1004,7 @@ where "structured-query.where.unary-filter.field.field-path" => Some(("structuredQuery.where.unaryFilter.field.fieldPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "structured-query.where.unary-filter.op" => Some(("structuredQuery.where.unaryFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["before", "boolean-value", "bytes-value", "composite-filter", "double-value", "end-at", "field", "field-filter", "field-path", "geo-point-value", "integer-value", "latitude", "limit", "longitude", "null-value", "offset", "op", "page-size", "page-token", "partition-count", "reference-value", "start-at", "string-value", "structured-query", "timestamp-value", "unary-filter", "value", "where"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["before", "boolean-value", "bytes-value", "composite-filter", "double-value", "end-at", "field", "field-filter", "field-path", "geo-point-value", "integer-value", "latitude", "limit", "longitude", "null-value", "offset", "op", "page-size", "page-token", "partition-count", "read-time", "reference-value", "start-at", "string-value", "structured-query", "timestamp-value", "unary-filter", "value", "where"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1037,10 +1112,10 @@ where call = call.add_mask_field_paths(value.unwrap_or("")); }, "current-document-update-time" => { - call = call.current_document_update_time(value.unwrap_or("")); + call = call.current_document_update_time( value.map(|v| arg_from_str(v, err, "current-document-update-time", 
"google-datetime")).unwrap_or(chrono::Utc::now())); }, "current-document-exists" => { - call = call.current_document_exists(arg_from_str(value.unwrap_or("false"), err, "current-document-exists", "boolean")); + call = call.current_document_exists( value.map(|v| arg_from_str(v, err, "current-document-exists", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1174,6 +1249,113 @@ where } } + async fn _projects_databases_documents_run_aggregation_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "new-transaction.read-only.read-time" => Some(("newTransaction.readOnly.readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "new-transaction.read-write.retry-transaction" => Some(("newTransaction.readWrite.retryTransaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "read-time" => Some(("readTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.end-at.before" => Some(("structuredAggregationQuery.structuredQuery.endAt.before", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.limit" => Some(("structuredAggregationQuery.structuredQuery.limit", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.offset" => Some(("structuredAggregationQuery.structuredQuery.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.start-at.before" => Some(("structuredAggregationQuery.structuredQuery.startAt.before", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.composite-filter.op" => Some(("structuredAggregationQuery.structuredQuery.where.compositeFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.field.field-path" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.field.fieldPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.op" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.boolean-value" => 
Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.booleanValue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.bytes-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.bytesValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.double-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.doubleValue", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.geo-point-value.latitude" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.geoPointValue.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.geo-point-value.longitude" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.geoPointValue.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.integer-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.integerValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.null-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.nullValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.reference-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.referenceValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"structured-aggregation-query.structured-query.where.field-filter.value.string-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.stringValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.field-filter.value.timestamp-value" => Some(("structuredAggregationQuery.structuredQuery.where.fieldFilter.value.timestampValue", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.unary-filter.field.field-path" => Some(("structuredAggregationQuery.structuredQuery.where.unaryFilter.field.fieldPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "structured-aggregation-query.structured-query.where.unary-filter.op" => Some(("structuredAggregationQuery.structuredQuery.where.unaryFilter.op", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transaction" => Some(("transaction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["before", "boolean-value", "bytes-value", "composite-filter", "double-value", "end-at", "field", "field-filter", "field-path", "geo-point-value", "integer-value", "latitude", "limit", "longitude", "new-transaction", "null-value", "offset", "op", "read-only", "read-time", "read-write", "reference-value", "retry-transaction", "start-at", "string-value", "structured-aggregation-query", "structured-query", "timestamp-value", "transaction", "unary-filter", "value", "where"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RunAggregationQueryRequest = 
json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().databases_documents_run_aggregation_query(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_databases_documents_run_query(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1741,7 +1923,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, 
"filter" => { call = call.filter(value.unwrap_or("")); @@ -1827,6 +2009,9 @@ where ("databases-documents-list-collection-ids", Some(opt)) => { call_result = self._projects_databases_documents_list_collection_ids(opt, dry_run, &mut err).await; }, + ("databases-documents-list-documents", Some(opt)) => { + call_result = self._projects_databases_documents_list_documents(opt, dry_run, &mut err).await; + }, ("databases-documents-listen", Some(opt)) => { call_result = self._projects_databases_documents_listen(opt, dry_run, &mut err).await; }, @@ -1839,6 +2024,9 @@ where ("databases-documents-rollback", Some(opt)) => { call_result = self._projects_databases_documents_rollback(opt, dry_run, &mut err).await; }, + ("databases-documents-run-aggregation-query", Some(opt)) => { + call_result = self._projects_databases_documents_run_aggregation_query(opt, dry_run, &mut err).await; + }, ("databases-documents-run-query", Some(opt)) => { call_result = self._projects_databases_documents_run_query(opt, dry_run, &mut err).await; }, @@ -1942,7 +2130,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'databases-documents-batch-get', 'databases-documents-batch-write', 'databases-documents-begin-transaction', 'databases-documents-commit', 'databases-documents-create-document', 'databases-documents-delete', 'databases-documents-get', 'databases-documents-list', 'databases-documents-list-collection-ids', 'databases-documents-listen', 'databases-documents-partition-query', 'databases-documents-patch', 'databases-documents-rollback', 'databases-documents-run-query', 'databases-documents-write', 'databases-export-documents', 'databases-import-documents', 'databases-indexes-create', 'databases-indexes-delete', 'databases-indexes-get' and 'databases-indexes-list'", vec![ + ("projects", "methods: 'databases-documents-batch-get', 'databases-documents-batch-write', 'databases-documents-begin-transaction', 'databases-documents-commit', 
'databases-documents-create-document', 'databases-documents-delete', 'databases-documents-get', 'databases-documents-list', 'databases-documents-list-collection-ids', 'databases-documents-list-documents', 'databases-documents-listen', 'databases-documents-partition-query', 'databases-documents-patch', 'databases-documents-rollback', 'databases-documents-run-aggregation-query', 'databases-documents-run-query', 'databases-documents-write', 'databases-export-documents', 'databases-import-documents', 'databases-indexes-create', 'databases-indexes-delete', 'databases-indexes-get' and 'databases-indexes-list'", vec![ ("databases-documents-batch-get", Some(r##"Gets multiple documents. Documents returned by this method are not guaranteed to be returned in the same order that they were requested."##), "Details at http://byron.github.io/google-apis-rs/google_firestore1_beta1_cli/projects_databases-documents-batch-get", @@ -2145,7 +2333,7 @@ async fn main() { (Some(r##"collection-id"##), None, - Some(r##"Required. The collection ID, relative to `parent`, to list. For example: `chatrooms` or `messages`."##), + Some(r##"Optional. The collection ID, relative to `parent`, to list. For example: `chatrooms` or `messages`. This is optional, and when not provided, Firestore will list documents from all collections under the provided `parent`."##), Some(true), Some(false)), @@ -2183,6 +2371,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("databases-documents-list-documents", + Some(r##"Lists documents."##), + "Details at http://byron.github.io/google-apis-rs/google_firestore1_beta1_cli/projects_databases-documents-list-documents", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource name. 
In the format: `projects/{project_id}/databases/{database_id}/documents` or `projects/{project_id}/databases/{database_id}/documents/{document_path}`. For example: `projects/my-project/databases/my-database/documents` or `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`"##), + Some(true), + Some(false)), + + (Some(r##"collection-id"##), + None, + Some(r##"Optional. The collection ID, relative to `parent`, to list. For example: `chatrooms` or `messages`. This is optional, and when not provided, Firestore will list documents from all collections under the provided `parent`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2295,6 +2511,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("databases-documents-run-aggregation-query", + Some(r##"Runs an aggregation query. Rather than producing Document results like Firestore.RunQuery, this API allows running an aggregation to produce a series of AggregationResult server-side. High-Level Example: ``` -- Return the number of documents in table given a filter. SELECT COUNT(*) FROM ( SELECT * FROM k where a = true ); ```"##), + "Details at http://byron.github.io/google-apis-rs/google_firestore1_beta1_cli/projects_databases-documents-run-aggregation-query", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource name. In the format: `projects/{project_id}/databases/{database_id}/documents` or `projects/{project_id}/databases/{database_id}/documents/{document_path}`. 
For example: `projects/my-project/databases/my-database/documents` or `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2513,7 +2757,7 @@ async fn main() { let mut app = App::new("firestore1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20230118") .about("Accesses the NoSQL document database built for automatic scaling, high performance, and ease of application development. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_firestore1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/firestore1_beta1/Cargo.toml b/gen/firestore1_beta1/Cargo.toml index 563304a2fe..3aab320970 100644 --- a/gen/firestore1_beta1/Cargo.toml +++ b/gen/firestore1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-firestore1_beta1" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Firestore (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/firestore1_beta1" homepage = "https://cloud.google.com/firestore" -documentation = "https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-firestore1_beta1/5.0.2+20230118" license = "MIT" keywords = ["firestore", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/firestore1_beta1/README.md b/gen/firestore1_beta1/README.md index 34f3b854e8..31e8dd5e9f 100644 --- 
a/gen/firestore1_beta1/README.md +++ b/gen/firestore1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-firestore1_beta1` library allows access to all features of the *Google Firestore* service. -This documentation was generated from *Firestore* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *firestore:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Firestore* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *firestore:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Firestore* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/firestore). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/Firestore) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/Firestore) ... 
* projects - * [*databases documents batch get*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentBatchGetCall), [*databases documents batch write*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentBatchWriteCall), [*databases documents begin transaction*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentBeginTransactionCall), [*databases documents commit*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentCommitCall), [*databases documents create document*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentCreateDocumentCall), [*databases documents delete*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentDeleteCall), [*databases documents get*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentGetCall), [*databases documents list*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentListCall), [*databases documents list collection ids*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentListCollectionIdCall), [*databases documents list documents*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentListDocumentCall), [*databases documents listen*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentListenCall), [*databases documents partition 
query*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentPartitionQueryCall), [*databases documents patch*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentPatchCall), [*databases documents rollback*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentRollbackCall), [*databases documents run aggregation query*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentRunAggregationQueryCall), [*databases documents run query*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentRunQueryCall), [*databases documents write*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentWriteCall), [*databases export documents*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseExportDocumentCall), [*databases import documents*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseImportDocumentCall), [*databases indexes create*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseIndexCreateCall), [*databases indexes delete*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseIndexDeleteCall), [*databases indexes get*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseIndexGetCall) and [*databases indexes list*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/api::ProjectDatabaseIndexListCall) + * [*databases documents batch 
get*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentBatchGetCall), [*databases documents batch write*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentBatchWriteCall), [*databases documents begin transaction*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentBeginTransactionCall), [*databases documents commit*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentCommitCall), [*databases documents create document*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentCreateDocumentCall), [*databases documents delete*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentDeleteCall), [*databases documents get*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentGetCall), [*databases documents list*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentListCall), [*databases documents list collection ids*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentListCollectionIdCall), [*databases documents list documents*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentListDocumentCall), [*databases documents listen*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentListenCall), [*databases documents partition query*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentPartitionQueryCall), [*databases documents 
patch*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentPatchCall), [*databases documents rollback*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentRollbackCall), [*databases documents run aggregation query*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentRunAggregationQueryCall), [*databases documents run query*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentRunQueryCall), [*databases documents write*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseDocumentWriteCall), [*databases export documents*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseExportDocumentCall), [*databases import documents*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseImportDocumentCall), [*databases indexes create*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseIndexCreateCall), [*databases indexes delete*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseIndexDeleteCall), [*databases indexes get*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseIndexGetCall) and [*databases indexes list*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/api::ProjectDatabaseIndexListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/Firestore)** +* 
**[Hub](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/Firestore)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-firestore1_beta1/5.0.2-beta-1+20230118/google_firestore1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-firestore1_beta1/5.0.2+20230118/google_firestore1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/firestore1_beta1/src/api.rs b/gen/firestore1_beta1/src/api.rs index 79aa625348..7a1591edfb 100644 --- a/gen/firestore1_beta1/src/api.rs +++ b/gen/firestore1_beta1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Firestore { Firestore { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://firestore.googleapis.com/".to_string(), _root_url: "https://firestore.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Firestore { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/firestore1_beta1/src/client.rs b/gen/firestore1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/firestore1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/firestore1_beta1/src/lib.rs b/gen/firestore1_beta1/src/lib.rs index 445d743b85..1c6cb14871 100644 --- a/gen/firestore1_beta1/src/lib.rs +++ b/gen/firestore1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Firestore* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *firestore:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Firestore* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *firestore:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Firestore* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/firestore). diff --git a/gen/fitness1-cli/Cargo.toml b/gen/fitness1-cli/Cargo.toml index 618102c777..e755f2520b 100644 --- a/gen/fitness1-cli/Cargo.toml +++ b/gen/fitness1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-fitness1-cli" -version = "4.0.1+20220302" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with fitness (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/fitness1-cli" @@ -20,13 +20,13 @@ name = "fitness1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-fitness1] path = "../fitness1" -version = "4.0.1+20220302" +version = "5.0.2+20230120" + diff --git a/gen/fitness1-cli/README.md b/gen/fitness1-cli/README.md index ac3407aa15..69f138e5b6 100644 --- a/gen/fitness1-cli/README.md +++ b/gen/fitness1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *fitness* API at revision *20220302*. The CLI is at version *4.0.1*. +This documentation was generated from the *fitness* API at revision *20230120*. The CLI is at version *5.0.2*. ```bash fitness1 [options] diff --git a/gen/fitness1-cli/mkdocs.yml b/gen/fitness1-cli/mkdocs.yml index 7bdeb1be46..c1d2c1b4e6 100644 --- a/gen/fitness1-cli/mkdocs.yml +++ b/gen/fitness1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: fitness v4.0.1+20220302 +site_name: fitness v5.0.2+20230120 site_url: http://byron.github.io/google-apis-rs/google-fitness1-cli site_description: A complete library to interact with fitness (protocol v1) @@ -7,21 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/fitness1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['users_data-sources-create.md', 'Users', 'Data Sources Create'] -- ['users_data-sources-data-point-changes-list.md', 'Users', 'Data Sources Data Point Changes List'] -- ['users_data-sources-datasets-delete.md', 'Users', 'Data Sources Datasets Delete'] -- ['users_data-sources-datasets-get.md', 'Users', 'Data Sources Datasets Get'] -- ['users_data-sources-datasets-patch.md', 'Users', 'Data Sources Datasets Patch'] -- ['users_data-sources-delete.md', 'Users', 'Data Sources Delete'] -- ['users_data-sources-get.md', 'Users', 'Data Sources Get'] -- ['users_data-sources-list.md', 'Users', 'Data Sources List'] -- ['users_data-sources-update.md', 'Users', 'Data Sources Update'] -- ['users_dataset-aggregate.md', 'Users', 'Dataset Aggregate'] -- ['users_sessions-delete.md', 'Users', 'Sessions Delete'] -- ['users_sessions-list.md', 'Users', 'Sessions List'] -- ['users_sessions-update.md', 'Users', 'Sessions Update'] +nav: +- Home: 'index.md' +- 'Users': + - 'Data Sources Create': 'users_data-sources-create.md' + - 'Data Sources Data Point Changes List': 'users_data-sources-data-point-changes-list.md' + - 'Data Sources Datasets Delete': 'users_data-sources-datasets-delete.md' + - 'Data 
Sources Datasets Get': 'users_data-sources-datasets-get.md' + - 'Data Sources Datasets Patch': 'users_data-sources-datasets-patch.md' + - 'Data Sources Delete': 'users_data-sources-delete.md' + - 'Data Sources Get': 'users_data-sources-get.md' + - 'Data Sources List': 'users_data-sources-list.md' + - 'Data Sources Update': 'users_data-sources-update.md' + - 'Dataset Aggregate': 'users_dataset-aggregate.md' + - 'Sessions Delete': 'users_sessions-delete.md' + - 'Sessions List': 'users_sessions-list.md' + - 'Sessions Update': 'users_sessions-update.md' theme: readthedocs diff --git a/gen/fitness1-cli/src/client.rs b/gen/fitness1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/fitness1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/fitness1-cli/src/main.rs b/gen/fitness1-cli/src/main.rs index 4f80e53940..436c889d1a 100644 --- a/gen/fitness1-cli/src/main.rs +++ b/gen/fitness1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_fitness1::{api, Error, oauth2}; +use google_fitness1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -160,7 +159,7 @@ where call = call.page_token(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -263,7 +262,7 @@ where call = call.page_token(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -812,13 +811,13 @@ where call = call.page_token(value.unwrap_or("")); }, "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, "end-time" => { call = call.end_time(value.unwrap_or("")); }, "activity-type" => { - call = call.add_activity_type(arg_from_str(value.unwrap_or("-0"), err, "activity-type", "integer")); + call = 
call.add_activity_type( value.map(|v| arg_from_str(v, err, "activity-type", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1471,7 +1470,7 @@ async fn main() { let mut app = App::new("fitness1") .author("Sebastian Thiel ") - .version("4.0.1+20220302") + .version("5.0.2+20230120") .about("The Fitness API for managing users' fitness tracking data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_fitness1_cli") .arg(Arg::with_name("url") diff --git a/gen/fitness1/Cargo.toml b/gen/fitness1/Cargo.toml index b00e56d81a..ba1c43e7c8 100644 --- a/gen/fitness1/Cargo.toml +++ b/gen/fitness1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-fitness1" -version = "5.0.2-beta-1+20230120" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with fitness (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/fitness1" homepage = "https://developers.google.com/fit/rest/v1/get-started" -documentation = "https://docs.rs/google-fitness1/5.0.2-beta-1+20230120" +documentation = "https://docs.rs/google-fitness1/5.0.2+20230120" license = "MIT" keywords = ["fitness", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/fitness1/README.md b/gen/fitness1/README.md index c51f5a54b7..e893dcc35b 100644 --- a/gen/fitness1/README.md +++ b/gen/fitness1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-fitness1` library allows access to all features of the *Google fitness* service. -This documentation was generated from *fitness* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *fitness:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *fitness* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *fitness:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *fitness* *v1* API can be found at the [official documentation site](https://developers.google.com/fit/rest/v1/get-started). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/Fitness) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/Fitness) ... * users - * [*data sources create*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceCreateCall), [*data sources data point changes list*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceDataPointChangeListCall), [*data sources datasets delete*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceDatasetDeleteCall), [*data sources datasets get*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceDatasetGetCall), [*data sources datasets patch*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceDatasetPatchCall), [*data sources delete*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceDeleteCall), [*data sources get*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceGetCall), [*data sources list*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceListCall), [*data sources update*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDataSourceUpdateCall), [*dataset 
aggregate*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserDatasetAggregateCall), [*sessions delete*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserSessionDeleteCall), [*sessions list*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserSessionListCall) and [*sessions update*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/api::UserSessionUpdateCall) + * [*data sources create*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceCreateCall), [*data sources data point changes list*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceDataPointChangeListCall), [*data sources datasets delete*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceDatasetDeleteCall), [*data sources datasets get*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceDatasetGetCall), [*data sources datasets patch*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceDatasetPatchCall), [*data sources delete*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceDeleteCall), [*data sources get*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceGetCall), [*data sources list*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceListCall), [*data sources update*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDataSourceUpdateCall), [*dataset aggregate*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserDatasetAggregateCall), [*sessions delete*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserSessionDeleteCall), [*sessions list*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserSessionListCall) and [*sessions 
update*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/api::UserSessionUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/Fitness)** +* **[Hub](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/Fitness)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::CallBuilder) -* **[Resources](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::CallBuilder) +* **[Resources](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::Part)** + * **[Parts](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::Delegate) to the -[Method Builder](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::Delegate) to the +[Method Builder](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::RequestValue) and -[decodable](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::RequestValue) and +[decodable](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-fitness1/5.0.2-beta-1+20230120/google_fitness1/client::RequestValue) are moved +* [request values](https://docs.rs/google-fitness1/5.0.2+20230120/google_fitness1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/fitness1/src/api.rs b/gen/fitness1/src/api.rs index 23c974cf94..5395b2d73a 100644 --- a/gen/fitness1/src/api.rs +++ b/gen/fitness1/src/api.rs @@ -209,7 +209,7 @@ impl<'a, S> Fitness { Fitness { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://fitness.googleapis.com/fitness/v1/users/".to_string(), _root_url: "https://fitness.googleapis.com/".to_string(), } @@ -220,7 +220,7 @@ impl<'a, S> Fitness { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/fitness1/src/client.rs b/gen/fitness1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/fitness1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/fitness1/src/lib.rs b/gen/fitness1/src/lib.rs index 44d64aff71..51c6d8fbc9 100644 --- a/gen/fitness1/src/lib.rs +++ b/gen/fitness1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *fitness* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *fitness:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *fitness* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *fitness:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *fitness* *v1* API can be found at the //! [official documentation site](https://developers.google.com/fit/rest/v1/get-started). diff --git a/gen/fusiontables2-cli/Cargo.toml b/gen/fusiontables2-cli/Cargo.toml index 1fcbaf1f13..49593014b9 100644 --- a/gen/fusiontables2-cli/Cargo.toml +++ b/gen/fusiontables2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-fusiontables2-cli" -version = "4.0.1+20171117" +version = "5.0.2+20171117" authors = ["Sebastian Thiel "] description = "A complete library to interact with fusiontables (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/fusiontables2-cli" @@ -20,13 +20,13 @@ name = "fusiontables2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-fusiontables2] path = "../fusiontables2" -version = "4.0.1+20171117" +version = "5.0.2+20171117" + diff --git a/gen/fusiontables2-cli/README.md b/gen/fusiontables2-cli/README.md index a7bc9232ed..16fbc1686a 100644 --- a/gen/fusiontables2-cli/README.md +++ b/gen/fusiontables2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *fusiontables* API at revision *20171117*. The CLI is at version *4.0.1*. +This documentation was generated from the *fusiontables* API at revision *20171117*. The CLI is at version *5.0.2*. ```bash fusiontables2 [options] diff --git a/gen/fusiontables2-cli/mkdocs.yml b/gen/fusiontables2-cli/mkdocs.yml index c3224c1220..be276d84c3 100644 --- a/gen/fusiontables2-cli/mkdocs.yml +++ b/gen/fusiontables2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: fusiontables v4.0.1+20171117 +site_name: fusiontables v5.0.2+20171117 site_url: http://byron.github.io/google-apis-rs/google-fusiontables2-cli site_description: A complete library to interact with fusiontables (protocol v2) @@ -7,42 +7,48 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/fusiontables2-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['column_delete.md', 'Column', 'Delete'] -- ['column_get.md', 'Column', 'Get'] -- ['column_insert.md', 'Column', 'Insert'] -- ['column_list.md', 'Column', 'List'] -- ['column_patch.md', 'Column', 'Patch'] -- ['column_update.md', 'Column', 'Update'] -- ['query_sql.md', 'Query', 'Sql'] -- ['query_sql-get.md', 'Query', 'Sql Get'] -- ['style_delete.md', 'Style', 'Delete'] -- ['style_get.md', 'Style', 'Get'] -- ['style_insert.md', 'Style', 'Insert'] -- ['style_list.md', 'Style', 'List'] -- ['style_patch.md', 'Style', 'Patch'] -- ['style_update.md', 'Style', 'Update'] -- ['table_copy.md', 'Table', 'Copy'] -- ['table_delete.md', 'Table', 'Delete'] -- ['table_get.md', 'Table', 'Get'] -- ['table_import-rows.md', 'Table', 'Import Rows'] -- ['table_import-table.md', 'Table', 'Import Table'] -- ['table_insert.md', 'Table', 'Insert'] -- ['table_list.md', 'Table', 'List'] -- ['table_patch.md', 'Table', 'Patch'] -- ['table_refetch-sheet.md', 'Table', 'Refetch Sheet'] -- ['table_replace-rows.md', 'Table', 'Replace Rows'] -- ['table_update.md', 'Table', 
'Update'] -- ['task_delete.md', 'Task', 'Delete'] -- ['task_get.md', 'Task', 'Get'] -- ['task_list.md', 'Task', 'List'] -- ['template_delete.md', 'Template', 'Delete'] -- ['template_get.md', 'Template', 'Get'] -- ['template_insert.md', 'Template', 'Insert'] -- ['template_list.md', 'Template', 'List'] -- ['template_patch.md', 'Template', 'Patch'] -- ['template_update.md', 'Template', 'Update'] +nav: +- Home: 'index.md' +- 'Column': + - 'Delete': 'column_delete.md' + - 'Get': 'column_get.md' + - 'Insert': 'column_insert.md' + - 'List': 'column_list.md' + - 'Patch': 'column_patch.md' + - 'Update': 'column_update.md' +- 'Query': + - 'Sql': 'query_sql.md' + - 'Sql Get': 'query_sql-get.md' +- 'Style': + - 'Delete': 'style_delete.md' + - 'Get': 'style_get.md' + - 'Insert': 'style_insert.md' + - 'List': 'style_list.md' + - 'Patch': 'style_patch.md' + - 'Update': 'style_update.md' +- 'Table': + - 'Copy': 'table_copy.md' + - 'Delete': 'table_delete.md' + - 'Get': 'table_get.md' + - 'Import Rows': 'table_import-rows.md' + - 'Import Table': 'table_import-table.md' + - 'Insert': 'table_insert.md' + - 'List': 'table_list.md' + - 'Patch': 'table_patch.md' + - 'Refetch Sheet': 'table_refetch-sheet.md' + - 'Replace Rows': 'table_replace-rows.md' + - 'Update': 'table_update.md' +- 'Task': + - 'Delete': 'task_delete.md' + - 'Get': 'task_get.md' + - 'List': 'task_list.md' +- 'Template': + - 'Delete': 'template_delete.md' + - 'Get': 'template_get.md' + - 'Insert': 'template_insert.md' + - 'List': 'template_list.md' + - 'Patch': 'template_patch.md' + - 'Update': 'template_update.md' theme: readthedocs diff --git a/gen/fusiontables2-cli/src/client.rs b/gen/fusiontables2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/fusiontables2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use 
serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/fusiontables2-cli/src/main.rs b/gen/fusiontables2-cli/src/main.rs index 4e2eab2a89..4493ba540e 100644 --- a/gen/fusiontables2-cli/src/main.rs +++ b/gen/fusiontables2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_fusiontables2::{api, Error, oauth2}; +use google_fusiontables2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -254,7 +253,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -505,10 +504,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "typed" => { - call = call.typed(arg_from_str(value.unwrap_or("false"), err, "typed", "boolean")); + call = call.typed( value.map(|v| arg_from_str(v, err, "typed", "boolean")).unwrap_or(false)); }, "hdrs" => { - call = call.hdrs(arg_from_str(value.unwrap_or("false"), err, "hdrs", "boolean")); + call = call.hdrs( value.map(|v| arg_from_str(v, err, "hdrs", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -574,10 +573,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "typed" => { - call = call.typed(arg_from_str(value.unwrap_or("false"), err, "typed", "boolean")); + call = call.typed( value.map(|v| arg_from_str(v, err, 
"typed", "boolean")).unwrap_or(false)); }, "hdrs" => { - call = call.hdrs(arg_from_str(value.unwrap_or("false"), err, "hdrs", "boolean")); + call = call.hdrs( value.map(|v| arg_from_str(v, err, "hdrs", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -864,7 +863,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1164,7 +1163,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "copy-presentation" => { - call = call.copy_presentation(arg_from_str(value.unwrap_or("false"), err, "copy-presentation", "boolean")); + call = call.copy_presentation( value.map(|v| arg_from_str(v, err, "copy-presentation", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1316,13 +1315,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-line" => { - call = call.start_line(arg_from_str(value.unwrap_or("-0"), err, "start-line", "integer")); + call = call.start_line( value.map(|v| arg_from_str(v, err, "start-line", "int32")).unwrap_or(-0)); }, "is-strict" => { - call = call.is_strict(arg_from_str(value.unwrap_or("false"), err, "is-strict", "boolean")); + call = call.is_strict( value.map(|v| arg_from_str(v, err, "is-strict", "boolean")).unwrap_or(false)); }, "end-line" => { - call = call.end_line(arg_from_str(value.unwrap_or("-0"), err, "end-line", "integer")); + call = call.end_line( value.map(|v| arg_from_str(v, err, "end-line", "int32")).unwrap_or(-0)); }, "encoding" => { call = call.encoding(value.unwrap_or("")); @@ -1548,7 +1547,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| 
arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1648,7 +1647,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "replace-view-definition" => { - call = call.replace_view_definition(arg_from_str(value.unwrap_or("false"), err, "replace-view-definition", "boolean")); + call = call.replace_view_definition( value.map(|v| arg_from_str(v, err, "replace-view-definition", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1756,13 +1755,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-line" => { - call = call.start_line(arg_from_str(value.unwrap_or("-0"), err, "start-line", "integer")); + call = call.start_line( value.map(|v| arg_from_str(v, err, "start-line", "int32")).unwrap_or(-0)); }, "is-strict" => { - call = call.is_strict(arg_from_str(value.unwrap_or("false"), err, "is-strict", "boolean")); + call = call.is_strict( value.map(|v| arg_from_str(v, err, "is-strict", "boolean")).unwrap_or(false)); }, "end-line" => { - call = call.end_line(arg_from_str(value.unwrap_or("-0"), err, "end-line", "integer")); + call = call.end_line( value.map(|v| arg_from_str(v, err, "end-line", "int32")).unwrap_or(-0)); }, "encoding" => { call = call.encoding(value.unwrap_or("")); @@ -1871,7 +1870,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "replace-view-definition" => { - call = call.replace_view_definition(arg_from_str(value.unwrap_or("false"), err, "replace-view-definition", "boolean")); + call = call.replace_view_definition( value.map(|v| arg_from_str(v, err, "replace-view-definition", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2023,13 +2022,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", 
"uint32")).unwrap_or(0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -2276,7 +2275,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -3655,7 +3654,7 @@ async fn main() { let mut app = App::new("fusiontables2") .author("Sebastian Thiel ") - .version("4.0.1+20171117") + .version("5.0.2+20171117") .about("API for working with Fusion Tables data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_fusiontables2_cli") .arg(Arg::with_name("url") diff --git a/gen/fusiontables2/Cargo.toml b/gen/fusiontables2/Cargo.toml index d678cae328..a45eed083f 100644 --- a/gen/fusiontables2/Cargo.toml +++ b/gen/fusiontables2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-fusiontables2" -version = "5.0.2-beta-1+20171117" +version = "5.0.2+20171117" authors = ["Sebastian Thiel "] description = "A complete library to interact with fusiontables (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/fusiontables2" homepage = "https://developers.google.com/fusiontables" -documentation = "https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117" +documentation = "https://docs.rs/google-fusiontables2/5.0.2+20171117" license = "MIT" keywords = ["fusiontables", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/fusiontables2/README.md b/gen/fusiontables2/README.md index 93c8fca520..8693ba4a5d 100644 --- a/gen/fusiontables2/README.md +++ 
b/gen/fusiontables2/README.md @@ -5,38 +5,38 @@ DO NOT EDIT ! --> The `google-fusiontables2` library allows access to all features of the *Google fusiontables* service. -This documentation was generated from *fusiontables* crate version *5.0.2-beta-1+20171117*, where *20171117* is the exact revision of the *fusiontables:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *fusiontables* crate version *5.0.2+20171117*, where *20171117* is the exact revision of the *fusiontables:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *fusiontables* *v2* API can be found at the [official documentation site](https://developers.google.com/fusiontables). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/Fusiontables) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/Fusiontables) ... 
-* [column](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::Column) - * [*delete*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::ColumnDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::ColumnGetCall), [*insert*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::ColumnInsertCall), [*list*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::ColumnListCall), [*patch*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::ColumnPatchCall) and [*update*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::ColumnUpdateCall) +* [column](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::Column) + * [*delete*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::ColumnDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::ColumnGetCall), [*insert*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::ColumnInsertCall), [*list*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::ColumnListCall), [*patch*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::ColumnPatchCall) and [*update*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::ColumnUpdateCall) * query - * [*sql*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::QuerySqlCall) and [*sql get*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::QuerySqlGetCall) + * [*sql*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::QuerySqlCall) and [*sql get*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::QuerySqlGetCall) * style - * 
[*delete*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::StyleDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::StyleGetCall), [*insert*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::StyleInsertCall), [*list*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::StyleListCall), [*patch*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::StylePatchCall) and [*update*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::StyleUpdateCall) -* [table](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::Table) - * [*copy*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableCopyCall), [*delete*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableGetCall), [*import rows*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableImportRowCall), [*import table*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableImportTableCall), [*insert*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableInsertCall), [*list*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableListCall), [*patch*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TablePatchCall), [*refetch sheet*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableRefetchSheetCall), [*replace rows*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableReplaceRowCall) and 
[*update*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableUpdateCall) -* [task](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::Task) - * [*delete*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TaskDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TaskGetCall) and [*list*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TaskListCall) -* [template](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::Template) - * [*delete*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TemplateDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TemplateGetCall), [*insert*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TemplateInsertCall), [*list*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TemplateListCall), [*patch*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TemplatePatchCall) and [*update*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TemplateUpdateCall) + * [*delete*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::StyleDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::StyleGetCall), [*insert*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::StyleInsertCall), [*list*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::StyleListCall), [*patch*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::StylePatchCall) and [*update*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::StyleUpdateCall) +* 
[table](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::Table) + * [*copy*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableCopyCall), [*delete*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableGetCall), [*import rows*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableImportRowCall), [*import table*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableImportTableCall), [*insert*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableInsertCall), [*list*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableListCall), [*patch*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TablePatchCall), [*refetch sheet*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableRefetchSheetCall), [*replace rows*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableReplaceRowCall) and [*update*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableUpdateCall) +* [task](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::Task) + * [*delete*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TaskDeleteCall), [*get*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TaskGetCall) and [*list*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TaskListCall) +* [template](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::Template) + * [*delete*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TemplateDeleteCall), 
[*get*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TemplateGetCall), [*insert*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TemplateInsertCall), [*list*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TemplateListCall), [*patch*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TemplatePatchCall) and [*update*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TemplateUpdateCall) Upload supported by ... -* [*import rows table*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableImportRowCall) -* [*import table table*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableImportTableCall) -* [*replace rows table*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::TableReplaceRowCall) +* [*import rows table*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableImportRowCall) +* [*import table table*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableImportTableCall) +* [*replace rows table*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::TableReplaceRowCall) Download supported by ... -* [*sql query*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::QuerySqlCall) -* [*sql get query*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/api::QuerySqlGetCall) +* [*sql query*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::QuerySqlCall) +* [*sql get query*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/api::QuerySqlGetCall) @@ -44,17 +44,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/Fusiontables)** +* **[Hub](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/Fusiontables)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::CallBuilder) -* **[Resources](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::CallBuilder) +* **[Resources](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::Part)** + * **[Parts](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -147,17 +147,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -167,29 +167,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::Delegate) to the -[Method Builder](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::Delegate) to the +[Method Builder](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::RequestValue) and -[decodable](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::RequestValue) and +[decodable](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-fusiontables2/5.0.2-beta-1+20171117/google_fusiontables2/client::RequestValue) are moved +* [request values](https://docs.rs/google-fusiontables2/5.0.2+20171117/google_fusiontables2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/fusiontables2/src/api.rs b/gen/fusiontables2/src/api.rs index 446971057a..3322f5bf38 100644 --- a/gen/fusiontables2/src/api.rs +++ b/gen/fusiontables2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Fusiontables { Fusiontables { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/fusiontables/v2/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -152,7 +152,7 @@ impl<'a, S> Fusiontables { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/fusiontables2/src/client.rs b/gen/fusiontables2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/fusiontables2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/fusiontables2/src/lib.rs b/gen/fusiontables2/src/lib.rs index 6c4ae7d6a5..bc621b0568 100644 --- a/gen/fusiontables2/src/lib.rs +++ b/gen/fusiontables2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *fusiontables* crate version *5.0.2-beta-1+20171117*, where *20171117* is the exact revision of the *fusiontables:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *fusiontables* crate version *5.0.2+20171117*, where *20171117* is the exact revision of the *fusiontables:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *fusiontables* *v2* API can be found at the //! [official documentation site](https://developers.google.com/fusiontables). diff --git a/gen/games1-cli/Cargo.toml b/gen/games1-cli/Cargo.toml index f1c5f41a12..cc660e9e6f 100644 --- a/gen/games1-cli/Cargo.toml +++ b/gen/games1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-games1-cli" -version = "4.0.1+20220217" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Games (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/games1-cli" @@ -20,13 +20,13 @@ name = "games1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-games1] path = "../games1" -version = "4.0.1+20220217" +version = "5.0.2+20230112" + diff --git a/gen/games1-cli/README.md b/gen/games1-cli/README.md index d81e06f5f0..4716bf8333 100644 --- a/gen/games1-cli/README.md +++ b/gen/games1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the 
*Games* API at revision *20220217*. The CLI is at version *4.0.1*. +This documentation was generated from the *Games* API at revision *20230112*. The CLI is at version *5.0.2*. ```bash games1 [options] @@ -55,6 +55,7 @@ games1 [options] list-categories-by-player [-p ]... [-o ] players get [-p ]... [-o ] + get-scoped-player-ids [-p ]... [-o ] list [-p ]... [-o ] revisions check [-p ]... [-o ] diff --git a/gen/games1-cli/mkdocs.yml b/gen/games1-cli/mkdocs.yml index ce749c7bdd..c370dd6d00 100644 --- a/gen/games1-cli/mkdocs.yml +++ b/gen/games1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Games v4.0.1+20220217 +site_name: Games v5.0.2+20230112 site_url: http://byron.github.io/google-apis-rs/google-games1-cli site_description: A complete library to interact with Games (protocol v1) @@ -7,37 +7,49 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/games1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['achievement-definitions_list.md', 'Achievement Definitions', 'List'] -- ['achievements_increment.md', 'Achievements', 'Increment'] -- ['achievements_list.md', 'Achievements', 'List'] -- ['achievements_reveal.md', 'Achievements', 'Reveal'] -- ['achievements_set-steps-at-least.md', 'Achievements', 'Set Steps At Least'] -- ['achievements_unlock.md', 'Achievements', 'Unlock'] -- ['achievements_update-multiple.md', 'Achievements', 'Update Multiple'] -- ['applications_get.md', 'Applications', 'Get'] -- ['applications_get-end-point.md', 'Applications', 'Get End Point'] -- ['applications_played.md', 'Applications', 'Played'] -- ['applications_verify.md', 'Applications', 'Verify'] -- ['events_list-by-player.md', 'Events', 'List By Player'] -- ['events_list-definitions.md', 'Events', 'List Definitions'] -- ['events_record.md', 'Events', 'Record'] -- ['leaderboards_get.md', 'Leaderboards', 'Get'] -- ['leaderboards_list.md', 'Leaderboards', 'List'] -- ['metagame_get-metagame-config.md', 'Metagame', 'Get Metagame Config'] -- 
['metagame_list-categories-by-player.md', 'Metagame', 'List Categories By Player'] -- ['players_get.md', 'Players', 'Get'] -- ['players_list.md', 'Players', 'List'] -- ['revisions_check.md', 'Revisions', 'Check'] -- ['scores_get.md', 'Scores', 'Get'] -- ['scores_list.md', 'Scores', 'List'] -- ['scores_list-window.md', 'Scores', 'List Window'] -- ['scores_submit.md', 'Scores', 'Submit'] -- ['scores_submit-multiple.md', 'Scores', 'Submit Multiple'] -- ['snapshots_get.md', 'Snapshots', 'Get'] -- ['snapshots_list.md', 'Snapshots', 'List'] -- ['stats_get.md', 'Stats', 'Get'] +nav: +- Home: 'index.md' +- 'Achievement Definitions': + - 'List': 'achievement-definitions_list.md' +- 'Achievements': + - 'Increment': 'achievements_increment.md' + - 'List': 'achievements_list.md' + - 'Reveal': 'achievements_reveal.md' + - 'Set Steps At Least': 'achievements_set-steps-at-least.md' + - 'Unlock': 'achievements_unlock.md' + - 'Update Multiple': 'achievements_update-multiple.md' +- 'Applications': + - 'Get': 'applications_get.md' + - 'Get End Point': 'applications_get-end-point.md' + - 'Played': 'applications_played.md' + - 'Verify': 'applications_verify.md' +- 'Events': + - 'List By Player': 'events_list-by-player.md' + - 'List Definitions': 'events_list-definitions.md' + - 'Record': 'events_record.md' +- 'Leaderboards': + - 'Get': 'leaderboards_get.md' + - 'List': 'leaderboards_list.md' +- 'Metagame': + - 'Get Metagame Config': 'metagame_get-metagame-config.md' + - 'List Categories By Player': 'metagame_list-categories-by-player.md' +- 'Players': + - 'Get': 'players_get.md' + - 'Get Scoped Player Ids': 'players_get-scoped-player-ids.md' + - 'List': 'players_list.md' +- 'Revisions': + - 'Check': 'revisions_check.md' +- 'Scores': + - 'Get': 'scores_get.md' + - 'List': 'scores_list.md' + - 'List Window': 'scores_list-window.md' + - 'Submit': 'scores_submit.md' + - 'Submit Multiple': 'scores_submit-multiple.md' +- 'Snapshots': + - 'Get': 'snapshots_get.md' + - 'List': 
'snapshots_list.md' +- 'Stats': + - 'Get': 'stats_get.md' theme: readthedocs diff --git a/gen/games1-cli/src/client.rs b/gen/games1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/games1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/games1-cli/src/main.rs b/gen/games1-cli/src/main.rs index 4b1dfff650..50665dbb5c 100644 --- a/gen/games1-cli/src/main.rs +++ b/gen/games1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_games1::{api, Error, oauth2}; +use google_games1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -121,7 +120,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "request-id" => { - call = call.request_id(value.unwrap_or("")); + call = call.request_id( value.map(|v| arg_from_str(v, err, "request-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -183,7 +182,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -701,7 +700,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -763,7 +762,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -972,7 +971,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -1086,7 +1085,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -1144,6 +1143,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "player-id-consistency-token" => { + call = call.player_id_consistency_token(value.unwrap_or("")); + }, "language" => { call = call.language(value.unwrap_or("")); }, @@ -1160,7 +1162,59 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["language"].iter().map(|v|*v)); + v.extend(["language", "player-id-consistency-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if 
dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _players_get_scoped_player_ids(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.players().get_scoped_player_ids(); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); v } )); } } @@ -1204,7 +1258,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -1318,7 +1372,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - 
call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -1383,7 +1437,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -1442,16 +1496,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-top-if-absent" => { - call = call.return_top_if_absent(arg_from_str(value.unwrap_or("false"), err, "return-top-if-absent", "boolean")); + call = call.return_top_if_absent( value.map(|v| arg_from_str(v, err, "return-top-if-absent", "boolean")).unwrap_or(false)); }, "results-above" => { - call = call.results_above(arg_from_str(value.unwrap_or("-0"), err, "results-above", "integer")); + call = call.results_above( value.map(|v| arg_from_str(v, err, "results-above", "int32")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -1717,7 +1771,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -1933,6 +1987,9 @@ where ("get", Some(opt)) => { call_result = self._players_get(opt, 
dry_run, &mut err).await; }, + ("get-scoped-player-ids", Some(opt)) => { + call_result = self._players_get_scoped_player_ids(opt, dry_run, &mut err).await; + }, ("list", Some(opt)) => { call_result = self._players_list(opt, dry_run, &mut err).await; }, @@ -2458,7 +2515,7 @@ async fn main() { ]), ]), - ("players", "methods: 'get' and 'list'", vec![ + ("players", "methods: 'get', 'get-scoped-player-ids' and 'list'", vec![ ("get", Some(r##"Retrieves the Player resource with the given ID. To retrieve the player for the currently authenticated user, set `playerId` to `me`."##), "Details at http://byron.github.io/google-apis-rs/google_games1_cli/players_get", @@ -2475,6 +2532,22 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get-scoped-player-ids", + Some(r##"Retrieves scoped player identifiers for currently authenticated user."##), + "Details at http://byron.github.io/google-apis-rs/google_games1_cli/players_get-scoped-player-ids", + vec![ + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2755,7 +2828,7 @@ async fn main() { let mut app = App::new("games1") .author("Sebastian Thiel ") - .version("4.0.1+20220217") + .version("5.0.2+20230112") .about("The Google Play games service allows developers to enhance games with social leaderboards, achievements, game state, sign-in with Google, and more.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_games1_cli") .arg(Arg::with_name("url") diff --git a/gen/games1/Cargo.toml b/gen/games1/Cargo.toml index 8ec2de355a..b8cf89a1ae 100644 --- a/gen/games1/Cargo.toml +++ b/gen/games1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = 
"google-games1" -version = "5.0.2-beta-1+20230112" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Games (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/games1" homepage = "https://developers.google.com/games/" -documentation = "https://docs.rs/google-games1/5.0.2-beta-1+20230112" +documentation = "https://docs.rs/google-games1/5.0.2+20230112" license = "MIT" keywords = ["games", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/games1/README.md b/gen/games1/README.md index ebeb2d44af..a3fe3c9951 100644 --- a/gen/games1/README.md +++ b/gen/games1/README.md @@ -5,36 +5,36 @@ DO NOT EDIT ! --> The `google-games1` library allows access to all features of the *Google Games* service. -This documentation was generated from *Games* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *games:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Games* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *games:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Games* *v1* API can be found at the [official documentation site](https://developers.google.com/games/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/Games) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-games1/5.0.2+20230112/google_games1/Games) ... 
-* [achievement definitions](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::AchievementDefinition) - * [*list*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::AchievementDefinitionListCall) +* [achievement definitions](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::AchievementDefinition) + * [*list*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::AchievementDefinitionListCall) * achievements - * [*increment*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::AchievementIncrementCall), [*list*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::AchievementListCall), [*reveal*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::AchievementRevealCall), [*set steps at least*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::AchievementSetStepsAtLeastCall), [*unlock*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::AchievementUnlockCall) and [*update multiple*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::AchievementUpdateMultipleCall) -* [applications](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::Application) - * [*get*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ApplicationGetCall), [*get end point*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ApplicationGetEndPointCall), [*played*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ApplicationPlayedCall) and [*verify*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ApplicationVerifyCall) + * [*increment*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::AchievementIncrementCall), [*list*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::AchievementListCall), [*reveal*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::AchievementRevealCall), 
[*set steps at least*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::AchievementSetStepsAtLeastCall), [*unlock*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::AchievementUnlockCall) and [*update multiple*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::AchievementUpdateMultipleCall) +* [applications](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::Application) + * [*get*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ApplicationGetCall), [*get end point*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ApplicationGetEndPointCall), [*played*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ApplicationPlayedCall) and [*verify*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ApplicationVerifyCall) * events - * [*list by player*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::EventListByPlayerCall), [*list definitions*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::EventListDefinitionCall) and [*record*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::EventRecordCall) -* [leaderboards](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::Leaderboard) - * [*get*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::LeaderboardGetCall) and [*list*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::LeaderboardListCall) + * [*list by player*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::EventListByPlayerCall), [*list definitions*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::EventListDefinitionCall) and [*record*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::EventRecordCall) +* [leaderboards](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::Leaderboard) + * 
[*get*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::LeaderboardGetCall) and [*list*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::LeaderboardListCall) * metagame - * [*get metagame config*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::MetagameGetMetagameConfigCall) and [*list categories by player*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::MetagameListCategoriesByPlayerCall) -* [players](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::Player) - * [*get*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::PlayerGetCall), [*get scoped player ids*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::PlayerGetScopedPlayerIdCall) and [*list*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::PlayerListCall) + * [*get metagame config*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::MetagameGetMetagameConfigCall) and [*list categories by player*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::MetagameListCategoriesByPlayerCall) +* [players](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::Player) + * [*get*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::PlayerGetCall), [*get scoped player ids*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::PlayerGetScopedPlayerIdCall) and [*list*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::PlayerListCall) * revisions - * [*check*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::RevisionCheckCall) + * [*check*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::RevisionCheckCall) * scores - * [*get*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ScoreGetCall), [*list*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ScoreListCall), [*list 
window*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ScoreListWindowCall), [*submit*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ScoreSubmitCall) and [*submit multiple*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::ScoreSubmitMultipleCall) -* [snapshots](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::Snapshot) - * [*get*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::SnapshotGetCall) and [*list*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::SnapshotListCall) + * [*get*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ScoreGetCall), [*list*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ScoreListCall), [*list window*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ScoreListWindowCall), [*submit*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ScoreSubmitCall) and [*submit multiple*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::ScoreSubmitMultipleCall) +* [snapshots](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::Snapshot) + * [*get*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::SnapshotGetCall) and [*list*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::SnapshotListCall) * stats - * [*get*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/api::StatGetCall) + * [*get*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/api::StatGetCall) @@ -43,17 +43,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/Games)** +* **[Hub](https://docs.rs/google-games1/5.0.2+20230112/google_games1/Games)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::CallBuilder) -* **[Resources](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::CallBuilder) +* **[Resources](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::Part)** + * **[Parts](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -143,17 +143,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -163,29 +163,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::Delegate) to the -[Method Builder](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::Delegate) to the +[Method Builder](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::RequestValue) and -[decodable](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::RequestValue) and +[decodable](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-games1/5.0.2-beta-1+20230112/google_games1/client::RequestValue) are moved +* [request values](https://docs.rs/google-games1/5.0.2+20230112/google_games1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/games1/src/api.rs b/gen/games1/src/api.rs index 5fc73ab846..469fdea2b3 100644 --- a/gen/games1/src/api.rs +++ b/gen/games1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Games { Games { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://games.googleapis.com/".to_string(), _root_url: "https://games.googleapis.com/".to_string(), } @@ -166,7 +166,7 @@ impl<'a, S> Games { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/games1/src/client.rs b/gen/games1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/games1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/games1/src/lib.rs b/gen/games1/src/lib.rs index 40a5984cd2..7f9e518a14 100644 --- a/gen/games1/src/lib.rs +++ b/gen/games1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Games* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *games:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Games* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *games:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Games* *v1* API can be found at the //! [official documentation site](https://developers.google.com/games/). diff --git a/gen/gamesconfiguration1_configuration-cli/Cargo.toml b/gen/gamesconfiguration1_configuration-cli/Cargo.toml index e6b9fed728..4016070fa0 100644 --- a/gen/gamesconfiguration1_configuration-cli/Cargo.toml +++ b/gen/gamesconfiguration1_configuration-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-gamesconfiguration1_configuration-cli" -version = "4.0.1+20220217" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Games Configuration (protocol v1configuration)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gamesconfiguration1_configuration-cli" @@ -20,13 +20,13 @@ name = "gamesconfiguration1-configuration" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-gamesconfiguration1_configuration] path = "../gamesconfiguration1_configuration" -version = "4.0.1+20220217" +version = "5.0.2+20230112" + diff --git a/gen/gamesconfiguration1_configuration-cli/README.md 
b/gen/gamesconfiguration1_configuration-cli/README.md index 8334ed4f29..752ecb10f3 100644 --- a/gen/gamesconfiguration1_configuration-cli/README.md +++ b/gen/gamesconfiguration1_configuration-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Games Configuration* API at revision *20220217*. The CLI is at version *4.0.1*. +This documentation was generated from the *Games Configuration* API at revision *20230112*. The CLI is at version *5.0.2*. ```bash gamesconfiguration1-configuration [options] @@ -35,8 +35,6 @@ gamesconfiguration1-configuration [options] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] update (-r )... [-p ]... [-o ] - image-configurations - upload (-u simple -f [-m ]) [-p ]... [-o ] leaderboard-configurations delete [-p ]... get [-p ]... [-o ] diff --git a/gen/gamesconfiguration1_configuration-cli/mkdocs.yml b/gen/gamesconfiguration1_configuration-cli/mkdocs.yml index c40b182c7f..2fd524041e 100644 --- a/gen/gamesconfiguration1_configuration-cli/mkdocs.yml +++ b/gen/gamesconfiguration1_configuration-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Games Configuration v4.0.1+20220217 +site_name: Games Configuration v5.0.2+20230112 site_url: http://byron.github.io/google-apis-rs/google-gamesconfiguration1_configuration-cli site_description: A complete library to interact with Games Configuration (protocol v1configuration) @@ -7,19 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/gamesconfigurati docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['achievement-configurations_delete.md', 'Achievement Configurations', 'Delete'] -- ['achievement-configurations_get.md', 'Achievement Configurations', 'Get'] -- ['achievement-configurations_insert.md', 'Achievement Configurations', 'Insert'] -- ['achievement-configurations_list.md', 'Achievement Configurations', 'List'] -- 
['achievement-configurations_update.md', 'Achievement Configurations', 'Update'] -- ['image-configurations_upload.md', 'Image Configurations', 'Upload'] -- ['leaderboard-configurations_delete.md', 'Leaderboard Configurations', 'Delete'] -- ['leaderboard-configurations_get.md', 'Leaderboard Configurations', 'Get'] -- ['leaderboard-configurations_insert.md', 'Leaderboard Configurations', 'Insert'] -- ['leaderboard-configurations_list.md', 'Leaderboard Configurations', 'List'] -- ['leaderboard-configurations_update.md', 'Leaderboard Configurations', 'Update'] +nav: +- Home: 'index.md' +- 'Achievement Configurations': + - 'Delete': 'achievement-configurations_delete.md' + - 'Get': 'achievement-configurations_get.md' + - 'Insert': 'achievement-configurations_insert.md' + - 'List': 'achievement-configurations_list.md' + - 'Update': 'achievement-configurations_update.md' +- 'Leaderboard Configurations': + - 'Delete': 'leaderboard-configurations_delete.md' + - 'Get': 'leaderboard-configurations_get.md' + - 'Insert': 'leaderboard-configurations_insert.md' + - 'List': 'leaderboard-configurations_list.md' + - 'Update': 'leaderboard-configurations_update.md' theme: readthedocs diff --git a/gen/gamesconfiguration1_configuration-cli/src/client.rs b/gen/gamesconfiguration1_configuration-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/gamesconfiguration1_configuration-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - 
Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/gamesconfiguration1_configuration-cli/src/main.rs b/gen/gamesconfiguration1_configuration-cli/src/main.rs index 6b5f25a752..6c0e8d7941 100644 --- a/gen/gamesconfiguration1_configuration-cli/src/main.rs +++ b/gen/gamesconfiguration1_configuration-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_gamesconfiguration1_configuration::{api, Error, oauth2}; +use google_gamesconfiguration1_configuration::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -259,7 +258,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -410,61 +409,6 @@ where } } - async fn _image_configurations_upload(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.image_configurations().upload(opt.value_of("resource-id").unwrap_or(""), opt.value_of("image-type").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", 
key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let vals = opt.values_of("mode").unwrap().collect::>(); - let protocol = calltype_from_str(vals[0], ["simple"].iter().map(|&v| v.to_string()).collect(), err); - let mut input_file = input_file_from_opts(vals[1], err); - let mime_type = input_mime_from_opts(opt.value_of("mime").unwrap_or("application/octet-stream"), err); - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Upload(UploadProtocol::Simple) => call.upload(input_file.unwrap(), mime_type.unwrap()).await, - CallType::Standard => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _leaderboard_configurations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.leaderboard_configurations().delete(opt.value_of("leaderboard-id").unwrap_or("")); @@ -687,7 +631,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let 
mut found = false; @@ -880,17 +824,6 @@ where } } }, - ("image-configurations", Some(opt)) => { - match opt.subcommand() { - ("upload", Some(opt)) => { - call_result = self._image_configurations_upload(opt, dry_run, &mut err).await; - }, - _ => { - err.issues.push(CLIError::MissingMethodError("image-configurations".to_string())); - writeln!(io::stderr(), "{}\n", opt.usage()).ok(); - } - } - }, ("leaderboard-configurations", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { @@ -986,7 +919,6 @@ where #[tokio::main] async fn main() { let mut exit_status = 0i32; - let upload_value_names = ["mode", "file"]; let arg_data = [ ("achievement-configurations", "methods: 'delete', 'get', 'insert', 'list' and 'update'", vec![ ("delete", @@ -1107,43 +1039,6 @@ async fn main() { ]), ]), - ("image-configurations", "methods: 'upload'", vec![ - ("upload", - Some(r##"Uploads an image for a resource with the given ID and image type."##), - "Details at http://byron.github.io/google-apis-rs/google_gamesconfiguration1_configuration_cli/image-configurations_upload", - vec![ - (Some(r##"resource-id"##), - None, - Some(r##"The ID of the resource used by this method."##), - Some(true), - Some(false)), - - (Some(r##"image-type"##), - None, - Some(r##"Selects which image in a resource for this method."##), - Some(true), - Some(false)), - - (Some(r##"mode"##), - Some(r##"u"##), - Some(r##"Specify the upload protocol (simple) and the file to upload"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ]), - ("leaderboard-configurations", "methods: 'delete', 'get', 'insert', 'list' and 'update'", vec![ ("delete", Some(r##"Delete the leaderboard configuration with the given ID."##), @@ -1267,7 +1162,7 @@ 
async fn main() { let mut app = App::new("gamesconfiguration1-configuration") .author("Sebastian Thiel ") - .version("4.0.1+20220217") + .version("5.0.2+20230112") .about("The Google Play Game Services Publishing API allows developers to configure their games in Game Services.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_gamesconfiguration1_configuration_cli") .arg(Arg::with_name("url") @@ -1320,17 +1215,6 @@ async fn main() { if let &Some(multi) = multi { arg = arg.multiple(multi); } - if arg_name_str == "mode" { - arg = arg.number_of_values(2); - arg = arg.value_names(&upload_value_names); - - scmd = scmd.arg(Arg::with_name("mime") - .short("m") - .requires("mode") - .required(false) - .help("The file's mime time, like 'application/octet-stream'") - .takes_value(true)); - } scmd = scmd.arg(arg); } mcmd = mcmd.subcommand(scmd); diff --git a/gen/gamesconfiguration1_configuration/Cargo.toml b/gen/gamesconfiguration1_configuration/Cargo.toml index 25b6febe10..0685f4f75d 100644 --- a/gen/gamesconfiguration1_configuration/Cargo.toml +++ b/gen/gamesconfiguration1_configuration/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-gamesconfiguration1_configuration" -version = "5.0.2-beta-1+20230112" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Games Configuration (protocol v1configuration)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gamesconfiguration1_configuration" homepage = "https://developers.google.com/games/" -documentation = "https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112" +documentation = "https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112" license = "MIT" keywords = ["gamesConfiguration", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/gamesconfiguration1_configuration/README.md b/gen/gamesconfiguration1_configuration/README.md index 
76df1ba01c..72ca58a745 100644 --- a/gen/gamesconfiguration1_configuration/README.md +++ b/gen/gamesconfiguration1_configuration/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-gamesconfiguration1_configuration` library allows access to all features of the *Google Games Configuration* service. -This documentation was generated from *Games Configuration* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *gamesConfiguration:v1configuration* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Games Configuration* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *gamesConfiguration:v1configuration* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Games Configuration* *v1_configuration* API can be found at the [official documentation site](https://developers.google.com/games/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/GamesConfiguration) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/GamesConfiguration) ... 
-* [achievement configurations](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::AchievementConfiguration) - * [*delete*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationDeleteCall), [*get*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationGetCall), [*insert*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationInsertCall), [*list*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationListCall) and [*update*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationUpdateCall) -* [leaderboard configurations](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfiguration) - * [*delete*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationDeleteCall), [*get*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationGetCall), [*insert*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationInsertCall), [*list*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationListCall) and 
[*update*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationUpdateCall) +* [achievement configurations](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::AchievementConfiguration) + * [*delete*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationDeleteCall), [*get*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationGetCall), [*insert*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationInsertCall), [*list*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationListCall) and [*update*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::AchievementConfigurationUpdateCall) +* [leaderboard configurations](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfiguration) + * [*delete*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationDeleteCall), [*get*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationGetCall), [*insert*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationInsertCall), 
[*list*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationListCall) and [*update*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/api::LeaderboardConfigurationUpdateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/GamesConfiguration)** +* **[Hub](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/GamesConfiguration)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::CallBuilder) -* **[Resources](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::CallBuilder) +* **[Resources](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and 
*Parts* - * **[Parts](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::Part)** + * **[Parts](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::CallBuilder)** +* **[Activities](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::Delegate) to the -[Method Builder](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::Delegate) to the +[Method Builder](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::RequestValue) and -[decodable](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::RequestValue) and +[decodable](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2-beta-1+20230112/google_gamesconfiguration1_configuration/client::RequestValue) are moved +* [request values](https://docs.rs/google-gamesconfiguration1_configuration/5.0.2+20230112/google_gamesconfiguration1_configuration/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/gamesconfiguration1_configuration/src/api.rs b/gen/gamesconfiguration1_configuration/src/api.rs index 519bdc6c60..c777854233 100644 --- a/gen/gamesconfiguration1_configuration/src/api.rs +++ b/gen/gamesconfiguration1_configuration/src/api.rs @@ -121,7 +121,7 @@ impl<'a, S> GamesConfiguration { GamesConfiguration { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://gamesconfiguration.googleapis.com/".to_string(), _root_url: "https://gamesconfiguration.googleapis.com/".to_string(), } @@ -135,7 +135,7 @@ impl<'a, S> GamesConfiguration { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/gamesconfiguration1_configuration/src/client.rs b/gen/gamesconfiguration1_configuration/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/gamesconfiguration1_configuration/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - 
Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. 
The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. 
- fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. 
- /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. 
- MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." 
- ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. -pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. 
-#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/gamesconfiguration1_configuration/src/lib.rs b/gen/gamesconfiguration1_configuration/src/lib.rs index 5a8d7a5dd4..07686cf32d 100644 --- a/gen/gamesconfiguration1_configuration/src/lib.rs +++ b/gen/gamesconfiguration1_configuration/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Games Configuration* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *gamesConfiguration:v1configuration* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Games Configuration* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *gamesConfiguration:v1configuration* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Games Configuration* *v1_configuration* API can be found at the //! [official documentation site](https://developers.google.com/games/). diff --git a/gen/gameservices1-cli/Cargo.toml b/gen/gameservices1-cli/Cargo.toml index 6552d8ee87..a25fe86acf 100644 --- a/gen/gameservices1-cli/Cargo.toml +++ b/gen/gameservices1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-gameservices1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Game Services (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gameservices1-cli" @@ -20,13 +20,13 @@ name = "gameservices1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-gameservices1] path = "../gameservices1" -version = "4.0.1+20220223" +version = "5.0.2+20230105" + diff --git a/gen/gameservices1-cli/README.md b/gen/gameservices1-cli/README.md index cf14f16cdf..c8111d5a7b 100644 --- a/gen/gameservices1-cli/README.md +++ 
b/gen/gameservices1-cli/README.md @@ -25,47 +25,20 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Game Services* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *Game Services* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash gameservices1 [options] projects - locations-game-server-deployments-configs-create (-r )... [-p ]... [-o ] - locations-game-server-deployments-configs-delete [-p ]... [-o ] - locations-game-server-deployments-configs-get [-p ]... [-o ] - locations-game-server-deployments-configs-list [-p ]... [-o ] - locations-game-server-deployments-create (-r )... [-p ]... [-o ] - locations-game-server-deployments-delete [-p ]... [-o ] - locations-game-server-deployments-fetch-deployment-state (-r )... [-p ]... [-o ] - locations-game-server-deployments-get [-p ]... [-o ] locations-game-server-deployments-get-iam-policy [-p ]... [-o ] - locations-game-server-deployments-get-rollout [-p ]... [-o ] - locations-game-server-deployments-list [-p ]... [-o ] - locations-game-server-deployments-patch (-r )... [-p ]... [-o ] - locations-game-server-deployments-preview-rollout (-r )... [-p ]... [-o ] locations-game-server-deployments-set-iam-policy (-r )... [-p ]... [-o ] locations-game-server-deployments-test-iam-permissions (-r )... [-p ]... [-o ] - locations-game-server-deployments-update-rollout (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-list [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... [-o ] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] - locations-realms-create (-r )... [-p ]... [-o ] - locations-realms-delete [-p ]... [-o ] - locations-realms-game-server-clusters-create (-r )... [-p ]... [-o ] - locations-realms-game-server-clusters-delete [-p ]... 
[-o ] - locations-realms-game-server-clusters-get [-p ]... [-o ] - locations-realms-game-server-clusters-list [-p ]... [-o ] - locations-realms-game-server-clusters-patch (-r )... [-p ]... [-o ] - locations-realms-game-server-clusters-preview-create (-r )... [-p ]... [-o ] - locations-realms-game-server-clusters-preview-delete [-p ]... [-o ] - locations-realms-game-server-clusters-preview-update (-r )... [-p ]... [-o ] - locations-realms-get [-p ]... [-o ] - locations-realms-list [-p ]... [-o ] - locations-realms-patch (-r )... [-p ]... [-o ] - locations-realms-preview-update (-r )... [-p ]... [-o ] gameservices1 --help Configuration: diff --git a/gen/gameservices1-cli/mkdocs.yml b/gen/gameservices1-cli/mkdocs.yml index 514b1f7d39..c7b712b0a1 100644 --- a/gen/gameservices1-cli/mkdocs.yml +++ b/gen/gameservices1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Game Services v4.0.1+20220223 +site_name: Game Services v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-gameservices1-cli site_description: A complete library to interact with Game Services (protocol v1) @@ -7,44 +7,18 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/gameservices1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-game-server-deployments-configs-create.md', 'Projects', 'Locations Game Server Deployments Configs Create'] -- ['projects_locations-game-server-deployments-configs-delete.md', 'Projects', 'Locations Game Server Deployments Configs Delete'] -- ['projects_locations-game-server-deployments-configs-get.md', 'Projects', 'Locations Game Server Deployments Configs Get'] -- ['projects_locations-game-server-deployments-configs-list.md', 'Projects', 'Locations Game Server Deployments Configs List'] -- ['projects_locations-game-server-deployments-create.md', 'Projects', 'Locations Game Server Deployments Create'] -- ['projects_locations-game-server-deployments-delete.md', 'Projects', 'Locations Game Server 
Deployments Delete'] -- ['projects_locations-game-server-deployments-fetch-deployment-state.md', 'Projects', 'Locations Game Server Deployments Fetch Deployment State'] -- ['projects_locations-game-server-deployments-get.md', 'Projects', 'Locations Game Server Deployments Get'] -- ['projects_locations-game-server-deployments-get-iam-policy.md', 'Projects', 'Locations Game Server Deployments Get Iam Policy'] -- ['projects_locations-game-server-deployments-get-rollout.md', 'Projects', 'Locations Game Server Deployments Get Rollout'] -- ['projects_locations-game-server-deployments-list.md', 'Projects', 'Locations Game Server Deployments List'] -- ['projects_locations-game-server-deployments-patch.md', 'Projects', 'Locations Game Server Deployments Patch'] -- ['projects_locations-game-server-deployments-preview-rollout.md', 'Projects', 'Locations Game Server Deployments Preview Rollout'] -- ['projects_locations-game-server-deployments-set-iam-policy.md', 'Projects', 'Locations Game Server Deployments Set Iam Policy'] -- ['projects_locations-game-server-deployments-test-iam-permissions.md', 'Projects', 'Locations Game Server Deployments Test Iam Permissions'] -- ['projects_locations-game-server-deployments-update-rollout.md', 'Projects', 'Locations Game Server Deployments Update Rollout'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-realms-create.md', 'Projects', 'Locations Realms Create'] -- ['projects_locations-realms-delete.md', 'Projects', 'Locations Realms Delete'] -- 
['projects_locations-realms-game-server-clusters-create.md', 'Projects', 'Locations Realms Game Server Clusters Create'] -- ['projects_locations-realms-game-server-clusters-delete.md', 'Projects', 'Locations Realms Game Server Clusters Delete'] -- ['projects_locations-realms-game-server-clusters-get.md', 'Projects', 'Locations Realms Game Server Clusters Get'] -- ['projects_locations-realms-game-server-clusters-list.md', 'Projects', 'Locations Realms Game Server Clusters List'] -- ['projects_locations-realms-game-server-clusters-patch.md', 'Projects', 'Locations Realms Game Server Clusters Patch'] -- ['projects_locations-realms-game-server-clusters-preview-create.md', 'Projects', 'Locations Realms Game Server Clusters Preview Create'] -- ['projects_locations-realms-game-server-clusters-preview-delete.md', 'Projects', 'Locations Realms Game Server Clusters Preview Delete'] -- ['projects_locations-realms-game-server-clusters-preview-update.md', 'Projects', 'Locations Realms Game Server Clusters Preview Update'] -- ['projects_locations-realms-get.md', 'Projects', 'Locations Realms Get'] -- ['projects_locations-realms-list.md', 'Projects', 'Locations Realms List'] -- ['projects_locations-realms-patch.md', 'Projects', 'Locations Realms Patch'] -- ['projects_locations-realms-preview-update.md', 'Projects', 'Locations Realms Preview Update'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Game Server Deployments Get Iam Policy': 'projects_locations-game-server-deployments-get-iam-policy.md' + - 'Locations Game Server Deployments Set Iam Policy': 'projects_locations-game-server-deployments-set-iam-policy.md' + - 'Locations Game Server Deployments Test Iam Permissions': 'projects_locations-game-server-deployments-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 
'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/gameservices1-cli/src/client.rs b/gen/gameservices1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/gameservices1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/gameservices1-cli/src/main.rs b/gen/gameservices1-cli/src/main.rs index ce5d931fb5..3435bb8a02 100644 --- a/gen/gameservices1-cli/src/main.rs +++ b/gen/gameservices1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_gameservices1::{api, Error, oauth2}; +use google_gameservices1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,550 +50,6 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { - async fn _projects_locations_game_server_deployments_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "labels", "name", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerConfig = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_game_server_deployments_configs_create(request, opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "config-id" => { - call = call.config_id(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["config-id"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_configs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_game_server_deployments_configs_delete(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - 
match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_game_server_deployments_configs_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - 
json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_game_server_deployments_configs_list(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "order-by" => { - call = call.order_by(value.unwrap_or("")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "labels", "name", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerDeployment = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_game_server_deployments_create(request, opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "deployment-id" => { - call = call.deployment_id(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["deployment-id"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - 
if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_game_server_deployments_delete(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_fetch_deployment_state(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::FetchDeploymentStateRequest = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_game_server_deployments_fetch_deployment_state(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - 
Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_game_server_deployments_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _projects_locations_game_server_deployments_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_game_server_deployments_get_iam_policy(opt.value_of("resource").unwrap_or("")); @@ -602,7 
+57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -651,313 +106,6 @@ where } } - async fn _projects_locations_game_server_deployments_get_rollout(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_game_server_deployments_get_rollout(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to 
work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_game_server_deployments_list(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "order-by" => { - call = call.order_by(value.unwrap_or("")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { 
- let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "labels", "name", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerDeployment = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_game_server_deployments_patch(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["update-mask"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if 
dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_game_server_deployments_preview_rollout(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "default-game-server-config" => Some(("defaultGameServerConfig", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "default-game-server-config", "etag", "name", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerDeploymentRollout = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_game_server_deployments_preview_rollout(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - "preview-time" => { - call = call.preview_time(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["preview-time", "update-mask"].iter().map(|v|*v)); - v } )); - } - } - } - } - let 
protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _projects_locations_game_server_deployments_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1130,99 +278,6 @@ where } } - async fn _projects_locations_game_server_deployments_update_rollout(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "default-game-server-config" => Some(("defaultGameServerConfig", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "default-game-server-config", "etag", "name", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerDeploymentRollout = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_game_server_deployments_update_rollout(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["update-mask"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() 
== 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or("")); @@ -1285,10 +340,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "include-unrevealed-locations" => { - call = call.include_unrevealed_locations(arg_from_str(value.unwrap_or("false"), err, "include-unrevealed-locations", "boolean")); + call = call.include_unrevealed_locations( value.map(|v| arg_from_str(v, err, "include-unrevealed-locations", "boolean")).unwrap_or(false)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1538,7 +593,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1590,1112 +645,6 @@ where } 
} - async fn _projects_locations_realms_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] { - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "labels", "name", "time-zone", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, 
&temp_cursor); - } - } - let mut request: api::Realm = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_realms_create(request, opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "realm-id" => { - call = call.realm_id(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["realm-id"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_realms_delete(opt.value_of("name").unwrap_or("")); - for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_game_server_clusters_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() 
{ - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] { - "cluster-state.agones-version-installed" => Some(("clusterState.agonesVersionInstalled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.agones-version-targeted" => Some(("clusterState.agonesVersionTargeted", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.installation-state" => Some(("clusterState.installationState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.kubernetes-version-installed" => Some(("clusterState.kubernetesVersionInstalled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.provider" => Some(("clusterState.provider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.version-installed-error-message" => Some(("clusterState.versionInstalledErrorMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connection-info.gke-cluster-reference.cluster" => Some(("connectionInfo.gkeClusterReference.cluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connection-info.namespace" => Some(("connectionInfo.namespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => 
Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agones-version-installed", "agones-version-targeted", "cluster", "cluster-state", "connection-info", "create-time", "description", "etag", "gke-cluster-reference", "installation-state", "kubernetes-version-installed", "labels", "name", "namespace", "provider", "update-time", "version-installed-error-message"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerCluster = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_realms_game_server_clusters_create(request, opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "game-server-cluster-id" => { - call = call.game_server_cluster_id(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["game-server-cluster-id"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) 
{ - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_game_server_clusters_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_realms_game_server_clusters_delete(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) 
=> { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_game_server_clusters_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_realms_game_server_clusters_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "view" => { - call = call.view(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["view"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } 
- } - } - } - - async fn _projects_locations_realms_game_server_clusters_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_realms_game_server_clusters_list(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "view" => { - call = call.view(value.unwrap_or("")); - }, - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "order-by" => { - call = call.order_by(value.unwrap_or("")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "order-by", "page-size", "page-token", "view"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_game_server_clusters_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "cluster-state.agones-version-installed" => Some(("clusterState.agonesVersionInstalled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.agones-version-targeted" => Some(("clusterState.agonesVersionTargeted", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.installation-state" => Some(("clusterState.installationState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.kubernetes-version-installed" => Some(("clusterState.kubernetesVersionInstalled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.provider" => Some(("clusterState.provider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.version-installed-error-message" => Some(("clusterState.versionInstalledErrorMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connection-info.gke-cluster-reference.cluster" => Some(("connectionInfo.gkeClusterReference.cluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connection-info.namespace" => Some(("connectionInfo.namespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agones-version-installed", "agones-version-targeted", "cluster", 
"cluster-state", "connection-info", "create-time", "description", "etag", "gke-cluster-reference", "installation-state", "kubernetes-version-installed", "labels", "name", "namespace", "provider", "update-time", "version-installed-error-message"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerCluster = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_realms_game_server_clusters_patch(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["update-mask"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => 
Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_game_server_clusters_preview_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "cluster-state.agones-version-installed" => Some(("clusterState.agonesVersionInstalled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.agones-version-targeted" => Some(("clusterState.agonesVersionTargeted", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.installation-state" => Some(("clusterState.installationState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.kubernetes-version-installed" => Some(("clusterState.kubernetesVersionInstalled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.provider" => Some(("clusterState.provider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.version-installed-error-message" => Some(("clusterState.versionInstalledErrorMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connection-info.gke-cluster-reference.cluster" => Some(("connectionInfo.gkeClusterReference.cluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connection-info.namespace" => Some(("connectionInfo.namespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agones-version-installed", "agones-version-targeted", "cluster", 
"cluster-state", "connection-info", "create-time", "description", "etag", "gke-cluster-reference", "installation-state", "kubernetes-version-installed", "labels", "name", "namespace", "provider", "update-time", "version-installed-error-message"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerCluster = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_realms_game_server_clusters_preview_create(request, opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "view" => { - call = call.view(value.unwrap_or("")); - }, - "preview-time" => { - call = call.preview_time(value.unwrap_or("")); - }, - "game-server-cluster-id" => { - call = call.game_server_cluster_id(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["game-server-cluster-id", "preview-time", "view"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_game_server_clusters_preview_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_realms_game_server_clusters_preview_delete(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "preview-time" => { - call = call.preview_time(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["preview-time"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() 
- } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_game_server_clusters_preview_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] 
{ - "cluster-state.agones-version-installed" => Some(("clusterState.agonesVersionInstalled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.agones-version-targeted" => Some(("clusterState.agonesVersionTargeted", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.installation-state" => Some(("clusterState.installationState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.kubernetes-version-installed" => Some(("clusterState.kubernetesVersionInstalled", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.provider" => Some(("clusterState.provider", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "cluster-state.version-installed-error-message" => Some(("clusterState.versionInstalledErrorMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connection-info.gke-cluster-reference.cluster" => Some(("connectionInfo.gkeClusterReference.cluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "connection-info.namespace" => Some(("connectionInfo.namespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["agones-version-installed", "agones-version-targeted", "cluster", 
"cluster-state", "connection-info", "create-time", "description", "etag", "gke-cluster-reference", "installation-state", "kubernetes-version-installed", "labels", "name", "namespace", "provider", "update-time", "version-installed-error-message"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GameServerCluster = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_realms_game_server_clusters_preview_update(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - "preview-time" => { - call = call.preview_time(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["preview-time", "update-mask"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { 
- CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_realms_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - 
ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_realms_list(opt.value_of("parent").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "page-token" => { - call = call.page_token(value.unwrap_or("")); - }, - "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "order-by" => { - call = call.order_by(value.unwrap_or("")); - }, - "filter" => { - call = call.filter(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - 
json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] { - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "labels", "name", "time-zone", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info 
{ - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::Realm = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_realms_patch(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["update-mask"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _projects_locations_realms_preview_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor 
= FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] { - "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "etag", "labels", "name", "time-zone", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::Realm = json::value::from_value(object).unwrap(); - let mut call = self.hub.projects().locations_realms_preview_update(request, 
opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - "preview-time" => { - call = call.preview_time(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["preview-time", "update-mask"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -2703,54 +652,15 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { - ("locations-game-server-deployments-configs-create", Some(opt)) => { - call_result = 
self._projects_locations_game_server_deployments_configs_create(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-configs-delete", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_configs_delete(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-configs-get", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_configs_get(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-configs-list", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_configs_list(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-create", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_create(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-delete", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_delete(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-fetch-deployment-state", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_fetch_deployment_state(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-get", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_get(opt, dry_run, &mut err).await; - }, ("locations-game-server-deployments-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_game_server_deployments_get_iam_policy(opt, dry_run, &mut err).await; }, - ("locations-game-server-deployments-get-rollout", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_get_rollout(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-list", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_list(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-patch", Some(opt)) => { - call_result = 
self._projects_locations_game_server_deployments_patch(opt, dry_run, &mut err).await; - }, - ("locations-game-server-deployments-preview-rollout", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_preview_rollout(opt, dry_run, &mut err).await; - }, ("locations-game-server-deployments-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_game_server_deployments_set_iam_policy(opt, dry_run, &mut err).await; }, ("locations-game-server-deployments-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_game_server_deployments_test_iam_permissions(opt, dry_run, &mut err).await; }, - ("locations-game-server-deployments-update-rollout", Some(opt)) => { - call_result = self._projects_locations_game_server_deployments_update_rollout(opt, dry_run, &mut err).await; - }, ("locations-get", Some(opt)) => { call_result = self._projects_locations_get(opt, dry_run, &mut err).await; }, @@ -2769,48 +679,6 @@ where ("locations-operations-list", Some(opt)) => { call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; }, - ("locations-realms-create", Some(opt)) => { - call_result = self._projects_locations_realms_create(opt, dry_run, &mut err).await; - }, - ("locations-realms-delete", Some(opt)) => { - call_result = self._projects_locations_realms_delete(opt, dry_run, &mut err).await; - }, - ("locations-realms-game-server-clusters-create", Some(opt)) => { - call_result = self._projects_locations_realms_game_server_clusters_create(opt, dry_run, &mut err).await; - }, - ("locations-realms-game-server-clusters-delete", Some(opt)) => { - call_result = self._projects_locations_realms_game_server_clusters_delete(opt, dry_run, &mut err).await; - }, - ("locations-realms-game-server-clusters-get", Some(opt)) => { - call_result = self._projects_locations_realms_game_server_clusters_get(opt, dry_run, &mut err).await; - }, - ("locations-realms-game-server-clusters-list", Some(opt)) => { - call_result 
= self._projects_locations_realms_game_server_clusters_list(opt, dry_run, &mut err).await; - }, - ("locations-realms-game-server-clusters-patch", Some(opt)) => { - call_result = self._projects_locations_realms_game_server_clusters_patch(opt, dry_run, &mut err).await; - }, - ("locations-realms-game-server-clusters-preview-create", Some(opt)) => { - call_result = self._projects_locations_realms_game_server_clusters_preview_create(opt, dry_run, &mut err).await; - }, - ("locations-realms-game-server-clusters-preview-delete", Some(opt)) => { - call_result = self._projects_locations_realms_game_server_clusters_preview_delete(opt, dry_run, &mut err).await; - }, - ("locations-realms-game-server-clusters-preview-update", Some(opt)) => { - call_result = self._projects_locations_realms_game_server_clusters_preview_update(opt, dry_run, &mut err).await; - }, - ("locations-realms-get", Some(opt)) => { - call_result = self._projects_locations_realms_get(opt, dry_run, &mut err).await; - }, - ("locations-realms-list", Some(opt)) => { - call_result = self._projects_locations_realms_list(opt, dry_run, &mut err).await; - }, - ("locations-realms-patch", Some(opt)) => { - call_result = self._projects_locations_realms_patch(opt, dry_run, &mut err).await; - }, - ("locations-realms-preview-update", Some(opt)) => { - call_result = self._projects_locations_realms_preview_update(opt, dry_run, &mut err).await; - }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2890,208 +758,14 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-game-server-deployments-configs-create', 'locations-game-server-deployments-configs-delete', 'locations-game-server-deployments-configs-get', 'locations-game-server-deployments-configs-list', 'locations-game-server-deployments-create', 'locations-game-server-deployments-delete', 
'locations-game-server-deployments-fetch-deployment-state', 'locations-game-server-deployments-get', 'locations-game-server-deployments-get-iam-policy', 'locations-game-server-deployments-get-rollout', 'locations-game-server-deployments-list', 'locations-game-server-deployments-patch', 'locations-game-server-deployments-preview-rollout', 'locations-game-server-deployments-set-iam-policy', 'locations-game-server-deployments-test-iam-permissions', 'locations-game-server-deployments-update-rollout', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-realms-create', 'locations-realms-delete', 'locations-realms-game-server-clusters-create', 'locations-realms-game-server-clusters-delete', 'locations-realms-game-server-clusters-get', 'locations-realms-game-server-clusters-list', 'locations-realms-game-server-clusters-patch', 'locations-realms-game-server-clusters-preview-create', 'locations-realms-game-server-clusters-preview-delete', 'locations-realms-game-server-clusters-preview-update', 'locations-realms-get', 'locations-realms-list', 'locations-realms-patch' and 'locations-realms-preview-update'", vec![ - ("locations-game-server-deployments-configs-create", - Some(r##"Creates a new game server config in a given project, location, and game server deployment. Game server configs are immutable, and are not applied until referenced in the game server deployment rollout resource."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-configs-create", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
The parent resource name, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}/`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-configs-delete", - Some(r##"Deletes a single game server config. The deletion fails if the game server config is referenced in a game server deployment rollout."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-configs-delete", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The name of the game server config to delete, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}/configs/{configId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-configs-get", - Some(r##"Gets details of a single game server config."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-configs-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. 
The name of the game server config to retrieve, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}/configs/{configId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-configs-list", - Some(r##"Lists game server configs in a given project, location, and game server deployment."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-configs-list", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. The parent resource name, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}/configs/*`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-create", - Some(r##"Creates a new game server deployment in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-create", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
The parent resource name, in the following form: `projects/{project}/locations/{locationId}`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-delete", - Some(r##"Deletes a single game server deployment."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-delete", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The name of the game server deployment to delete, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-fetch-deployment-state", - Some(r##"Retrieves information about the current state of the game server deployment. Gathers all the Agones fleets and Agones autoscalers, including fleets running an older version of the game server deployment."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-fetch-deployment-state", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. 
The name of the game server deployment, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-get", - Some(r##"Gets details of a single game server deployment."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The name of the game server deployment to retrieve, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), + ("projects", "methods: 'locations-game-server-deployments-get-iam-policy', 'locations-game-server-deployments-set-iam-policy', 'locations-game-server-deployments-test-iam-permissions', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ ("locations-game-server-deployments-get-iam-policy", Some(r##"Gets the access control policy for a resource. 
Returns an empty policy if the resource exists and does not have a policy set."##), "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-get-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3101,106 +775,6 @@ async fn main() { Some(false), Some(true)), - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-get-rollout", - Some(r##"Gets details of a single game server deployment rollout."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-get-rollout", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The name of the game server deployment rollout to retrieve, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}/rollout`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-list", - Some(r##"Lists game server deployments in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-list", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
The parent resource name, in the following form: `projects/{project}/locations/{locationId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-patch", - Some(r##"Patches a game server deployment."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-patch", - vec![ - (Some(r##"name"##), - None, - Some(r##"The resource name of the game server deployment, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}`. For example, `projects/my-project/locations/global/gameServerDeployments/my-deployment`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-preview-rollout", - Some(r##"Previews the game server deployment rollout. This API does not mutate the rollout resource."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-preview-rollout", - vec![ - (Some(r##"name"##), - None, - Some(r##"The resource name of the game server deployment rollout, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}/rollout`. 
For example, `projects/my-project/locations/global/gameServerDeployments/my-deployment/rollout`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3213,7 +787,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3241,35 +815,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-game-server-deployments-update-rollout", - Some(r##"Patches a single game server deployment rollout. The method will not return an error if the update does not affect any existing realms. 
For example, the following cases will not return an error: * The default_game_server_config is changed but all existing realms use the override. * A non-existing realm is explicitly called out in the game_server_config_overrides field."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-game-server-deployments-update-rollout", - vec![ - (Some(r##"name"##), - None, - Some(r##"The resource name of the game server deployment rollout, in the following form: `projects/{project}/locations/{locationId}/gameServerDeployments/{deploymentId}/rollout`. For example, `projects/my-project/locations/global/gameServerDeployments/my-deployment/rollout`."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3423,356 +969,6 @@ async fn main() { Some(false), Some(true)), - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-create", - Some(r##"Creates a new realm in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-create", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
The parent resource name, in the following form: `projects/{project}/locations/{locationId}`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-delete", - Some(r##"Deletes a single realm."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-delete", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The name of the realm to delete, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-game-server-clusters-create", - Some(r##"Creates a new game server cluster in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-game-server-clusters-create", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. 
The parent resource name, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-game-server-clusters-delete", - Some(r##"Deletes a single game server cluster."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-game-server-clusters-delete", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The name of the game server cluster to delete, in the following form: `projects/{project}/locations/{locationId}/gameServerClusters/{gameServerClusterId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-game-server-clusters-get", - Some(r##"Gets details of a single game server cluster."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-game-server-clusters-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. 
The name of the game server cluster to retrieve, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}/gameServerClusters/{gameServerClusterId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-game-server-clusters-list", - Some(r##"Lists game server clusters in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-game-server-clusters-list", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. The parent resource name, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-game-server-clusters-patch", - Some(r##"Patches a single game server cluster."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-game-server-clusters-patch", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The resource name of the game server cluster, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}/gameServerClusters/{gameServerClusterId}`. 
For example, `projects/my-project/locations/global/realms/zanzibar/gameServerClusters/my-gke-cluster`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-game-server-clusters-preview-create", - Some(r##"Previews creation of a new game server cluster in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-game-server-clusters-preview-create", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. The parent resource name, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-game-server-clusters-preview-delete", - Some(r##"Previews deletion of a single game server cluster."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-game-server-clusters-preview-delete", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. 
The name of the game server cluster to delete, in the following form: `projects/{project}/locations/{locationId}/gameServerClusters/{gameServerClusterId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-game-server-clusters-preview-update", - Some(r##"Previews updating a GameServerCluster."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-game-server-clusters-preview-update", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The resource name of the game server cluster, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}/gameServerClusters/{gameServerClusterId}`. For example, `projects/my-project/locations/global/realms/zanzibar/gameServerClusters/my-gke-cluster`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-get", - Some(r##"Gets details of a single realm."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. 
The name of the realm to retrieve, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-list", - Some(r##"Lists realms in a given project and location."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-list", - vec![ - (Some(r##"parent"##), - None, - Some(r##"Required. The parent resource name, in the following form: `projects/{project}/locations/{locationId}`."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-patch", - Some(r##"Patches a single realm."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-patch", - vec![ - (Some(r##"name"##), - None, - Some(r##"The resource name of the realm, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}`. 
For example, `projects/my-project/locations/global/realms/my-realm`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("locations-realms-preview-update", - Some(r##"Previews patches to a single realm."##), - "Details at http://byron.github.io/google-apis-rs/google_gameservices1_cli/projects_locations-realms-preview-update", - vec![ - (Some(r##"name"##), - None, - Some(r##"The resource name of the realm, in the following form: `projects/{project}/locations/{locationId}/realms/{realmId}`. For example, `projects/my-project/locations/global/realms/my-realm`."##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3785,7 +981,7 @@ async fn main() { let mut app = App::new("gameservices1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20230105") .about("Deploy and manage infrastructure for global multiplayer gaming experiences.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_gameservices1_cli") .arg(Arg::with_name("url") diff --git a/gen/gameservices1/Cargo.toml b/gen/gameservices1/Cargo.toml index d003b4fcab..40fe5e1ab9 100644 --- a/gen/gameservices1/Cargo.toml +++ 
b/gen/gameservices1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-gameservices1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Game Services (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gameservices1" homepage = "https://cloud.google.com/solutions/gaming/" -documentation = "https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-gameservices1/5.0.2+20230105" license = "MIT" keywords = ["gameservices", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/gameservices1/README.md b/gen/gameservices1/README.md index 354c0a54ac..5f16dcb839 100644 --- a/gen/gameservices1/README.md +++ b/gen/gameservices1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-gameservices1` library allows access to all features of the *Google Game Services* service. -This documentation was generated from *Game Services* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *gameservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Game Services* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *gameservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Game Services* *v1* API can be found at the [official documentation site](https://cloud.google.com/solutions/gaming/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/GameServices) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/GameServices) ... 
* projects - * [*locations game server deployments get iam policy*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationGameServerDeploymentGetIamPolicyCall), [*locations game server deployments set iam policy*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationGameServerDeploymentSetIamPolicyCall), [*locations game server deployments test iam permissions*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationGameServerDeploymentTestIamPermissionCall), [*locations get*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/api::ProjectLocationOperationListCall) + * [*locations game server deployments get iam policy*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationGameServerDeploymentGetIamPolicyCall), [*locations game server deployments set iam policy*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationGameServerDeploymentSetIamPolicyCall), [*locations game server deployments test iam 
permissions*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationGameServerDeploymentTestIamPermissionCall), [*locations get*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/GameServices)** +* **[Hub](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/GameServices)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::CallBuilder) -* **[Resources](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::MethodsBuilder) which in turn + allow access to individual 
[*Call Builders*](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::CallBuilder) +* **[Resources](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::Part)** + * **[Parts](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -120,17 +120,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -140,29 +140,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::Delegate) to the -[Method Builder](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::Delegate) to the +[Method Builder](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::RequestValue) and -[decodable](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::RequestValue) and +[decodable](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-gameservices1/5.0.2-beta-1+20230105/google_gameservices1/client::RequestValue) are moved +* [request values](https://docs.rs/google-gameservices1/5.0.2+20230105/google_gameservices1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/gameservices1/src/api.rs b/gen/gameservices1/src/api.rs index 000a3b9e64..c416b42469 100644 --- a/gen/gameservices1/src/api.rs +++ b/gen/gameservices1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> GameServices { GameServices { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://gameservices.googleapis.com/".to_string(), _root_url: "https://gameservices.googleapis.com/".to_string(), } @@ -131,7 +131,7 @@ impl<'a, S> GameServices { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/gameservices1/src/client.rs b/gen/gameservices1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/gameservices1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/gameservices1/src/lib.rs b/gen/gameservices1/src/lib.rs index 4e411b6c53..f3ebed6937 100644 --- a/gen/gameservices1/src/lib.rs +++ b/gen/gameservices1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Game Services* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *gameservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Game Services* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *gameservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Game Services* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/solutions/gaming/). diff --git a/gen/gamesmanagement1_management-cli/Cargo.toml b/gen/gamesmanagement1_management-cli/Cargo.toml index 1f66267b39..3ce3e03b4b 100644 --- a/gen/gamesmanagement1_management-cli/Cargo.toml +++ b/gen/gamesmanagement1_management-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-gamesmanagement1_management-cli" -version = "4.0.1+20220217" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Games Management (protocol v1management)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gamesmanagement1_management-cli" @@ -20,13 +20,13 @@ name = "gamesmanagement1-management" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-gamesmanagement1_management] path = "../gamesmanagement1_management" -version = "4.0.1+20220217" +version = "5.0.2+20230112" + diff --git a/gen/gamesmanagement1_management-cli/README.md b/gen/gamesmanagement1_management-cli/README.md 
index 03676d2c95..80dc7aef33 100644 --- a/gen/gamesmanagement1_management-cli/README.md +++ b/gen/gamesmanagement1_management-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Games Management* API at revision *20220217*. The CLI is at version *4.0.1*. +This documentation was generated from the *Games Management* API at revision *20230112*. The CLI is at version *5.0.2*. ```bash gamesmanagement1-management [options] diff --git a/gen/gamesmanagement1_management-cli/mkdocs.yml b/gen/gamesmanagement1_management-cli/mkdocs.yml index 7d56e83899..313ad221c6 100644 --- a/gen/gamesmanagement1_management-cli/mkdocs.yml +++ b/gen/gamesmanagement1_management-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Games Management v4.0.1+20220217 +site_name: Games Management v5.0.2+20230112 site_url: http://byron.github.io/google-apis-rs/google-gamesmanagement1_management-cli site_description: A complete library to interact with Games Management (protocol v1management) @@ -7,26 +7,31 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/gamesmanagement1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['achievements_reset.md', 'Achievements', 'Reset'] -- ['achievements_reset-all.md', 'Achievements', 'Reset All'] -- ['achievements_reset-all-for-all-players.md', 'Achievements', 'Reset All For All Players'] -- ['achievements_reset-for-all-players.md', 'Achievements', 'Reset For All Players'] -- ['achievements_reset-multiple-for-all-players.md', 'Achievements', 'Reset Multiple For All Players'] -- ['applications_list-hidden.md', 'Applications', 'List Hidden'] -- ['events_reset.md', 'Events', 'Reset'] -- ['events_reset-all.md', 'Events', 'Reset All'] -- ['events_reset-all-for-all-players.md', 'Events', 'Reset All For All Players'] -- ['events_reset-for-all-players.md', 'Events', 'Reset For All Players'] -- 
['events_reset-multiple-for-all-players.md', 'Events', 'Reset Multiple For All Players'] -- ['players_hide.md', 'Players', 'Hide'] -- ['players_unhide.md', 'Players', 'Unhide'] -- ['scores_reset.md', 'Scores', 'Reset'] -- ['scores_reset-all.md', 'Scores', 'Reset All'] -- ['scores_reset-all-for-all-players.md', 'Scores', 'Reset All For All Players'] -- ['scores_reset-for-all-players.md', 'Scores', 'Reset For All Players'] -- ['scores_reset-multiple-for-all-players.md', 'Scores', 'Reset Multiple For All Players'] +nav: +- Home: 'index.md' +- 'Achievements': + - 'Reset': 'achievements_reset.md' + - 'Reset All': 'achievements_reset-all.md' + - 'Reset All For All Players': 'achievements_reset-all-for-all-players.md' + - 'Reset For All Players': 'achievements_reset-for-all-players.md' + - 'Reset Multiple For All Players': 'achievements_reset-multiple-for-all-players.md' +- 'Applications': + - 'List Hidden': 'applications_list-hidden.md' +- 'Events': + - 'Reset': 'events_reset.md' + - 'Reset All': 'events_reset-all.md' + - 'Reset All For All Players': 'events_reset-all-for-all-players.md' + - 'Reset For All Players': 'events_reset-for-all-players.md' + - 'Reset Multiple For All Players': 'events_reset-multiple-for-all-players.md' +- 'Players': + - 'Hide': 'players_hide.md' + - 'Unhide': 'players_unhide.md' +- 'Scores': + - 'Reset': 'scores_reset.md' + - 'Reset All': 'scores_reset-all.md' + - 'Reset All For All Players': 'scores_reset-all-for-all-players.md' + - 'Reset For All Players': 'scores_reset-for-all-players.md' + - 'Reset Multiple For All Players': 'scores_reset-multiple-for-all-players.md' theme: readthedocs diff --git a/gen/gamesmanagement1_management-cli/src/client.rs b/gen/gamesmanagement1_management-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/gamesmanagement1_management-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use 
crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/gamesmanagement1_management-cli/src/main.rs b/gen/gamesmanagement1_management-cli/src/main.rs index 2b911ed214..9f7c6e3dee 100644 --- a/gen/gamesmanagement1_management-cli/src/main.rs +++ b/gen/gamesmanagement1_management-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_gamesmanagement1_management::{api, Error, oauth2}; +use google_gamesmanagement1_management::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -331,7 +330,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1477,7 +1476,7 @@ async fn main() { let mut app = App::new("gamesmanagement1-management") .author("Sebastian Thiel ") - .version("4.0.1+20220217") + .version("5.0.2+20230112") .about("The Google Play Game Management API allows developers to manage resources from the Google Play Game service.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_gamesmanagement1_management_cli") .arg(Arg::with_name("url") diff --git a/gen/gamesmanagement1_management/Cargo.toml b/gen/gamesmanagement1_management/Cargo.toml index c199b863ff..8d80ccb7b6 100644 --- a/gen/gamesmanagement1_management/Cargo.toml +++ 
b/gen/gamesmanagement1_management/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-gamesmanagement1_management" -version = "5.0.2-beta-1+20230112" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Games Management (protocol v1management)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gamesmanagement1_management" homepage = "https://developers.google.com/games/" -documentation = "https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112" +documentation = "https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112" license = "MIT" keywords = ["gamesManagement", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/gamesmanagement1_management/README.md b/gen/gamesmanagement1_management/README.md index e1897520bd..ad3123295d 100644 --- a/gen/gamesmanagement1_management/README.md +++ b/gen/gamesmanagement1_management/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-gamesmanagement1_management` library allows access to all features of the *Google Games Management* service. -This documentation was generated from *Games Management* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *gamesManagement:v1management* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Games Management* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *gamesManagement:v1management* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Games Management* *v1_management* API can be found at the [official documentation site](https://developers.google.com/games/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/GamesManagement) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/GamesManagement) ... * achievements - * [*reset*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::AchievementResetCall), [*reset all*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::AchievementResetAllCall), [*reset all for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::AchievementResetAllForAllPlayerCall), [*reset for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::AchievementResetForAllPlayerCall) and [*reset multiple for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::AchievementResetMultipleForAllPlayerCall) + * [*reset*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::AchievementResetCall), [*reset all*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::AchievementResetAllCall), [*reset all for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::AchievementResetAllForAllPlayerCall), [*reset for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::AchievementResetForAllPlayerCall) and [*reset multiple for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::AchievementResetMultipleForAllPlayerCall) * applications - * [*list 
hidden*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::ApplicationListHiddenCall) + * [*list hidden*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::ApplicationListHiddenCall) * events - * [*reset*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::EventResetCall), [*reset all*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::EventResetAllCall), [*reset all for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::EventResetAllForAllPlayerCall), [*reset for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::EventResetForAllPlayerCall) and [*reset multiple for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::EventResetMultipleForAllPlayerCall) -* [players](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::Player) - * [*hide*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::PlayerHideCall) and [*unhide*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::PlayerUnhideCall) + * [*reset*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::EventResetCall), [*reset all*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::EventResetAllCall), [*reset all for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::EventResetAllForAllPlayerCall), 
[*reset for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::EventResetForAllPlayerCall) and [*reset multiple for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::EventResetMultipleForAllPlayerCall) +* [players](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::Player) + * [*hide*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::PlayerHideCall) and [*unhide*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::PlayerUnhideCall) * scores - * [*reset*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::ScoreResetCall), [*reset all*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::ScoreResetAllCall), [*reset all for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::ScoreResetAllForAllPlayerCall), [*reset for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::ScoreResetForAllPlayerCall) and [*reset multiple for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/api::ScoreResetMultipleForAllPlayerCall) + * [*reset*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::ScoreResetCall), [*reset all*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::ScoreResetAllCall), [*reset all for all 
players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::ScoreResetAllForAllPlayerCall), [*reset for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::ScoreResetForAllPlayerCall) and [*reset multiple for all players*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/api::ScoreResetMultipleForAllPlayerCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/GamesManagement)** +* **[Hub](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/GamesManagement)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::CallBuilder) -* **[Resources](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::CallBuilder) +* **[Resources](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::Resource)** * primary types that you can 
apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::Part)** + * **[Parts](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::CallBuilder)** +* **[Activities](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. 
This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::Delegate) to the -[Method Builder](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::Delegate) to the +[Method Builder](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::RequestValue) and -[decodable](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::RequestValue) and +[decodable](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-gamesmanagement1_management/5.0.2-beta-1+20230112/google_gamesmanagement1_management/client::RequestValue) are moved +* [request values](https://docs.rs/google-gamesmanagement1_management/5.0.2+20230112/google_gamesmanagement1_management/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/gamesmanagement1_management/src/api.rs b/gen/gamesmanagement1_management/src/api.rs index 81a89695e7..9b0739f796 100644 --- a/gen/gamesmanagement1_management/src/api.rs +++ b/gen/gamesmanagement1_management/src/api.rs @@ -119,7 +119,7 @@ impl<'a, S> GamesManagement { GamesManagement { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://gamesmanagement.googleapis.com/".to_string(), _root_url: "https://gamesmanagement.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> GamesManagement { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/gamesmanagement1_management/src/client.rs b/gen/gamesmanagement1_management/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/gamesmanagement1_management/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given 
duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. 
The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. 
- fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. 
- /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. 
- MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." 
- ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. -pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. 
-#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/gamesmanagement1_management/src/lib.rs b/gen/gamesmanagement1_management/src/lib.rs index 01f816c6d0..7bd5cfb9ec 100644 --- a/gen/gamesmanagement1_management/src/lib.rs +++ b/gen/gamesmanagement1_management/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Games Management* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *gamesManagement:v1management* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Games Management* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *gamesManagement:v1management* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Games Management* *v1_management* API can be found at the //! [official documentation site](https://developers.google.com/games/). diff --git a/gen/gan1_beta1-cli/Cargo.toml b/gen/gan1_beta1-cli/Cargo.toml index 2d053bfce9..6d3e853925 100644 --- a/gen/gan1_beta1-cli/Cargo.toml +++ b/gen/gan1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-gan1_beta1-cli" -version = "4.0.1+20130205" +version = "5.0.2+20130205" authors = ["Sebastian Thiel "] description = "A complete library to interact with gan (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gan1_beta1-cli" @@ -20,13 +20,13 @@ name = "gan1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-gan1_beta1] path = "../gan1_beta1" -version = "4.0.1+20130205" +version = "5.0.2+20130205" + diff --git a/gen/gan1_beta1-cli/README.md b/gen/gan1_beta1-cli/README.md index d61f1fab22..181f6c78f6 100644 --- a/gen/gan1_beta1-cli/README.md +++ b/gen/gan1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *gan* API at revision *20130205*. The CLI is at version *4.0.1*. +This documentation was generated from the *gan* API at revision *20130205*. The CLI is at version *5.0.2*. ```bash gan1-beta1 [options] diff --git a/gen/gan1_beta1-cli/mkdocs.yml b/gen/gan1_beta1-cli/mkdocs.yml index eebcd501c0..bae66f5756 100644 --- a/gen/gan1_beta1-cli/mkdocs.yml +++ b/gen/gan1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: gan v4.0.1+20130205 +site_name: gan v5.0.2+20130205 site_url: http://byron.github.io/google-apis-rs/google-gan1_beta1-cli site_description: A complete library to interact with gan (protocol v1beta1) @@ -7,18 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/gan1_beta1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['advertisers_get.md', 'Advertisers', 'Get'] -- ['advertisers_list.md', 'Advertisers', 'List'] -- ['cc-offers_list.md', 'Cc Offers', 'List'] -- ['events_list.md', 'Events', 'List'] -- ['links_get.md', 'Links', 'Get'] -- ['links_insert.md', 'Links', 'Insert'] -- ['links_list.md', 'Links', 'List'] -- ['publishers_get.md', 'Publishers', 'Get'] -- ['publishers_list.md', 'Publishers', 'List'] -- ['reports_get.md', 'Reports', 'Get'] +nav: +- Home: 'index.md' +- 'Advertisers': + - 'Get': 'advertisers_get.md' + - 'List': 'advertisers_list.md' +- 'Cc Offers': + - 'List': 'cc-offers_list.md' +- 'Events': + - 'List': 'events_list.md' +- 'Links': + - 'Get': 'links_get.md' + - 'Insert': 'links_insert.md' + - 'List': 'links_list.md' +- 'Publishers': + - 'Get': 'publishers_get.md' + - 'List': 'publishers_list.md' +- 'Reports': + - 'Get': 'reports_get.md' theme: readthedocs diff --git a/gen/gan1_beta1-cli/src/client.rs b/gen/gan1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/gan1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 
'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - 
-arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/gan1_beta1-cli/src/main.rs b/gen/gan1_beta1-cli/src/main.rs index cc4d0af41c..7d1686ab5b 100644 --- a/gen/gan1_beta1-cli/src/main.rs +++ b/gen/gan1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_gan1_beta1::{api, Error, oauth2}; +use google_gan1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -117,16 +116,16 @@ where call = call.page_token(value.unwrap_or("")); }, "min-seven-day-epc" => { - call = call.min_seven_day_epc(arg_from_str(value.unwrap_or("0.0"), err, "min-seven-day-epc", "number")); + call = call.min_seven_day_epc( value.map(|v| arg_from_str(v, err, "min-seven-day-epc", "double")).unwrap_or(0.0)); }, "min-payout-rank" => { - call = call.min_payout_rank(arg_from_str(value.unwrap_or("-0"), err, "min-payout-rank", "integer")); + call = call.min_payout_rank( value.map(|v| arg_from_str(v, err, "min-payout-rank", "int32")).unwrap_or(-0)); }, "min-ninety-day-epc" => { - call = call.min_ninety_day_epc(arg_from_str(value.unwrap_or("0.0"), err, "min-ninety-day-epc", "number")); + call = call.min_ninety_day_epc( value.map(|v| arg_from_str(v, err, "min-ninety-day-epc", "double")).unwrap_or(0.0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "advertiser-category" => { call = 
call.advertiser_category(value.unwrap_or("")); @@ -268,7 +267,7 @@ where call = call.member_id(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "link-id" => { call = call.link_id(value.unwrap_or("")); @@ -518,7 +517,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "link-type" => { call = call.link_type(value.unwrap_or("")); @@ -536,7 +535,7 @@ where call = call.add_asset_size(value.unwrap_or("")); }, "advertiser-id" => { - call = call.add_advertiser_id(value.unwrap_or("")); + call = call.add_advertiser_id( value.map(|v| arg_from_str(v, err, "advertiser-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -651,16 +650,16 @@ where call = call.page_token(value.unwrap_or("")); }, "min-seven-day-epc" => { - call = call.min_seven_day_epc(arg_from_str(value.unwrap_or("0.0"), err, "min-seven-day-epc", "number")); + call = call.min_seven_day_epc( value.map(|v| arg_from_str(v, err, "min-seven-day-epc", "double")).unwrap_or(0.0)); }, "min-payout-rank" => { - call = call.min_payout_rank(arg_from_str(value.unwrap_or("-0"), err, "min-payout-rank", "integer")); + call = call.min_payout_rank( value.map(|v| arg_from_str(v, err, "min-payout-rank", "int32")).unwrap_or(-0)); }, "min-ninety-day-epc" => { - call = call.min_ninety_day_epc(arg_from_str(value.unwrap_or("0.0"), err, "min-ninety-day-epc", "number")); + call = call.min_ninety_day_epc( value.map(|v| arg_from_str(v, err, "min-ninety-day-epc", "double")).unwrap_or(0.0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = 
call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -716,7 +715,7 @@ where call = call.status(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "start-date" => { call = call.start_date(value.unwrap_or("")); @@ -728,7 +727,7 @@ where call = call.add_order_id(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "link-id" => { call = call.add_link_id(value.unwrap_or("")); @@ -740,7 +739,7 @@ where call = call.end_date(value.unwrap_or("")); }, "calculate-totals" => { - call = call.calculate_totals(arg_from_str(value.unwrap_or("false"), err, "calculate-totals", "boolean")); + call = call.calculate_totals( value.map(|v| arg_from_str(v, err, "calculate-totals", "boolean")).unwrap_or(false)); }, "advertiser-id" => { call = call.add_advertiser_id(value.unwrap_or("")); @@ -1256,7 +1255,7 @@ async fn main() { let mut app = App::new("gan1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20130205") + .version("5.0.2+20130205") .about("Lets you have programmatic access to your Google Affiliate Network data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_gan1_beta1_cli") .arg(Arg::with_name("folder") diff --git a/gen/gan1_beta1/Cargo.toml b/gen/gan1_beta1/Cargo.toml index 9dc432122d..fabafb1dae 100644 --- a/gen/gan1_beta1/Cargo.toml +++ b/gen/gan1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-gan1_beta1" -version = "5.0.2-beta-1+20130205" +version = "5.0.2+20130205" authors = ["Sebastian Thiel "] description = "A complete library to interact with gan 
(protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gan1_beta1" homepage = "https://developers.google.com/affiliate-network/" -documentation = "https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205" +documentation = "https://docs.rs/google-gan1_beta1/5.0.2+20130205" license = "MIT" keywords = ["gan", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/gan1_beta1/README.md b/gen/gan1_beta1/README.md index 2c12d7a1a6..bdebb0a3a4 100644 --- a/gen/gan1_beta1/README.md +++ b/gen/gan1_beta1/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-gan1_beta1` library allows access to all features of the *Google gan* service. -This documentation was generated from *gan* crate version *5.0.2-beta-1+20130205*, where *20130205* is the exact revision of the *gan:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *gan* crate version *5.0.2+20130205*, where *20130205* is the exact revision of the *gan:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *gan* *v1_beta1* API can be found at the [official documentation site](https://developers.google.com/affiliate-network/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/Gan) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/Gan) ... 
-* [advertisers](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::Advertiser) - * [*get*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::AdvertiserGetCall) and [*list*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::AdvertiserListCall) -* [cc offers](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::CcOffer) - * [*list*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::CcOfferListCall) -* [events](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::Event) - * [*list*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::EventListCall) -* [links](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::Link) - * [*get*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::LinkGetCall), [*insert*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::LinkInsertCall) and [*list*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::LinkListCall) -* [publishers](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::Publisher) - * [*get*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::PublisherGetCall) and [*list*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::PublisherListCall) -* [reports](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::Report) - * [*get*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/api::ReportGetCall) +* [advertisers](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::Advertiser) + * [*get*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::AdvertiserGetCall) and 
[*list*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::AdvertiserListCall) +* [cc offers](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::CcOffer) + * [*list*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::CcOfferListCall) +* [events](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::Event) + * [*list*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::EventListCall) +* [links](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::Link) + * [*get*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::LinkGetCall), [*insert*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::LinkInsertCall) and [*list*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::LinkListCall) +* [publishers](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::Publisher) + * [*get*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::PublisherGetCall) and [*list*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::PublisherListCall) +* [reports](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::Report) + * [*get*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/api::ReportGetCall) @@ -33,17 +33,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/Gan)** +* **[Hub](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/Gan)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -143,17 +143,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -163,29 +163,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-gan1_beta1/5.0.2-beta-1+20130205/google_gan1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-gan1_beta1/5.0.2+20130205/google_gan1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/gan1_beta1/src/api.rs b/gen/gan1_beta1/src/api.rs index cef98d2789..6d84884acc 100644 --- a/gen/gan1_beta1/src/api.rs +++ b/gen/gan1_beta1/src/api.rs @@ -110,7 +110,7 @@ impl<'a, S> Gan { Gan { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/gan/v1beta1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Gan { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/gan1_beta1/src/client.rs b/gen/gan1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/gan1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/gan1_beta1/src/lib.rs b/gen/gan1_beta1/src/lib.rs index 484e0458cf..4305723242 100644 --- a/gen/gan1_beta1/src/lib.rs +++ b/gen/gan1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *gan* crate version *5.0.2-beta-1+20130205*, where *20130205* is the exact revision of the *gan:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *gan* crate version *5.0.2+20130205*, where *20130205* is the exact revision of the *gan:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *gan* *v1_beta1* API can be found at the //! [official documentation site](https://developers.google.com/affiliate-network/). diff --git a/gen/genomics1-cli/Cargo.toml b/gen/genomics1-cli/Cargo.toml index 4fe109a772..1fe37c977f 100644 --- a/gen/genomics1-cli/Cargo.toml +++ b/gen/genomics1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-genomics1-cli" -version = "4.0.1+20210324" +version = "5.0.2+20210324" authors = ["Sebastian Thiel "] description = "A complete library to interact with genomics (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/genomics1-cli" @@ -20,13 +20,13 @@ name = "genomics1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-genomics1] path = "../genomics1" -version = "4.0.1+20210324" +version = "5.0.2+20210324" + diff --git a/gen/genomics1-cli/README.md b/gen/genomics1-cli/README.md index a494393576..849b0d6e05 100644 --- a/gen/genomics1-cli/README.md +++ b/gen/genomics1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *genomics* API at revision *20210324*. The CLI is at version *4.0.1*. +This documentation was generated from the *genomics* API at revision *20210324*. The CLI is at version *5.0.2*. ```bash genomics1 [options] diff --git a/gen/genomics1-cli/mkdocs.yml b/gen/genomics1-cli/mkdocs.yml index 494fd8f9c6..e4fbff4ad6 100644 --- a/gen/genomics1-cli/mkdocs.yml +++ b/gen/genomics1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: genomics v4.0.1+20210324 +site_name: genomics v5.0.2+20210324 site_url: http://byron.github.io/google-apis-rs/google-genomics1-cli site_description: A complete library to interact with genomics (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/genomics1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' theme: readthedocs diff --git a/gen/genomics1-cli/src/client.rs b/gen/genomics1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/genomics1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - 
Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/genomics1-cli/src/main.rs b/gen/genomics1-cli/src/main.rs index 4c037040aa..fc1ecff048 100644 --- a/gen/genomics1-cli/src/main.rs +++ b/gen/genomics1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_genomics1::{api, Error, oauth2}; +use google_genomics1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -197,7 +196,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -423,7 +422,7 @@ async fn main() { let mut app = App::new("genomics1") .author("Sebastian Thiel ") - .version("4.0.1+20210324") + .version("5.0.2+20210324") .about("Uploads, processes, queries, and searches Genomics data in the cloud.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_genomics1_cli") .arg(Arg::with_name("url") diff --git a/gen/genomics1/Cargo.toml b/gen/genomics1/Cargo.toml index bc388fa66b..f402dc0866 100644 --- a/gen/genomics1/Cargo.toml +++ b/gen/genomics1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-genomics1" -version = "5.0.2-beta-1+20210324" +version = "5.0.2+20210324" authors = ["Sebastian Thiel "] description = "A complete library to interact with genomics (protocol v1)" 
repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/genomics1" homepage = "https://cloud.google.com/genomics" -documentation = "https://docs.rs/google-genomics1/5.0.2-beta-1+20210324" +documentation = "https://docs.rs/google-genomics1/5.0.2+20210324" license = "MIT" keywords = ["genomics", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/genomics1/README.md b/gen/genomics1/README.md index 9337ea8b62..a416e4dab2 100644 --- a/gen/genomics1/README.md +++ b/gen/genomics1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-genomics1` library allows access to all features of the *Google genomics* service. -This documentation was generated from *genomics* crate version *5.0.2-beta-1+20210324*, where *20210324* is the exact revision of the *genomics:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *genomics* crate version *5.0.2+20210324*, where *20210324* is the exact revision of the *genomics:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *genomics* *v1* API can be found at the [official documentation site](https://cloud.google.com/genomics). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/Genomics) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/Genomics) ... 
-* [operations](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/api::Operation) - * [*cancel*](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/api::OperationCancelCall), [*get*](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/api::OperationGetCall) and [*list*](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/api::OperationListCall) +* [operations](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/api::Operation) + * [*cancel*](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/api::OperationCancelCall), [*get*](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/api::OperationGetCall) and [*list*](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/api::OperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/Genomics)** +* **[Hub](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/Genomics)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::CallBuilder) -* **[Resources](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::Part)** + * **[Parts](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -123,17 +123,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -143,29 +143,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::Delegate) to the -[Method Builder](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::Delegate) to the +[Method Builder](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::RequestValue) and -[decodable](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::RequestValue) and +[decodable](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-genomics1/5.0.2-beta-1+20210324/google_genomics1/client::RequestValue) are moved +* [request values](https://docs.rs/google-genomics1/5.0.2+20210324/google_genomics1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/genomics1/src/api.rs b/gen/genomics1/src/api.rs index abad7028c8..f38f38ef92 100644 --- a/gen/genomics1/src/api.rs +++ b/gen/genomics1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Genomics { Genomics { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://genomics.googleapis.com/".to_string(), _root_url: "https://genomics.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Genomics { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/genomics1/src/client.rs b/gen/genomics1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/genomics1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/genomics1/src/lib.rs b/gen/genomics1/src/lib.rs index a04e67ee63..24871ab75c 100644 --- a/gen/genomics1/src/lib.rs +++ b/gen/genomics1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *genomics* crate version *5.0.2-beta-1+20210324*, where *20210324* is the exact revision of the *genomics:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *genomics* crate version *5.0.2+20210324*, where *20210324* is the exact revision of the *genomics:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *genomics* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/genomics). diff --git a/gen/gkehub1-cli/Cargo.toml b/gen/gkehub1-cli/Cargo.toml index 6bc52cad48..337a447532 100644 --- a/gen/gkehub1-cli/Cargo.toml +++ b/gen/gkehub1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-gkehub1-cli" -version = "4.0.1+20220211" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with GKE Hub (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gkehub1-cli" @@ -20,13 +20,13 @@ name = "gkehub1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-gkehub1] path = "../gkehub1" -version = "4.0.1+20220211" +version = "5.0.2+20230106" + diff --git a/gen/gkehub1-cli/README.md b/gen/gkehub1-cli/README.md index 0f3c15fe21..3063d06402 100644 --- a/gen/gkehub1-cli/README.md +++ b/gen/gkehub1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *GKE Hub* API at 
revision *20220211*. The CLI is at version *4.0.1*. +This documentation was generated from the *GKE Hub* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash gkehub1 [options] diff --git a/gen/gkehub1-cli/mkdocs.yml b/gen/gkehub1-cli/mkdocs.yml index 24816af1e2..31de5e8ec6 100644 --- a/gen/gkehub1-cli/mkdocs.yml +++ b/gen/gkehub1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: GKE Hub v4.0.1+20220211 +site_name: GKE Hub v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-gkehub1-cli site_description: A complete library to interact with GKE Hub (protocol v1) @@ -7,31 +7,32 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/gkehub1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-features-create.md', 'Projects', 'Locations Features Create'] -- ['projects_locations-features-delete.md', 'Projects', 'Locations Features Delete'] -- ['projects_locations-features-get.md', 'Projects', 'Locations Features Get'] -- ['projects_locations-features-get-iam-policy.md', 'Projects', 'Locations Features Get Iam Policy'] -- ['projects_locations-features-list.md', 'Projects', 'Locations Features List'] -- ['projects_locations-features-patch.md', 'Projects', 'Locations Features Patch'] -- ['projects_locations-features-set-iam-policy.md', 'Projects', 'Locations Features Set Iam Policy'] -- ['projects_locations-features-test-iam-permissions.md', 'Projects', 'Locations Features Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-memberships-create.md', 'Projects', 'Locations Memberships Create'] -- ['projects_locations-memberships-delete.md', 'Projects', 'Locations Memberships Delete'] -- ['projects_locations-memberships-generate-connect-manifest.md', 'Projects', 'Locations Memberships Generate Connect Manifest'] -- ['projects_locations-memberships-get.md', 'Projects', 'Locations 
Memberships Get'] -- ['projects_locations-memberships-get-iam-policy.md', 'Projects', 'Locations Memberships Get Iam Policy'] -- ['projects_locations-memberships-list.md', 'Projects', 'Locations Memberships List'] -- ['projects_locations-memberships-patch.md', 'Projects', 'Locations Memberships Patch'] -- ['projects_locations-memberships-set-iam-policy.md', 'Projects', 'Locations Memberships Set Iam Policy'] -- ['projects_locations-memberships-test-iam-permissions.md', 'Projects', 'Locations Memberships Test Iam Permissions'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Features Create': 'projects_locations-features-create.md' + - 'Locations Features Delete': 'projects_locations-features-delete.md' + - 'Locations Features Get': 'projects_locations-features-get.md' + - 'Locations Features Get Iam Policy': 'projects_locations-features-get-iam-policy.md' + - 'Locations Features List': 'projects_locations-features-list.md' + - 'Locations Features Patch': 'projects_locations-features-patch.md' + - 'Locations Features Set Iam Policy': 'projects_locations-features-set-iam-policy.md' + - 'Locations Features Test Iam Permissions': 'projects_locations-features-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Memberships Create': 'projects_locations-memberships-create.md' + - 'Locations Memberships Delete': 'projects_locations-memberships-delete.md' + - 'Locations Memberships Generate Connect Manifest': 'projects_locations-memberships-generate-connect-manifest.md' + - 'Locations Memberships Get': 
'projects_locations-memberships-get.md' + - 'Locations Memberships Get Iam Policy': 'projects_locations-memberships-get-iam-policy.md' + - 'Locations Memberships List': 'projects_locations-memberships-list.md' + - 'Locations Memberships Patch': 'projects_locations-memberships-patch.md' + - 'Locations Memberships Set Iam Policy': 'projects_locations-memberships-set-iam-policy.md' + - 'Locations Memberships Test Iam Permissions': 'projects_locations-memberships-test-iam-permissions.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/gkehub1-cli/src/client.rs b/gen/gkehub1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/gkehub1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/gkehub1-cli/src/main.rs b/gen/gkehub1-cli/src/main.rs index 0cee8d2d2f..3fd858f1bc 100644 --- a/gen/gkehub1-cli/src/main.rs +++ b/gen/gkehub1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_gkehub1::{api, Error, oauth2}; +use google_gkehub1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -164,7 +163,7 @@ where call = call.request_id(value.unwrap_or("")); }, "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -272,7 +271,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -331,7 +330,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -437,7 +436,7 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -723,7 +722,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -805,6 +804,7 @@ where "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delete-time" => Some(("deleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "endpoint.appliance-cluster.resource-link" => Some(("endpoint.applianceCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "endpoint.edge-cluster.resource-link" => Some(("endpoint.edgeCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "endpoint.gke-cluster.cluster-missing" => Some(("endpoint.gkeCluster.clusterMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "endpoint.gke-cluster.resource-link" => Some(("endpoint.gkeCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -822,6 +822,7 @@ where "endpoint.multi-cloud-cluster.resource-link" => Some(("endpoint.multiCloudCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "endpoint.on-prem-cluster.admin-cluster" => Some(("endpoint.onPremCluster.adminCluster", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"endpoint.on-prem-cluster.cluster-missing" => Some(("endpoint.onPremCluster.clusterMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "endpoint.on-prem-cluster.cluster-type" => Some(("endpoint.onPremCluster.clusterType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "endpoint.on-prem-cluster.resource-link" => Some(("endpoint.onPremCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-id" => Some(("externalId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -831,7 +832,7 @@ where "unique-id" => Some(("uniqueId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-cluster", "authority", "cluster-missing", "code", "connect-version", "create-time", "delete-time", "description", "edge-cluster", "endpoint", "external-id", "gke-cluster", "identity-provider", "issuer", "k8s-version", "kubernetes-api-server-version", "kubernetes-metadata", "kubernetes-resource", "labels", "last-connection-time", "membership-cr-manifest", "memory-mb", "multi-cloud-cluster", "name", "node-count", "node-provider-id", "oidc-jwks", "on-prem-cluster", "resource-link", "resource-options", "state", "unique-id", "update-time", "v1beta1-crd", "vcpu-count", "workload-identity-pool"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-cluster", "appliance-cluster", "authority", "cluster-missing", "cluster-type", "code", "connect-version", "create-time", "delete-time", "description", "edge-cluster", "endpoint", "external-id", "gke-cluster", "identity-provider", "issuer", "k8s-version", "kubernetes-api-server-version", "kubernetes-metadata", "kubernetes-resource", "labels", 
"last-connection-time", "membership-cr-manifest", "memory-mb", "multi-cloud-cluster", "name", "node-count", "node-provider-id", "oidc-jwks", "on-prem-cluster", "resource-link", "resource-options", "state", "unique-id", "update-time", "v1beta1-crd", "vcpu-count", "workload-identity-pool"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -967,16 +968,16 @@ where call = call.registry(value.unwrap_or("")); }, "proxy" => { - call = call.proxy(value.unwrap_or("")); + call = call.proxy( value.map(|v| arg_from_str(v, err, "proxy", "byte")).unwrap_or(b"hello world")); }, "namespace" => { call = call.namespace(value.unwrap_or("")); }, "is-upgrade" => { - call = call.is_upgrade(arg_from_str(value.unwrap_or("false"), err, "is-upgrade", "boolean")); + call = call.is_upgrade( value.map(|v| arg_from_str(v, err, "is-upgrade", "boolean")).unwrap_or(false)); }, "image-pull-secret-content" => { - call = call.image_pull_secret_content(value.unwrap_or("")); + call = call.image_pull_secret_content( value.map(|v| arg_from_str(v, err, "image-pull-secret-content", "byte")).unwrap_or(b"hello world")); }, _ => { let mut found = false; @@ -1084,7 +1085,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1143,7 +1144,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = 
call.order_by(value.unwrap_or("")); @@ -1228,6 +1229,7 @@ where "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "delete-time" => Some(("deleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "endpoint.appliance-cluster.resource-link" => Some(("endpoint.applianceCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "endpoint.edge-cluster.resource-link" => Some(("endpoint.edgeCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "endpoint.gke-cluster.cluster-missing" => Some(("endpoint.gkeCluster.clusterMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "endpoint.gke-cluster.resource-link" => Some(("endpoint.gkeCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1245,6 +1247,7 @@ where "endpoint.multi-cloud-cluster.resource-link" => Some(("endpoint.multiCloudCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "endpoint.on-prem-cluster.admin-cluster" => Some(("endpoint.onPremCluster.adminCluster", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "endpoint.on-prem-cluster.cluster-missing" => Some(("endpoint.onPremCluster.clusterMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "endpoint.on-prem-cluster.cluster-type" => Some(("endpoint.onPremCluster.clusterType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "endpoint.on-prem-cluster.resource-link" => Some(("endpoint.onPremCluster.resourceLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-id" => Some(("externalId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Map })), @@ -1254,7 +1257,7 @@ where "unique-id" => Some(("uniqueId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-cluster", "authority", "cluster-missing", "code", "connect-version", "create-time", "delete-time", "description", "edge-cluster", "endpoint", "external-id", "gke-cluster", "identity-provider", "issuer", "k8s-version", "kubernetes-api-server-version", "kubernetes-metadata", "kubernetes-resource", "labels", "last-connection-time", "membership-cr-manifest", "memory-mb", "multi-cloud-cluster", "name", "node-count", "node-provider-id", "oidc-jwks", "on-prem-cluster", "resource-link", "resource-options", "state", "unique-id", "update-time", "v1beta1-crd", "vcpu-count", "workload-identity-pool"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["admin-cluster", "appliance-cluster", "authority", "cluster-missing", "cluster-type", "code", "connect-version", "create-time", "delete-time", "description", "edge-cluster", "endpoint", "external-id", "gke-cluster", "identity-provider", "issuer", "k8s-version", "kubernetes-api-server-version", "kubernetes-metadata", "kubernetes-resource", "labels", "last-connection-time", "membership-cr-manifest", "memory-mb", "multi-cloud-cluster", "name", "node-count", "node-provider-id", "oidc-jwks", "on-prem-cluster", "resource-link", "resource-options", "state", "unique-id", "update-time", "v1beta1-crd", "vcpu-count", "workload-identity-pool"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1269,7 +1272,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, 
"update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -1691,7 +1694,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1977,7 +1980,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2049,7 +2052,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2077,7 +2080,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2243,7 +2246,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2315,7 +2318,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2343,7 +2346,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2465,7 +2468,7 @@ async fn main() { let mut app = App::new("gkehub1") .author("Sebastian Thiel ") - .version("4.0.1+20220211") + .version("5.0.2+20230106") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_gkehub1_cli") .arg(Arg::with_name("url") diff --git a/gen/gkehub1/Cargo.toml b/gen/gkehub1/Cargo.toml index ad64228269..0462404738 100644 --- a/gen/gkehub1/Cargo.toml +++ b/gen/gkehub1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-gkehub1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with GKE Hub (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gkehub1" homepage = "https://cloud.google.com/anthos/multicluster-management/connect/registering-a-cluster" -documentation = "https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-gkehub1/5.0.2+20230106" license = "MIT" keywords = ["gkehub", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/gkehub1/README.md b/gen/gkehub1/README.md index 8afb768bb6..9f0fb42d8f 100644 --- a/gen/gkehub1/README.md +++ b/gen/gkehub1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-gkehub1` library allows access to all features of the *Google GKE Hub* service. -This documentation was generated from *GKE Hub* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *gkehub:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *GKE Hub* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *gkehub:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *GKE Hub* *v1* API can be found at the [official documentation site](https://cloud.google.com/anthos/multicluster-management/connect/registering-a-cluster). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/GKEHub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/GKEHub) ... * projects - * [*locations features create*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationFeatureCreateCall), [*locations features delete*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationFeatureDeleteCall), [*locations features get*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationFeatureGetCall), [*locations features get iam policy*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationFeatureGetIamPolicyCall), [*locations features list*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationFeatureListCall), [*locations features patch*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationFeaturePatchCall), [*locations features set iam policy*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationFeatureSetIamPolicyCall), [*locations features test iam permissions*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationFeatureTestIamPermissionCall), [*locations get*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationListCall), [*locations memberships 
create*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipCreateCall), [*locations memberships delete*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipDeleteCall), [*locations memberships generate connect manifest*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipGenerateConnectManifestCall), [*locations memberships get*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipGetCall), [*locations memberships get iam policy*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipGetIamPolicyCall), [*locations memberships list*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipListCall), [*locations memberships patch*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipPatchCall), [*locations memberships set iam policy*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipSetIamPolicyCall), [*locations memberships test iam permissions*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationMembershipTestIamPermissionCall), [*locations operations cancel*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/api::ProjectLocationOperationListCall) + * [*locations features 
create*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationFeatureCreateCall), [*locations features delete*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationFeatureDeleteCall), [*locations features get*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationFeatureGetCall), [*locations features get iam policy*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationFeatureGetIamPolicyCall), [*locations features list*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationFeatureListCall), [*locations features patch*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationFeaturePatchCall), [*locations features set iam policy*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationFeatureSetIamPolicyCall), [*locations features test iam permissions*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationFeatureTestIamPermissionCall), [*locations get*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationListCall), [*locations memberships create*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipCreateCall), [*locations memberships delete*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipDeleteCall), [*locations memberships generate connect manifest*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipGenerateConnectManifestCall), [*locations memberships get*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipGetCall), [*locations memberships get iam 
policy*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipGetIamPolicyCall), [*locations memberships list*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipListCall), [*locations memberships patch*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipPatchCall), [*locations memberships set iam policy*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipSetIamPolicyCall), [*locations memberships test iam permissions*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationMembershipTestIamPermissionCall), [*locations operations cancel*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/GKEHub)** +* **[Hub](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/GKEHub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::CallBuilder) +* **[Resources](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::Part)** + * **[Parts](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::Delegate) to the -[Method Builder](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::Delegate) to the +[Method Builder](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::RequestValue) and -[decodable](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::RequestValue) and +[decodable](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-gkehub1/5.0.2-beta-1+20230106/google_gkehub1/client::RequestValue) are moved +* [request values](https://docs.rs/google-gkehub1/5.0.2+20230106/google_gkehub1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/gkehub1/src/api.rs b/gen/gkehub1/src/api.rs index a59e48777d..7a5721d71b 100644 --- a/gen/gkehub1/src/api.rs +++ b/gen/gkehub1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> GKEHub { GKEHub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://gkehub.googleapis.com/".to_string(), _root_url: "https://gkehub.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> GKEHub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/gkehub1/src/client.rs b/gen/gkehub1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/gkehub1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/gkehub1/src/lib.rs b/gen/gkehub1/src/lib.rs index b608c15c32..7141e022e4 100644 --- a/gen/gkehub1/src/lib.rs +++ b/gen/gkehub1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *GKE Hub* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *gkehub:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *GKE Hub* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *gkehub:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *GKE Hub* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/anthos/multicluster-management/connect/registering-a-cluster). diff --git a/gen/gmail1-cli/Cargo.toml b/gen/gmail1-cli/Cargo.toml index 5bf626ce85..c9413dc52f 100644 --- a/gen/gmail1-cli/Cargo.toml +++ b/gen/gmail1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-gmail1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230116" authors = ["Sebastian Thiel "] description = "A complete library to interact with Gmail (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gmail1-cli" @@ -20,13 +20,13 @@ name = "gmail1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-gmail1] path = "../gmail1" -version = "4.0.1+20220228" +version = "5.0.2+20230116" + diff --git a/gen/gmail1-cli/README.md b/gen/gmail1-cli/README.md index 0518fc4d1d..178af28f92 100644 --- a/gen/gmail1-cli/README.md +++ b/gen/gmail1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated 
from the *Gmail* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Gmail* API at revision *20230116*. The CLI is at version *5.0.2*. ```bash gmail1 [options] @@ -56,6 +56,17 @@ gmail1 [options] messages-send (-r )... (-u simple -f [-m ]) [-p ]... [-o ] messages-trash [-p ]... [-o ] messages-untrash [-p ]... [-o ] + settings-cse-identities-create (-r )... [-p ]... [-o ] + settings-cse-identities-delete [-p ]... + settings-cse-identities-get [-p ]... [-o ] + settings-cse-identities-list [-p ]... [-o ] + settings-cse-identities-patch (-r )... [-p ]... [-o ] + settings-cse-keypairs-create (-r )... [-p ]... [-o ] + settings-cse-keypairs-disable (-r )... [-p ]... [-o ] + settings-cse-keypairs-enable (-r )... [-p ]... [-o ] + settings-cse-keypairs-get [-p ]... [-o ] + settings-cse-keypairs-list [-p ]... [-o ] + settings-cse-keypairs-obliterate (-r )... [-p ]... settings-delegates-create (-r )... [-p ]... [-o ] settings-delegates-delete [-p ]... settings-delegates-get [-p ]... 
[-o ] diff --git a/gen/gmail1-cli/mkdocs.yml b/gen/gmail1-cli/mkdocs.yml index f28651ede5..505d977d1a 100644 --- a/gen/gmail1-cli/mkdocs.yml +++ b/gen/gmail1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Gmail v4.0.1+20220228 +site_name: Gmail v5.0.2+20230116 site_url: http://byron.github.io/google-apis-rs/google-gmail1-cli site_description: A complete library to interact with Gmail (protocol v1) @@ -7,76 +7,88 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/gmail1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['users_drafts-create.md', 'Users', 'Drafts Create'] -- ['users_drafts-delete.md', 'Users', 'Drafts Delete'] -- ['users_drafts-get.md', 'Users', 'Drafts Get'] -- ['users_drafts-list.md', 'Users', 'Drafts List'] -- ['users_drafts-send.md', 'Users', 'Drafts Send'] -- ['users_drafts-update.md', 'Users', 'Drafts Update'] -- ['users_get-profile.md', 'Users', 'Get Profile'] -- ['users_history-list.md', 'Users', 'History List'] -- ['users_labels-create.md', 'Users', 'Labels Create'] -- ['users_labels-delete.md', 'Users', 'Labels Delete'] -- ['users_labels-get.md', 'Users', 'Labels Get'] -- ['users_labels-list.md', 'Users', 'Labels List'] -- ['users_labels-patch.md', 'Users', 'Labels Patch'] -- ['users_labels-update.md', 'Users', 'Labels Update'] -- ['users_messages-attachments-get.md', 'Users', 'Messages Attachments Get'] -- ['users_messages-batch-delete.md', 'Users', 'Messages Batch Delete'] -- ['users_messages-batch-modify.md', 'Users', 'Messages Batch Modify'] -- ['users_messages-delete.md', 'Users', 'Messages Delete'] -- ['users_messages-get.md', 'Users', 'Messages Get'] -- ['users_messages-import.md', 'Users', 'Messages Import'] -- ['users_messages-insert.md', 'Users', 'Messages Insert'] -- ['users_messages-list.md', 'Users', 'Messages List'] -- ['users_messages-modify.md', 'Users', 'Messages Modify'] -- ['users_messages-send.md', 'Users', 'Messages Send'] -- ['users_messages-trash.md', 'Users', 'Messages Trash'] -- 
['users_messages-untrash.md', 'Users', 'Messages Untrash'] -- ['users_settings-delegates-create.md', 'Users', 'Settings Delegates Create'] -- ['users_settings-delegates-delete.md', 'Users', 'Settings Delegates Delete'] -- ['users_settings-delegates-get.md', 'Users', 'Settings Delegates Get'] -- ['users_settings-delegates-list.md', 'Users', 'Settings Delegates List'] -- ['users_settings-filters-create.md', 'Users', 'Settings Filters Create'] -- ['users_settings-filters-delete.md', 'Users', 'Settings Filters Delete'] -- ['users_settings-filters-get.md', 'Users', 'Settings Filters Get'] -- ['users_settings-filters-list.md', 'Users', 'Settings Filters List'] -- ['users_settings-forwarding-addresses-create.md', 'Users', 'Settings Forwarding Addresses Create'] -- ['users_settings-forwarding-addresses-delete.md', 'Users', 'Settings Forwarding Addresses Delete'] -- ['users_settings-forwarding-addresses-get.md', 'Users', 'Settings Forwarding Addresses Get'] -- ['users_settings-forwarding-addresses-list.md', 'Users', 'Settings Forwarding Addresses List'] -- ['users_settings-get-auto-forwarding.md', 'Users', 'Settings Get Auto Forwarding'] -- ['users_settings-get-imap.md', 'Users', 'Settings Get Imap'] -- ['users_settings-get-language.md', 'Users', 'Settings Get Language'] -- ['users_settings-get-pop.md', 'Users', 'Settings Get Pop'] -- ['users_settings-get-vacation.md', 'Users', 'Settings Get Vacation'] -- ['users_settings-send-as-create.md', 'Users', 'Settings Send As Create'] -- ['users_settings-send-as-delete.md', 'Users', 'Settings Send As Delete'] -- ['users_settings-send-as-get.md', 'Users', 'Settings Send As Get'] -- ['users_settings-send-as-list.md', 'Users', 'Settings Send As List'] -- ['users_settings-send-as-patch.md', 'Users', 'Settings Send As Patch'] -- ['users_settings-send-as-smime-info-delete.md', 'Users', 'Settings Send As Smime Info Delete'] -- ['users_settings-send-as-smime-info-get.md', 'Users', 'Settings Send As Smime Info Get'] -- 
['users_settings-send-as-smime-info-insert.md', 'Users', 'Settings Send As Smime Info Insert'] -- ['users_settings-send-as-smime-info-list.md', 'Users', 'Settings Send As Smime Info List'] -- ['users_settings-send-as-smime-info-set-default.md', 'Users', 'Settings Send As Smime Info Set Default'] -- ['users_settings-send-as-update.md', 'Users', 'Settings Send As Update'] -- ['users_settings-send-as-verify.md', 'Users', 'Settings Send As Verify'] -- ['users_settings-update-auto-forwarding.md', 'Users', 'Settings Update Auto Forwarding'] -- ['users_settings-update-imap.md', 'Users', 'Settings Update Imap'] -- ['users_settings-update-language.md', 'Users', 'Settings Update Language'] -- ['users_settings-update-pop.md', 'Users', 'Settings Update Pop'] -- ['users_settings-update-vacation.md', 'Users', 'Settings Update Vacation'] -- ['users_stop.md', 'Users', 'Stop'] -- ['users_threads-delete.md', 'Users', 'Threads Delete'] -- ['users_threads-get.md', 'Users', 'Threads Get'] -- ['users_threads-list.md', 'Users', 'Threads List'] -- ['users_threads-modify.md', 'Users', 'Threads Modify'] -- ['users_threads-trash.md', 'Users', 'Threads Trash'] -- ['users_threads-untrash.md', 'Users', 'Threads Untrash'] -- ['users_watch.md', 'Users', 'Watch'] +nav: +- Home: 'index.md' +- 'Users': + - 'Drafts Create': 'users_drafts-create.md' + - 'Drafts Delete': 'users_drafts-delete.md' + - 'Drafts Get': 'users_drafts-get.md' + - 'Drafts List': 'users_drafts-list.md' + - 'Drafts Send': 'users_drafts-send.md' + - 'Drafts Update': 'users_drafts-update.md' + - 'Get Profile': 'users_get-profile.md' + - 'History List': 'users_history-list.md' + - 'Labels Create': 'users_labels-create.md' + - 'Labels Delete': 'users_labels-delete.md' + - 'Labels Get': 'users_labels-get.md' + - 'Labels List': 'users_labels-list.md' + - 'Labels Patch': 'users_labels-patch.md' + - 'Labels Update': 'users_labels-update.md' + - 'Messages Attachments Get': 'users_messages-attachments-get.md' + - 'Messages Batch Delete': 
'users_messages-batch-delete.md' + - 'Messages Batch Modify': 'users_messages-batch-modify.md' + - 'Messages Delete': 'users_messages-delete.md' + - 'Messages Get': 'users_messages-get.md' + - 'Messages Import': 'users_messages-import.md' + - 'Messages Insert': 'users_messages-insert.md' + - 'Messages List': 'users_messages-list.md' + - 'Messages Modify': 'users_messages-modify.md' + - 'Messages Send': 'users_messages-send.md' + - 'Messages Trash': 'users_messages-trash.md' + - 'Messages Untrash': 'users_messages-untrash.md' + - 'Settings Cse Identities Create': 'users_settings-cse-identities-create.md' + - 'Settings Cse Identities Delete': 'users_settings-cse-identities-delete.md' + - 'Settings Cse Identities Get': 'users_settings-cse-identities-get.md' + - 'Settings Cse Identities List': 'users_settings-cse-identities-list.md' + - 'Settings Cse Identities Patch': 'users_settings-cse-identities-patch.md' + - 'Settings Cse Keypairs Create': 'users_settings-cse-keypairs-create.md' + - 'Settings Cse Keypairs Disable': 'users_settings-cse-keypairs-disable.md' + - 'Settings Cse Keypairs Enable': 'users_settings-cse-keypairs-enable.md' + - 'Settings Cse Keypairs Get': 'users_settings-cse-keypairs-get.md' + - 'Settings Cse Keypairs List': 'users_settings-cse-keypairs-list.md' + - 'Settings Cse Keypairs Obliterate': 'users_settings-cse-keypairs-obliterate.md' + - 'Settings Delegates Create': 'users_settings-delegates-create.md' + - 'Settings Delegates Delete': 'users_settings-delegates-delete.md' + - 'Settings Delegates Get': 'users_settings-delegates-get.md' + - 'Settings Delegates List': 'users_settings-delegates-list.md' + - 'Settings Filters Create': 'users_settings-filters-create.md' + - 'Settings Filters Delete': 'users_settings-filters-delete.md' + - 'Settings Filters Get': 'users_settings-filters-get.md' + - 'Settings Filters List': 'users_settings-filters-list.md' + - 'Settings Forwarding Addresses Create': 'users_settings-forwarding-addresses-create.md' + - 
'Settings Forwarding Addresses Delete': 'users_settings-forwarding-addresses-delete.md' + - 'Settings Forwarding Addresses Get': 'users_settings-forwarding-addresses-get.md' + - 'Settings Forwarding Addresses List': 'users_settings-forwarding-addresses-list.md' + - 'Settings Get Auto Forwarding': 'users_settings-get-auto-forwarding.md' + - 'Settings Get Imap': 'users_settings-get-imap.md' + - 'Settings Get Language': 'users_settings-get-language.md' + - 'Settings Get Pop': 'users_settings-get-pop.md' + - 'Settings Get Vacation': 'users_settings-get-vacation.md' + - 'Settings Send As Create': 'users_settings-send-as-create.md' + - 'Settings Send As Delete': 'users_settings-send-as-delete.md' + - 'Settings Send As Get': 'users_settings-send-as-get.md' + - 'Settings Send As List': 'users_settings-send-as-list.md' + - 'Settings Send As Patch': 'users_settings-send-as-patch.md' + - 'Settings Send As Smime Info Delete': 'users_settings-send-as-smime-info-delete.md' + - 'Settings Send As Smime Info Get': 'users_settings-send-as-smime-info-get.md' + - 'Settings Send As Smime Info Insert': 'users_settings-send-as-smime-info-insert.md' + - 'Settings Send As Smime Info List': 'users_settings-send-as-smime-info-list.md' + - 'Settings Send As Smime Info Set Default': 'users_settings-send-as-smime-info-set-default.md' + - 'Settings Send As Update': 'users_settings-send-as-update.md' + - 'Settings Send As Verify': 'users_settings-send-as-verify.md' + - 'Settings Update Auto Forwarding': 'users_settings-update-auto-forwarding.md' + - 'Settings Update Imap': 'users_settings-update-imap.md' + - 'Settings Update Language': 'users_settings-update-language.md' + - 'Settings Update Pop': 'users_settings-update-pop.md' + - 'Settings Update Vacation': 'users_settings-update-vacation.md' + - 'Stop': 'users_stop.md' + - 'Threads Delete': 'users_threads-delete.md' + - 'Threads Get': 'users_threads-get.md' + - 'Threads List': 'users_threads-list.md' + - 'Threads Modify': 
'users_threads-modify.md' + - 'Threads Trash': 'users_threads-trash.md' + - 'Threads Untrash': 'users_threads-untrash.md' + - 'Watch': 'users_watch.md' theme: readthedocs diff --git a/gen/gmail1-cli/src/client.rs b/gen/gmail1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/gmail1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/gmail1-cli/src/main.rs b/gen/gmail1-cli/src/main.rs index 85a7112afd..54795abfc7 100644 --- a/gen/gmail1-cli/src/main.rs +++ b/gen/gmail1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_gmail1::{api, Error, oauth2}; +use google_gmail1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -266,10 +265,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-spam-trash" => { - call = call.include_spam_trash(arg_from_str(value.unwrap_or("false"), err, "include-spam-trash", "boolean")); + call = call.include_spam_trash( value.map(|v| arg_from_str(v, err, "include-spam-trash", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -581,13 +580,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-history-id" => { - call = call.start_history_id(value.unwrap_or("")); + call = call.start_history_id( value.map(|v| arg_from_str(v, err, "start-history-id", "uint64")).unwrap_or(0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", 
"uint32")).unwrap_or(0)); }, "label-id" => { call = call.label_id(value.unwrap_or("")); @@ -1439,16 +1438,16 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "process-for-calendar" => { - call = call.process_for_calendar(arg_from_str(value.unwrap_or("false"), err, "process-for-calendar", "boolean")); + call = call.process_for_calendar( value.map(|v| arg_from_str(v, err, "process-for-calendar", "boolean")).unwrap_or(false)); }, "never-mark-spam" => { - call = call.never_mark_spam(arg_from_str(value.unwrap_or("false"), err, "never-mark-spam", "boolean")); + call = call.never_mark_spam( value.map(|v| arg_from_str(v, err, "never-mark-spam", "boolean")).unwrap_or(false)); }, "internal-date-source" => { call = call.internal_date_source(value.unwrap_or("")); }, "deleted" => { - call = call.deleted(arg_from_str(value.unwrap_or("false"), err, "deleted", "boolean")); + call = call.deleted( value.map(|v| arg_from_str(v, err, "deleted", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1556,7 +1555,7 @@ where call = call.internal_date_source(value.unwrap_or("")); }, "deleted" => { - call = call.deleted(arg_from_str(value.unwrap_or("false"), err, "deleted", "boolean")); + call = call.deleted( value.map(|v| arg_from_str(v, err, "deleted", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1621,13 +1620,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "label-ids" => { call = call.add_label_ids(value.unwrap_or("")); }, "include-spam-trash" => { - call = call.include_spam_trash(arg_from_str(value.unwrap_or("false"), err, "include-spam-trash", "boolean")); + call = call.include_spam_trash( value.map(|v| arg_from_str(v, err, "include-spam-trash", "boolean")).unwrap_or(false)); }, _ => { let mut 
found = false; @@ -1967,6 +1966,778 @@ where } } + async fn _users_settings_cse_identities_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "email-address" => Some(("emailAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "primary-key-pair-id" => Some(("primaryKeyPairId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["email-address", "primary-key-pair-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CseIdentity = json::value::from_value(object).unwrap(); + let mut call = self.hub.users().settings_cse_identities_create(request, opt.value_of("user-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_identities_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.users().settings_cse_identities_delete(opt.value_of("user-id").unwrap_or(""), opt.value_of("cse-email-address").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let 
protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok(mut response) => { + Ok(()) + } + } + } + } + + async fn _users_settings_cse_identities_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.users().settings_cse_identities_get(opt.value_of("user-id").unwrap_or(""), opt.value_of("cse-email-address").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_identities_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.users().settings_cse_identities_list(opt.value_of("user-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_identities_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "email-address" => Some(("emailAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "primary-key-pair-id" => Some(("primaryKeyPairId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["email-address", "primary-key-pair-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CseIdentity = json::value::from_value(object).unwrap(); + let mut call = self.hub.users().settings_cse_identities_patch(request, opt.value_of("user-id").unwrap_or(""), opt.value_of("email-address").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if 
key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_keypairs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "disable-time" => Some(("disableTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enablement-state" => Some(("enablementState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "key-pair-id" => Some(("keyPairId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pem" => Some(("pem", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pkcs7" => Some(("pkcs7", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "subject-email-addresses" => Some(("subjectEmailAddresses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["disable-time", "enablement-state", "key-pair-id", "pem", "pkcs7", "subject-email-addresses"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CseKeyPair = json::value::from_value(object).unwrap(); + let mut call = self.hub.users().settings_cse_keypairs_create(request, opt.value_of("user-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_keypairs_disable(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DisableCseKeyPairRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.users().settings_cse_keypairs_disable(request, opt.value_of("user-id").unwrap_or(""), opt.value_of("key-pair-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_keypairs_enable(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::EnableCseKeyPairRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.users().settings_cse_keypairs_enable(request, opt.value_of("user-id").unwrap_or(""), opt.value_of("key-pair-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_keypairs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.users().settings_cse_keypairs_get(opt.value_of("user-id").unwrap_or(""), opt.value_of("key-pair-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream 
= match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_keypairs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.users().settings_cse_keypairs_list(opt.value_of("user-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _users_settings_cse_keypairs_obliterate(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ObliterateCseKeyPairRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.users().settings_cse_keypairs_obliterate(request, opt.value_of("user-id").unwrap_or(""), opt.value_of("key-pair-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok(mut response) => { + Ok(()) + } + } + } + } + async fn _users_settings_delegates_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -4304,13 +5075,13 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( 
value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "label-ids" => { call = call.add_label_ids(value.unwrap_or("")); }, "include-spam-trash" => { - call = call.include_spam_trash(arg_from_str(value.unwrap_or("false"), err, "include-spam-trash", "boolean")); + call = call.include_spam_trash( value.map(|v| arg_from_str(v, err, "include-spam-trash", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4721,6 +5492,39 @@ where ("messages-untrash", Some(opt)) => { call_result = self._users_messages_untrash(opt, dry_run, &mut err).await; }, + ("settings-cse-identities-create", Some(opt)) => { + call_result = self._users_settings_cse_identities_create(opt, dry_run, &mut err).await; + }, + ("settings-cse-identities-delete", Some(opt)) => { + call_result = self._users_settings_cse_identities_delete(opt, dry_run, &mut err).await; + }, + ("settings-cse-identities-get", Some(opt)) => { + call_result = self._users_settings_cse_identities_get(opt, dry_run, &mut err).await; + }, + ("settings-cse-identities-list", Some(opt)) => { + call_result = self._users_settings_cse_identities_list(opt, dry_run, &mut err).await; + }, + ("settings-cse-identities-patch", Some(opt)) => { + call_result = self._users_settings_cse_identities_patch(opt, dry_run, &mut err).await; + }, + ("settings-cse-keypairs-create", Some(opt)) => { + call_result = self._users_settings_cse_keypairs_create(opt, dry_run, &mut err).await; + }, + ("settings-cse-keypairs-disable", Some(opt)) => { + call_result = self._users_settings_cse_keypairs_disable(opt, dry_run, &mut err).await; + }, + ("settings-cse-keypairs-enable", Some(opt)) => { + call_result = self._users_settings_cse_keypairs_enable(opt, dry_run, &mut err).await; + }, + ("settings-cse-keypairs-get", Some(opt)) => { + call_result = self._users_settings_cse_keypairs_get(opt, dry_run, &mut err).await; + }, + ("settings-cse-keypairs-list", Some(opt)) => { + call_result = self._users_settings_cse_keypairs_list(opt, 
dry_run, &mut err).await; + }, + ("settings-cse-keypairs-obliterate", Some(opt)) => { + call_result = self._users_settings_cse_keypairs_obliterate(opt, dry_run, &mut err).await; + }, ("settings-delegates-create", Some(opt)) => { call_result = self._users_settings_delegates_create(opt, dry_run, &mut err).await; }, @@ -4927,7 +5731,7 @@ async fn main() { let mut exit_status = 0i32; let upload_value_names = ["mode", "file"]; let arg_data = [ - ("users", "methods: 'drafts-create', 'drafts-delete', 'drafts-get', 'drafts-list', 'drafts-send', 'drafts-update', 'get-profile', 'history-list', 'labels-create', 'labels-delete', 'labels-get', 'labels-list', 'labels-patch', 'labels-update', 'messages-attachments-get', 'messages-batch-delete', 'messages-batch-modify', 'messages-delete', 'messages-get', 'messages-import', 'messages-insert', 'messages-list', 'messages-modify', 'messages-send', 'messages-trash', 'messages-untrash', 'settings-delegates-create', 'settings-delegates-delete', 'settings-delegates-get', 'settings-delegates-list', 'settings-filters-create', 'settings-filters-delete', 'settings-filters-get', 'settings-filters-list', 'settings-forwarding-addresses-create', 'settings-forwarding-addresses-delete', 'settings-forwarding-addresses-get', 'settings-forwarding-addresses-list', 'settings-get-auto-forwarding', 'settings-get-imap', 'settings-get-language', 'settings-get-pop', 'settings-get-vacation', 'settings-send-as-create', 'settings-send-as-delete', 'settings-send-as-get', 'settings-send-as-list', 'settings-send-as-patch', 'settings-send-as-smime-info-delete', 'settings-send-as-smime-info-get', 'settings-send-as-smime-info-insert', 'settings-send-as-smime-info-list', 'settings-send-as-smime-info-set-default', 'settings-send-as-update', 'settings-send-as-verify', 'settings-update-auto-forwarding', 'settings-update-imap', 'settings-update-language', 'settings-update-pop', 'settings-update-vacation', 'stop', 'threads-delete', 'threads-get', 'threads-list', 
'threads-modify', 'threads-trash', 'threads-untrash' and 'watch'", vec![ + ("users", "methods: 'drafts-create', 'drafts-delete', 'drafts-get', 'drafts-list', 'drafts-send', 'drafts-update', 'get-profile', 'history-list', 'labels-create', 'labels-delete', 'labels-get', 'labels-list', 'labels-patch', 'labels-update', 'messages-attachments-get', 'messages-batch-delete', 'messages-batch-modify', 'messages-delete', 'messages-get', 'messages-import', 'messages-insert', 'messages-list', 'messages-modify', 'messages-send', 'messages-trash', 'messages-untrash', 'settings-cse-identities-create', 'settings-cse-identities-delete', 'settings-cse-identities-get', 'settings-cse-identities-list', 'settings-cse-identities-patch', 'settings-cse-keypairs-create', 'settings-cse-keypairs-disable', 'settings-cse-keypairs-enable', 'settings-cse-keypairs-get', 'settings-cse-keypairs-list', 'settings-cse-keypairs-obliterate', 'settings-delegates-create', 'settings-delegates-delete', 'settings-delegates-get', 'settings-delegates-list', 'settings-filters-create', 'settings-filters-delete', 'settings-filters-get', 'settings-filters-list', 'settings-forwarding-addresses-create', 'settings-forwarding-addresses-delete', 'settings-forwarding-addresses-get', 'settings-forwarding-addresses-list', 'settings-get-auto-forwarding', 'settings-get-imap', 'settings-get-language', 'settings-get-pop', 'settings-get-vacation', 'settings-send-as-create', 'settings-send-as-delete', 'settings-send-as-get', 'settings-send-as-list', 'settings-send-as-patch', 'settings-send-as-smime-info-delete', 'settings-send-as-smime-info-get', 'settings-send-as-smime-info-insert', 'settings-send-as-smime-info-list', 'settings-send-as-smime-info-set-default', 'settings-send-as-update', 'settings-send-as-verify', 'settings-update-auto-forwarding', 'settings-update-imap', 'settings-update-language', 'settings-update-pop', 'settings-update-vacation', 'stop', 'threads-delete', 'threads-get', 'threads-list', 'threads-modify', 
'threads-trash', 'threads-untrash' and 'watch'", vec![ ("drafts-create", Some(r##"Creates a new draft with the `DRAFT` label."##), "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_drafts-create", @@ -5662,6 +6466,314 @@ async fn main() { Some(false), Some(false)), ]), + ("settings-cse-identities-create", + Some(r##"Creates and configures a client-side encryption identity that's authorized to send mail from the user account. Google publishes the S/MIME certificate to a shared domain-wide directory so that people within a Google Workspace organization can encrypt and send mail to the identity."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-identities-create", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-identities-delete", + Some(r##"Deletes a client-side encryption identity. The authenticated user can no longer use the identity to send encrypted messages. You cannot restore the identity after you delete it. Instead, use the CreateCseIdentity method to create another identity with the same configuration."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-identities-delete", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. 
To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"cse-email-address"##), + None, + Some(r##"The primary email address associated with the client-side encryption identity configuration that's removed."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + ]), + ("settings-cse-identities-get", + Some(r##"Retrieves a client-side encryption identity configuration."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-identities-get", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"cse-email-address"##), + None, + Some(r##"The primary email address associated with the client-side encryption identity configuration that's retrieved."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-identities-list", + Some(r##"Lists the client-side encrypted identities for an authenticated user."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-identities-list", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. 
To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-identities-patch", + Some(r##"Associates a different key pair with an existing client-side encryption identity. The updated key pair must validate against Google's [S/MIME certificate profiles](https://support.google.com/a/answer/7300887)."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-identities-patch", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"email-address"##), + None, + Some(r##"The email address of the client-side encryption identity to update."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-keypairs-create", + Some(r##"Creates and uploads a client-side encryption S/MIME public key certificate chain and private key metadata for the authenticated user."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-keypairs-create", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. 
To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-keypairs-disable", + Some(r##"Turns off a client-side encryption key pair. The authenticated user can no longer use the key pair to decrypt incoming CSE message texts or sign outgoing CSE mail. To regain access, use the EnableCseKeyPair to turn on the key pair. After 30 days, you can permanently delete the key pair by using the ObliterateCseKeyPair method."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-keypairs-disable", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"key-pair-id"##), + None, + Some(r##"The identifier of the key pair to turn off."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-keypairs-enable", + Some(r##"Turns on a client-side encryption key pair that was turned off. 
The key pair becomes active again for any associated client-side encryption identities."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-keypairs-enable", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"key-pair-id"##), + None, + Some(r##"The identifier of the key pair to turn on."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-keypairs-get", + Some(r##"Retrieves an existing client-side encryption key pair."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-keypairs-get", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. 
To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"key-pair-id"##), + None, + Some(r##"The identifier of the key pair to retrieve."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-keypairs-list", + Some(r##"Lists client-side encryption key pairs for an authenticated user."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-keypairs-list", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("settings-cse-keypairs-obliterate", + Some(r##"Deletes a client-side encryption key pair permanently and immediately. You can only permanently delete key pairs that have been turned off for more than 30 days. To turn off a key pair, use the DisableCseKeyPair method. Gmail can't restore or decrypt any messages that were encrypted by an obliterated key. Authenticated users and Google Workspace administrators lose access to reading the encrypted messages."##), + "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-cse-keypairs-obliterate", + vec![ + (Some(r##"user-id"##), + None, + Some(r##"The requester's primary email address. 
To indicate the authenticated user, you can use the special value `me`."##), + Some(true), + Some(false)), + + (Some(r##"key-pair-id"##), + None, + Some(r##"The identifier of the key pair to obliterate."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + ]), ("settings-delegates-create", Some(r##"Adds a delegate with its verification status set directly to `accepted`, without sending any verification email. The delegate user must be a member of the same G Suite organization as the delegator user. Gmail imposes limitations on the number of delegates and delegators each user in a G Suite organization can have. These limits depend on your organization, but in general each user can have up to 25 delegates and up to 10 delegators. Note that a delegate user must be referred to by their primary email address, and not an email alias. Also note that when a new delegate is created, there may be up to a one minute delay before the new delegate is available for use. This method is only available to service account clients that have been delegated domain-wide authority."##), "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-delegates-create", @@ -5791,7 +6903,7 @@ async fn main() { Some(false)), ]), ("settings-filters-delete", - Some(r##"Deletes a filter."##), + Some(r##"Immediately and permanently deletes the specified filter."##), "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_settings-filters-delete", vec![ (Some(r##"user-id"##), @@ -6571,7 +7683,7 @@ async fn main() { Some(true)), ]), ("threads-delete", - Some(r##"Immediately and permanently deletes the specified thread. This operation cannot be undone. 
Prefer `threads.trash` instead."##), + Some(r##"Immediately and permanently deletes the specified thread. Any messages that belong to the thread are also deleted. This operation cannot be undone. Prefer `threads.trash` instead."##), "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_threads-delete", vec![ (Some(r##"user-id"##), @@ -6677,7 +7789,7 @@ async fn main() { Some(false)), ]), ("threads-trash", - Some(r##"Moves the specified thread to the trash."##), + Some(r##"Moves the specified thread to the trash. Any messages that belong to the thread are also moved to the trash."##), "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_threads-trash", vec![ (Some(r##"user-id"##), @@ -6705,7 +7817,7 @@ async fn main() { Some(false)), ]), ("threads-untrash", - Some(r##"Removes the specified thread from the trash."##), + Some(r##"Removes the specified thread from the trash. Any messages that belong to the thread are also removed from the trash."##), "Details at http://byron.github.io/google-apis-rs/google_gmail1_cli/users_threads-untrash", vec![ (Some(r##"user-id"##), @@ -6766,7 +7878,7 @@ async fn main() { let mut app = App::new("gmail1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230116") .about("The Gmail API lets you view and manage Gmail mailbox data like threads, messages, and labels.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_gmail1_cli") .arg(Arg::with_name("url") diff --git a/gen/gmail1/Cargo.toml b/gen/gmail1/Cargo.toml index bc1af308f0..98b2a70b8e 100644 --- a/gen/gmail1/Cargo.toml +++ b/gen/gmail1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-gmail1" -version = "5.0.2-beta-1+20230116" +version = "5.0.2+20230116" authors = ["Sebastian Thiel "] description = "A complete library to interact with Gmail (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gmail1" homepage = 
"https://developers.google.com/gmail/api/" -documentation = "https://docs.rs/google-gmail1/5.0.2-beta-1+20230116" +documentation = "https://docs.rs/google-gmail1/5.0.2+20230116" license = "MIT" keywords = ["gmail", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/gmail1/README.md b/gen/gmail1/README.md index f3f0f693af..9a272ece25 100644 --- a/gen/gmail1/README.md +++ b/gen/gmail1/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-gmail1` library allows access to all features of the *Google Gmail* service. -This documentation was generated from *Gmail* crate version *5.0.2-beta-1+20230116*, where *20230116* is the exact revision of the *gmail:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Gmail* crate version *5.0.2+20230116*, where *20230116* is the exact revision of the *gmail:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Gmail* *v1* API can be found at the [official documentation site](https://developers.google.com/gmail/api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/Gmail) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/Gmail) ... 
* users - * [*drafts create*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftCreateCall), [*drafts delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftDeleteCall), [*drafts get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftGetCall), [*drafts list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftListCall), [*drafts send*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftSendCall), [*drafts update*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftUpdateCall), [*get profile*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserGetProfileCall), [*history list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserHistoryListCall), [*labels create*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserLabelCreateCall), [*labels delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserLabelDeleteCall), [*labels get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserLabelGetCall), [*labels list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserLabelListCall), [*labels patch*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserLabelPatchCall), [*labels update*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserLabelUpdateCall), [*messages attachments get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageAttachmentGetCall), [*messages batch delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageBatchDeleteCall), [*messages batch modify*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageBatchModifyCall), [*messages 
delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageDeleteCall), [*messages get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageGetCall), [*messages import*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageImportCall), [*messages insert*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageInsertCall), [*messages list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageListCall), [*messages modify*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageModifyCall), [*messages send*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageSendCall), [*messages trash*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageTrashCall), [*messages untrash*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageUntrashCall), [*settings cse identities create*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseIdentityCreateCall), [*settings cse identities delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseIdentityDeleteCall), [*settings cse identities get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseIdentityGetCall), [*settings cse identities list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseIdentityListCall), [*settings cse identities patch*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseIdentityPatchCall), [*settings cse keypairs create*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseKeypairCreateCall), [*settings cse keypairs disable*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseKeypairDisableCall), [*settings cse 
keypairs enable*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseKeypairEnableCall), [*settings cse keypairs get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseKeypairGetCall), [*settings cse keypairs list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseKeypairListCall), [*settings cse keypairs obliterate*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingCseKeypairObliterateCall), [*settings delegates create*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingDelegateCreateCall), [*settings delegates delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingDelegateDeleteCall), [*settings delegates get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingDelegateGetCall), [*settings delegates list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingDelegateListCall), [*settings filters create*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingFilterCreateCall), [*settings filters delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingFilterDeleteCall), [*settings filters get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingFilterGetCall), [*settings filters list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingFilterListCall), [*settings forwarding addresses create*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingForwardingAddressCreateCall), [*settings forwarding addresses delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingForwardingAddressDeleteCall), [*settings forwarding addresses 
get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingForwardingAddressGetCall), [*settings forwarding addresses list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingForwardingAddressListCall), [*settings get auto forwarding*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingGetAutoForwardingCall), [*settings get imap*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingGetImapCall), [*settings get language*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingGetLanguageCall), [*settings get pop*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingGetPopCall), [*settings get vacation*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingGetVacationCall), [*settings send as create*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendACreateCall), [*settings send as delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendADeleteCall), [*settings send as get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendAGetCall), [*settings send as list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendAListCall), [*settings send as patch*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendAPatchCall), [*settings send as smime info delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendASmimeInfoDeleteCall), [*settings send as smime info get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendASmimeInfoGetCall), [*settings send as smime info insert*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendASmimeInfoInsertCall), [*settings send as smime info 
list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendASmimeInfoListCall), [*settings send as smime info set default*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendASmimeInfoSetDefaultCall), [*settings send as update*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendAUpdateCall), [*settings send as verify*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingSendAVerifyCall), [*settings update auto forwarding*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingUpdateAutoForwardingCall), [*settings update imap*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingUpdateImapCall), [*settings update language*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingUpdateLanguageCall), [*settings update pop*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingUpdatePopCall), [*settings update vacation*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserSettingUpdateVacationCall), [*stop*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserStopCall), [*threads delete*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserThreadDeleteCall), [*threads get*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserThreadGetCall), [*threads list*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserThreadListCall), [*threads modify*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserThreadModifyCall), [*threads trash*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserThreadTrashCall), [*threads untrash*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserThreadUntrashCall) and 
[*watch*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserWatchCall) + * [*drafts create*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftCreateCall), [*drafts delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftDeleteCall), [*drafts get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftGetCall), [*drafts list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftListCall), [*drafts send*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftSendCall), [*drafts update*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftUpdateCall), [*get profile*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserGetProfileCall), [*history list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserHistoryListCall), [*labels create*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserLabelCreateCall), [*labels delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserLabelDeleteCall), [*labels get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserLabelGetCall), [*labels list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserLabelListCall), [*labels patch*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserLabelPatchCall), [*labels update*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserLabelUpdateCall), [*messages attachments get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageAttachmentGetCall), [*messages batch delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageBatchDeleteCall), [*messages batch modify*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageBatchModifyCall), [*messages delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageDeleteCall), 
[*messages get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageGetCall), [*messages import*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageImportCall), [*messages insert*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageInsertCall), [*messages list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageListCall), [*messages modify*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageModifyCall), [*messages send*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageSendCall), [*messages trash*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageTrashCall), [*messages untrash*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageUntrashCall), [*settings cse identities create*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseIdentityCreateCall), [*settings cse identities delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseIdentityDeleteCall), [*settings cse identities get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseIdentityGetCall), [*settings cse identities list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseIdentityListCall), [*settings cse identities patch*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseIdentityPatchCall), [*settings cse keypairs create*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseKeypairCreateCall), [*settings cse keypairs disable*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseKeypairDisableCall), [*settings cse keypairs enable*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseKeypairEnableCall), [*settings cse keypairs 
get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseKeypairGetCall), [*settings cse keypairs list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseKeypairListCall), [*settings cse keypairs obliterate*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingCseKeypairObliterateCall), [*settings delegates create*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingDelegateCreateCall), [*settings delegates delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingDelegateDeleteCall), [*settings delegates get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingDelegateGetCall), [*settings delegates list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingDelegateListCall), [*settings filters create*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingFilterCreateCall), [*settings filters delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingFilterDeleteCall), [*settings filters get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingFilterGetCall), [*settings filters list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingFilterListCall), [*settings forwarding addresses create*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingForwardingAddressCreateCall), [*settings forwarding addresses delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingForwardingAddressDeleteCall), [*settings forwarding addresses get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingForwardingAddressGetCall), [*settings forwarding addresses list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingForwardingAddressListCall), [*settings get auto 
forwarding*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingGetAutoForwardingCall), [*settings get imap*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingGetImapCall), [*settings get language*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingGetLanguageCall), [*settings get pop*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingGetPopCall), [*settings get vacation*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingGetVacationCall), [*settings send as create*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendACreateCall), [*settings send as delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendADeleteCall), [*settings send as get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendAGetCall), [*settings send as list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendAListCall), [*settings send as patch*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendAPatchCall), [*settings send as smime info delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendASmimeInfoDeleteCall), [*settings send as smime info get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendASmimeInfoGetCall), [*settings send as smime info insert*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendASmimeInfoInsertCall), [*settings send as smime info list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendASmimeInfoListCall), [*settings send as smime info set default*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendASmimeInfoSetDefaultCall), [*settings send as update*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendAUpdateCall), [*settings 
send as verify*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingSendAVerifyCall), [*settings update auto forwarding*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingUpdateAutoForwardingCall), [*settings update imap*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingUpdateImapCall), [*settings update language*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingUpdateLanguageCall), [*settings update pop*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingUpdatePopCall), [*settings update vacation*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserSettingUpdateVacationCall), [*stop*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserStopCall), [*threads delete*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserThreadDeleteCall), [*threads get*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserThreadGetCall), [*threads list*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserThreadListCall), [*threads modify*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserThreadModifyCall), [*threads trash*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserThreadTrashCall), [*threads untrash*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserThreadUntrashCall) and [*watch*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserWatchCall) Upload supported by ... 
-* [*drafts create users*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftCreateCall) -* [*drafts send users*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftSendCall) -* [*drafts update users*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserDraftUpdateCall) -* [*messages import users*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageImportCall) -* [*messages insert users*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageInsertCall) -* [*messages send users*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/api::UserMessageSendCall) +* [*drafts create users*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftCreateCall) +* [*drafts send users*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftSendCall) +* [*drafts update users*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserDraftUpdateCall) +* [*messages import users*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageImportCall) +* [*messages insert users*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageInsertCall) +* [*messages send users*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/api::UserMessageSendCall) @@ -32,17 +32,17 @@ Upload supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/Gmail)** +* **[Hub](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/Gmail)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::CallBuilder) -* **[Resources](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::CallBuilder) +* **[Resources](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::Part)** + * **[Parts](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -145,17 +145,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -165,29 +165,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::Delegate) to the -[Method Builder](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::Delegate) to the +[Method Builder](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::RequestValue) and -[decodable](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::RequestValue) and +[decodable](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-gmail1/5.0.2-beta-1+20230116/google_gmail1/client::RequestValue) are moved +* [request values](https://docs.rs/google-gmail1/5.0.2+20230116/google_gmail1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/gmail1/src/api.rs b/gen/gmail1/src/api.rs index 557ca7d111..48256442ec 100644 --- a/gen/gmail1/src/api.rs +++ b/gen/gmail1/src/api.rs @@ -182,7 +182,7 @@ impl<'a, S> Gmail { Gmail { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://gmail.googleapis.com/".to_string(), _root_url: "https://gmail.googleapis.com/".to_string(), } @@ -193,7 +193,7 @@ impl<'a, S> Gmail { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/gmail1/src/client.rs b/gen/gmail1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/gmail1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/gmail1/src/lib.rs b/gen/gmail1/src/lib.rs index 60d92c6385..ddd8b97715 100644 --- a/gen/gmail1/src/lib.rs +++ b/gen/gmail1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Gmail* crate version *5.0.2-beta-1+20230116*, where *20230116* is the exact revision of the *gmail:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Gmail* crate version *5.0.2+20230116*, where *20230116* is the exact revision of the *gmail:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Gmail* *v1* API can be found at the //! [official documentation site](https://developers.google.com/gmail/api/). diff --git a/gen/gmailpostmastertools1-cli/Cargo.toml b/gen/gmailpostmastertools1-cli/Cargo.toml index c46638a18e..f42d748543 100644 --- a/gen/gmailpostmastertools1-cli/Cargo.toml +++ b/gen/gmailpostmastertools1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-gmailpostmastertools1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Postmaster Tools (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gmailpostmastertools1-cli" @@ -20,13 +20,13 @@ name = "gmailpostmastertools1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-gmailpostmastertools1] path = "../gmailpostmastertools1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/gmailpostmastertools1-cli/README.md b/gen/gmailpostmastertools1-cli/README.md index 006db075ee..3178135dc4 100644 --- a/gen/gmailpostmastertools1-cli/README.md +++ 
b/gen/gmailpostmastertools1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Postmaster Tools* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Postmaster Tools* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash gmailpostmastertools1 [options] diff --git a/gen/gmailpostmastertools1-cli/mkdocs.yml b/gen/gmailpostmastertools1-cli/mkdocs.yml index 8b52f88348..26fe0eb123 100644 --- a/gen/gmailpostmastertools1-cli/mkdocs.yml +++ b/gen/gmailpostmastertools1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Postmaster Tools v4.0.1+20220305 +site_name: Postmaster Tools v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-gmailpostmastertools1-cli site_description: A complete library to interact with Postmaster Tools (protocol v1) @@ -7,12 +7,13 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/gmailpostmastert docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['domains_get.md', 'Domains', 'Get'] -- ['domains_list.md', 'Domains', 'List'] -- ['domains_traffic-stats-get.md', 'Domains', 'Traffic Stats Get'] -- ['domains_traffic-stats-list.md', 'Domains', 'Traffic Stats List'] +nav: +- Home: 'index.md' +- 'Domains': + - 'Get': 'domains_get.md' + - 'List': 'domains_list.md' + - 'Traffic Stats Get': 'domains_traffic-stats-get.md' + - 'Traffic Stats List': 'domains_traffic-stats-list.md' theme: readthedocs diff --git a/gen/gmailpostmastertools1-cli/src/client.rs b/gen/gmailpostmastertools1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/gmailpostmastertools1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use 
serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/gmailpostmastertools1-cli/src/main.rs b/gen/gmailpostmastertools1-cli/src/main.rs index a9b75a6908..fc5cf49fe2 100644 --- a/gen/gmailpostmastertools1-cli/src/main.rs +++ b/gen/gmailpostmastertools1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_gmailpostmastertools1::{api, Error, oauth2}; +use google_gmailpostmastertools1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -221,28 +220,28 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-date-year" => { - call = call.start_date_year(arg_from_str(value.unwrap_or("-0"), err, "start-date-year", "integer")); + call = call.start_date_year( value.map(|v| arg_from_str(v, err, "start-date-year", "int32")).unwrap_or(-0)); }, "start-date-month" => { - call = call.start_date_month(arg_from_str(value.unwrap_or("-0"), err, "start-date-month", "integer")); + call = call.start_date_month( value.map(|v| arg_from_str(v, err, "start-date-month", "int32")).unwrap_or(-0)); }, "start-date-day" => { - call = call.start_date_day(arg_from_str(value.unwrap_or("-0"), err, "start-date-day", "integer")); + call = 
call.start_date_day( value.map(|v| arg_from_str(v, err, "start-date-day", "int32")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "end-date-year" => { - call = call.end_date_year(arg_from_str(value.unwrap_or("-0"), err, "end-date-year", "integer")); + call = call.end_date_year( value.map(|v| arg_from_str(v, err, "end-date-year", "int32")).unwrap_or(-0)); }, "end-date-month" => { - call = call.end_date_month(arg_from_str(value.unwrap_or("-0"), err, "end-date-month", "integer")); + call = call.end_date_month( value.map(|v| arg_from_str(v, err, "end-date-month", "int32")).unwrap_or(-0)); }, "end-date-day" => { - call = call.end_date_day(arg_from_str(value.unwrap_or("-0"), err, "end-date-day", "integer")); + call = call.end_date_day( value.map(|v| arg_from_str(v, err, "end-date-day", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -478,7 +477,7 @@ async fn main() { let mut app = App::new("gmailpostmastertools1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("The Postmaster Tools API is a RESTful API that provides programmatic access to email traffic metrics (like spam reports, delivery errors etc) otherwise available through the Gmail Postmaster Tools UI currently.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_gmailpostmastertools1_cli") .arg(Arg::with_name("url") diff --git a/gen/gmailpostmastertools1/Cargo.toml b/gen/gmailpostmastertools1/Cargo.toml index 0dc64e76b3..43e1b0941a 100644 --- a/gen/gmailpostmastertools1/Cargo.toml +++ b/gen/gmailpostmastertools1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-gmailpostmastertools1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = 
["Sebastian Thiel "] description = "A complete library to interact with Postmaster Tools (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gmailpostmastertools1" homepage = "https://developers.google.com/gmail/postmaster" -documentation = "https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123" license = "MIT" keywords = ["gmailpostmastertools", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/gmailpostmastertools1/README.md b/gen/gmailpostmastertools1/README.md index 47786ccb22..370fd3e94b 100644 --- a/gen/gmailpostmastertools1/README.md +++ b/gen/gmailpostmastertools1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-gmailpostmastertools1` library allows access to all features of the *Google Postmaster Tools* service. -This documentation was generated from *Postmaster Tools* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *gmailpostmastertools:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Postmaster Tools* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *gmailpostmastertools:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Postmaster Tools* *v1* API can be found at the [official documentation site](https://developers.google.com/gmail/postmaster). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/PostmasterTools) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/PostmasterTools) ... 
-* [domains](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/api::Domain) - * [*get*](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/api::DomainGetCall), [*list*](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/api::DomainListCall), [*traffic stats get*](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/api::DomainTrafficStatGetCall) and [*traffic stats list*](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/api::DomainTrafficStatListCall) +* [domains](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/api::Domain) + * [*get*](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/api::DomainGetCall), [*list*](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/api::DomainListCall), [*traffic stats get*](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/api::DomainTrafficStatGetCall) and [*traffic stats list*](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/api::DomainTrafficStatListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/PostmasterTools)** +* **[Hub](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/PostmasterTools)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::MethodsBuilder) which in turn - allow access to individual 
[*Call Builders*](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::CallBuilder) -* **[Resources](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::CallBuilder) +* **[Resources](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::Part)** + * **[Parts](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::Delegate) to the -[Method Builder](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::Delegate) to the +[Method Builder](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::RequestValue) and -[decodable](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::RequestValue) and +[decodable](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-gmailpostmastertools1/5.0.2-beta-1+20230123/google_gmailpostmastertools1/client::RequestValue) are moved +* [request values](https://docs.rs/google-gmailpostmastertools1/5.0.2+20230123/google_gmailpostmastertools1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/gmailpostmastertools1/src/api.rs b/gen/gmailpostmastertools1/src/api.rs index 4530984da2..ca668df4da 100644 --- a/gen/gmailpostmastertools1/src/api.rs +++ b/gen/gmailpostmastertools1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> PostmasterTools { PostmasterTools { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://gmailpostmastertools.googleapis.com/".to_string(), _root_url: "https://gmailpostmastertools.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> PostmasterTools { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/gmailpostmastertools1/src/client.rs b/gen/gmailpostmastertools1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/gmailpostmastertools1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/gmailpostmastertools1/src/lib.rs b/gen/gmailpostmastertools1/src/lib.rs index 1ab0bcfb45..d66067412a 100644 --- a/gen/gmailpostmastertools1/src/lib.rs +++ b/gen/gmailpostmastertools1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Postmaster Tools* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *gmailpostmastertools:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Postmaster Tools* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *gmailpostmastertools:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Postmaster Tools* *v1* API can be found at the //! [official documentation site](https://developers.google.com/gmail/postmaster). diff --git a/gen/gmailpostmastertools1_beta1-cli/Cargo.toml b/gen/gmailpostmastertools1_beta1-cli/Cargo.toml index 95f548578b..04059331b5 100644 --- a/gen/gmailpostmastertools1_beta1-cli/Cargo.toml +++ b/gen/gmailpostmastertools1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-gmailpostmastertools1_beta1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Postmaster Tools (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gmailpostmastertools1_beta1-cli" @@ -20,13 +20,13 @@ name = "gmailpostmastertools1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-gmailpostmastertools1_beta1] path = "../gmailpostmastertools1_beta1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/gmailpostmastertools1_beta1-cli/README.md 
b/gen/gmailpostmastertools1_beta1-cli/README.md index ecc6de4e56..6a79a327bd 100644 --- a/gen/gmailpostmastertools1_beta1-cli/README.md +++ b/gen/gmailpostmastertools1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Postmaster Tools* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Postmaster Tools* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash gmailpostmastertools1-beta1 [options] diff --git a/gen/gmailpostmastertools1_beta1-cli/mkdocs.yml b/gen/gmailpostmastertools1_beta1-cli/mkdocs.yml index 57f49dcf78..cf1f4081a9 100644 --- a/gen/gmailpostmastertools1_beta1-cli/mkdocs.yml +++ b/gen/gmailpostmastertools1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Postmaster Tools v4.0.1+20220305 +site_name: Postmaster Tools v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-gmailpostmastertools1_beta1-cli site_description: A complete library to interact with Postmaster Tools (protocol v1beta1) @@ -7,12 +7,13 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/gmailpostmastert docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['domains_get.md', 'Domains', 'Get'] -- ['domains_list.md', 'Domains', 'List'] -- ['domains_traffic-stats-get.md', 'Domains', 'Traffic Stats Get'] -- ['domains_traffic-stats-list.md', 'Domains', 'Traffic Stats List'] +nav: +- Home: 'index.md' +- 'Domains': + - 'Get': 'domains_get.md' + - 'List': 'domains_list.md' + - 'Traffic Stats Get': 'domains_traffic-stats-get.md' + - 'Traffic Stats List': 'domains_traffic-stats-list.md' theme: readthedocs diff --git a/gen/gmailpostmastertools1_beta1-cli/src/client.rs b/gen/gmailpostmastertools1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/gmailpostmastertools1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// 
COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - 
Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if 
cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/gmailpostmastertools1_beta1-cli/src/main.rs b/gen/gmailpostmastertools1_beta1-cli/src/main.rs index 1a89536b34..ec305f9ddb 100644 --- a/gen/gmailpostmastertools1_beta1-cli/src/main.rs +++ b/gen/gmailpostmastertools1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_gmailpostmastertools1_beta1::{api, Error, oauth2}; +use google_gmailpostmastertools1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -221,28 +220,28 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-date-year" => { - call = call.start_date_year(arg_from_str(value.unwrap_or("-0"), err, "start-date-year", "integer")); + call = call.start_date_year( value.map(|v| arg_from_str(v, err, "start-date-year", "int32")).unwrap_or(-0)); }, "start-date-month" => { - call = call.start_date_month(arg_from_str(value.unwrap_or("-0"), err, "start-date-month", "integer")); + call = call.start_date_month( value.map(|v| arg_from_str(v, err, "start-date-month", "int32")).unwrap_or(-0)); }, "start-date-day" => { - call = call.start_date_day(arg_from_str(value.unwrap_or("-0"), err, "start-date-day", 
"integer")); + call = call.start_date_day( value.map(|v| arg_from_str(v, err, "start-date-day", "int32")).unwrap_or(-0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "end-date-year" => { - call = call.end_date_year(arg_from_str(value.unwrap_or("-0"), err, "end-date-year", "integer")); + call = call.end_date_year( value.map(|v| arg_from_str(v, err, "end-date-year", "int32")).unwrap_or(-0)); }, "end-date-month" => { - call = call.end_date_month(arg_from_str(value.unwrap_or("-0"), err, "end-date-month", "integer")); + call = call.end_date_month( value.map(|v| arg_from_str(v, err, "end-date-month", "int32")).unwrap_or(-0)); }, "end-date-day" => { - call = call.end_date_day(arg_from_str(value.unwrap_or("-0"), err, "end-date-day", "integer")); + call = call.end_date_day( value.map(|v| arg_from_str(v, err, "end-date-day", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -478,7 +477,7 @@ async fn main() { let mut app = App::new("gmailpostmastertools1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("The Postmaster Tools API is a RESTful API that provides programmatic access to email traffic metrics (like spam reports, delivery errors etc) otherwise available through the Gmail Postmaster Tools UI currently.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_gmailpostmastertools1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/gmailpostmastertools1_beta1/Cargo.toml b/gen/gmailpostmastertools1_beta1/Cargo.toml index b7bc6b5d7f..2f2fabcae3 100644 --- a/gen/gmailpostmastertools1_beta1/Cargo.toml +++ b/gen/gmailpostmastertools1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-gmailpostmastertools1_beta1" -version = 
"5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Postmaster Tools (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/gmailpostmastertools1_beta1" homepage = "https://developers.google.com/gmail/postmaster" -documentation = "https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123" license = "MIT" keywords = ["gmailpostmastertools", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/gmailpostmastertools1_beta1/README.md b/gen/gmailpostmastertools1_beta1/README.md index 896c9cd0aa..8c8469eb4f 100644 --- a/gen/gmailpostmastertools1_beta1/README.md +++ b/gen/gmailpostmastertools1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-gmailpostmastertools1_beta1` library allows access to all features of the *Google Postmaster Tools* service. -This documentation was generated from *Postmaster Tools* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *gmailpostmastertools:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Postmaster Tools* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *gmailpostmastertools:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Postmaster Tools* *v1_beta1* API can be found at the [official documentation site](https://developers.google.com/gmail/postmaster). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/PostmasterTools) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/PostmasterTools) ... -* [domains](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/api::Domain) - * [*get*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/api::DomainGetCall), [*list*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/api::DomainListCall), [*traffic stats get*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/api::DomainTrafficStatGetCall) and [*traffic stats list*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/api::DomainTrafficStatListCall) +* [domains](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/api::Domain) + * [*get*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/api::DomainGetCall), [*list*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/api::DomainListCall), [*traffic stats get*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/api::DomainTrafficStatGetCall) and [*traffic stats list*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/api::DomainTrafficStatListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/PostmasterTools)** +* 
**[Hub](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/PostmasterTools)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further 
categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2-beta-1+20230123/google_gmailpostmastertools1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-gmailpostmastertools1_beta1/5.0.2+20230123/google_gmailpostmastertools1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/gmailpostmastertools1_beta1/src/api.rs b/gen/gmailpostmastertools1_beta1/src/api.rs index 7b38c3b311..6850405ac4 100644 --- a/gen/gmailpostmastertools1_beta1/src/api.rs +++ b/gen/gmailpostmastertools1_beta1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> PostmasterTools { PostmasterTools { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://gmailpostmastertools.googleapis.com/".to_string(), _root_url: "https://gmailpostmastertools.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> PostmasterTools { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/gmailpostmastertools1_beta1/src/client.rs b/gen/gmailpostmastertools1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/gmailpostmastertools1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/gmailpostmastertools1_beta1/src/lib.rs b/gen/gmailpostmastertools1_beta1/src/lib.rs index f829ffec7e..607b6b53b1 100644 --- a/gen/gmailpostmastertools1_beta1/src/lib.rs +++ b/gen/gmailpostmastertools1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Postmaster Tools* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *gmailpostmastertools:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Postmaster Tools* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *gmailpostmastertools:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Postmaster Tools* *v1_beta1* API can be found at the //! [official documentation site](https://developers.google.com/gmail/postmaster). diff --git a/gen/groupsmigration1-cli/Cargo.toml b/gen/groupsmigration1-cli/Cargo.toml index 97f43998a4..2e8d173477 100644 --- a/gen/groupsmigration1-cli/Cargo.toml +++ b/gen/groupsmigration1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-groupsmigration1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Groups Migration (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/groupsmigration1-cli" @@ -20,13 +20,13 @@ name = "groupsmigration1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-groupsmigration1] path = "../groupsmigration1" -version = "4.0.1+20220226" +version = "5.0.2+20230117" + diff --git a/gen/groupsmigration1-cli/README.md b/gen/groupsmigration1-cli/README.md index f59c3f1a88..abe979493a 100644 --- a/gen/groupsmigration1-cli/README.md +++ 
b/gen/groupsmigration1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Groups Migration* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *Groups Migration* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash groupsmigration1 [options] diff --git a/gen/groupsmigration1-cli/mkdocs.yml b/gen/groupsmigration1-cli/mkdocs.yml index ca300162ee..2419ae564c 100644 --- a/gen/groupsmigration1-cli/mkdocs.yml +++ b/gen/groupsmigration1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Groups Migration v4.0.1+20220226 +site_name: Groups Migration v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-groupsmigration1-cli site_description: A complete library to interact with Groups Migration (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/groupsmigration1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['archive_insert.md', 'Archive', 'Insert'] +nav: +- Home: 'index.md' +- 'Archive': + - 'Insert': 'archive_insert.md' theme: readthedocs diff --git a/gen/groupsmigration1-cli/src/client.rs b/gen/groupsmigration1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/groupsmigration1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// 
Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/groupsmigration1-cli/src/main.rs b/gen/groupsmigration1-cli/src/main.rs index 1d086fe905..c4f1dd7c35 100644 --- a/gen/groupsmigration1-cli/src/main.rs +++ b/gen/groupsmigration1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_groupsmigration1::{api, Error, oauth2}; +use google_groupsmigration1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -231,7 +230,7 @@ async fn main() { let mut app = App::new("groupsmigration1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230117") .about("The Groups Migration API allows domain administrators to archive emails into Google groups.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_groupsmigration1_cli") .arg(Arg::with_name("url") diff --git a/gen/groupsmigration1/Cargo.toml b/gen/groupsmigration1/Cargo.toml index f5bd0e9762..bb2ef85899 100644 --- a/gen/groupsmigration1/Cargo.toml +++ b/gen/groupsmigration1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-groupsmigration1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Groups Migration (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/groupsmigration1" homepage = "https://developers.google.com/google-apps/groups-migration/" -documentation = 
"https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-groupsmigration1/5.0.2+20230117" license = "MIT" keywords = ["groupsmigration", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/groupsmigration1/README.md b/gen/groupsmigration1/README.md index d8607cc226..520a26419d 100644 --- a/gen/groupsmigration1/README.md +++ b/gen/groupsmigration1/README.md @@ -5,21 +5,21 @@ DO NOT EDIT ! --> The `google-groupsmigration1` library allows access to all features of the *Google Groups Migration* service. -This documentation was generated from *Groups Migration* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *groupsmigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Groups Migration* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *groupsmigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Groups Migration* *v1* API can be found at the [official documentation site](https://developers.google.com/google-apps/groups-migration/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/GroupsMigration) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/GroupsMigration) ... * archive - * [*insert*](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/api::ArchiveInsertCall) + * [*insert*](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/api::ArchiveInsertCall) Upload supported by ... 
-* [*insert archive*](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/api::ArchiveInsertCall) +* [*insert archive*](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/api::ArchiveInsertCall) @@ -27,17 +27,17 @@ Upload supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/GroupsMigration)** +* **[Hub](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/GroupsMigration)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::CallBuilder) -* **[Resources](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::CallBuilder) +* **[Resources](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::Part)** + * **[Parts](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -123,17 +123,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -143,29 +143,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::Delegate) to the -[Method Builder](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::Delegate) to the +[Method Builder](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::RequestValue) and -[decodable](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::RequestValue) and +[decodable](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-groupsmigration1/5.0.2-beta-1+20230117/google_groupsmigration1/client::RequestValue) are moved +* [request values](https://docs.rs/google-groupsmigration1/5.0.2+20230117/google_groupsmigration1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/groupsmigration1/src/api.rs b/gen/groupsmigration1/src/api.rs index 269bf22ff0..194cb21a4a 100644 --- a/gen/groupsmigration1/src/api.rs +++ b/gen/groupsmigration1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> GroupsMigration { GroupsMigration { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://groupsmigration.googleapis.com/".to_string(), _root_url: "https://groupsmigration.googleapis.com/".to_string(), } @@ -131,7 +131,7 @@ impl<'a, S> GroupsMigration { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/groupsmigration1/src/client.rs b/gen/groupsmigration1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/groupsmigration1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/groupsmigration1/src/lib.rs b/gen/groupsmigration1/src/lib.rs index 84d78e62cc..1408fbdc88 100644 --- a/gen/groupsmigration1/src/lib.rs +++ b/gen/groupsmigration1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Groups Migration* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *groupsmigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Groups Migration* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *groupsmigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Groups Migration* *v1* API can be found at the //! [official documentation site](https://developers.google.com/google-apps/groups-migration/). diff --git a/gen/groupssettings1-cli/Cargo.toml b/gen/groupssettings1-cli/Cargo.toml index d1a460f65f..0c81a000a8 100644 --- a/gen/groupssettings1-cli/Cargo.toml +++ b/gen/groupssettings1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-groupssettings1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20220614" authors = ["Sebastian Thiel "] description = "A complete library to interact with groupssettings (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/groupssettings1-cli" @@ -20,13 +20,13 @@ name = "groupssettings1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-groupssettings1] path = "../groupssettings1" -version = "4.0.1+20220224" +version = "5.0.2+20220614" + diff --git a/gen/groupssettings1-cli/README.md b/gen/groupssettings1-cli/README.md index 8859bdea82..edc46a1dbc 100644 --- a/gen/groupssettings1-cli/README.md +++ 
b/gen/groupssettings1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *groupssettings* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *groupssettings* API at revision *20220614*. The CLI is at version *5.0.2*. ```bash groupssettings1 [options] diff --git a/gen/groupssettings1-cli/mkdocs.yml b/gen/groupssettings1-cli/mkdocs.yml index a8546444ff..affda58555 100644 --- a/gen/groupssettings1-cli/mkdocs.yml +++ b/gen/groupssettings1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: groupssettings v4.0.1+20220224 +site_name: groupssettings v5.0.2+20220614 site_url: http://byron.github.io/google-apis-rs/google-groupssettings1-cli site_description: A complete library to interact with groupssettings (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/groupssettings1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['groups_get.md', 'Groups', 'Get'] -- ['groups_patch.md', 'Groups', 'Patch'] -- ['groups_update.md', 'Groups', 'Update'] +nav: +- Home: 'index.md' +- 'Groups': + - 'Get': 'groups_get.md' + - 'Patch': 'groups_patch.md' + - 'Update': 'groups_update.md' theme: readthedocs diff --git a/gen/groupssettings1-cli/src/client.rs b/gen/groupssettings1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/groupssettings1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use 
std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/groupssettings1-cli/src/main.rs b/gen/groupssettings1-cli/src/main.rs index ddb6cbdd6f..9b26337437 100644 --- a/gen/groupssettings1-cli/src/main.rs +++ b/gen/groupssettings1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_groupssettings1::{api, Error, oauth2}; +use google_groupssettings1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -572,7 +571,7 @@ async fn main() { let mut app = App::new("groupssettings1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20220614") .about("Manages permission levels and related settings of a group.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_groupssettings1_cli") .arg(Arg::with_name("url") diff --git a/gen/groupssettings1/Cargo.toml b/gen/groupssettings1/Cargo.toml index 8ed355f66e..b25eee0099 100644 --- a/gen/groupssettings1/Cargo.toml +++ b/gen/groupssettings1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-groupssettings1" -version = "5.0.2-beta-1+20220614" +version = "5.0.2+20220614" authors = ["Sebastian Thiel "] description = "A complete library to interact with groupssettings (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/groupssettings1" homepage = "https://developers.google.com/google-apps/groups-settings/get_started" -documentation = "https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614" +documentation = 
"https://docs.rs/google-groupssettings1/5.0.2+20220614" license = "MIT" keywords = ["groupssettings", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/groupssettings1/README.md b/gen/groupssettings1/README.md index db5e387ef3..c197de483c 100644 --- a/gen/groupssettings1/README.md +++ b/gen/groupssettings1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-groupssettings1` library allows access to all features of the *Google groupssettings* service. -This documentation was generated from *groupssettings* crate version *5.0.2-beta-1+20220614*, where *20220614* is the exact revision of the *groupssettings:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *groupssettings* crate version *5.0.2+20220614*, where *20220614* is the exact revision of the *groupssettings:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *groupssettings* *v1* API can be found at the [official documentation site](https://developers.google.com/google-apps/groups-settings/get_started). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/Groupssettings) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/Groupssettings) ... 
* groups - * [*get*](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/api::GroupGetCall), [*patch*](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/api::GroupPatchCall) and [*update*](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/api::GroupUpdateCall) + * [*get*](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/api::GroupGetCall), [*patch*](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/api::GroupPatchCall) and [*update*](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/api::GroupUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/Groupssettings)** +* **[Hub](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/Groupssettings)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::CallBuilder) -* **[Resources](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::CallBuilder) +* **[Resources](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::Resource)** * primary types that you 
can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::Part)** + * **[Parts](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::Delegate) to the -[Method Builder](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::Delegate) to the +[Method Builder](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::RequestValue) and -[decodable](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::RequestValue) and +[decodable](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-groupssettings1/5.0.2-beta-1+20220614/google_groupssettings1/client::RequestValue) are moved +* [request values](https://docs.rs/google-groupssettings1/5.0.2+20220614/google_groupssettings1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/groupssettings1/src/api.rs b/gen/groupssettings1/src/api.rs index 9fbdba9fdb..3d832b8402 100644 --- a/gen/groupssettings1/src/api.rs +++ b/gen/groupssettings1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Groupssettings { Groupssettings { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/groups/v1/groups/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Groupssettings { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/groupssettings1/src/client.rs b/gen/groupssettings1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/groupssettings1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/groupssettings1/src/lib.rs b/gen/groupssettings1/src/lib.rs index bdfdd37615..ab37a36705 100644 --- a/gen/groupssettings1/src/lib.rs +++ b/gen/groupssettings1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *groupssettings* crate version *5.0.2-beta-1+20220614*, where *20220614* is the exact revision of the *groupssettings:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *groupssettings* crate version *5.0.2+20220614*, where *20220614* is the exact revision of the *groupssettings:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *groupssettings* *v1* API can be found at the //! [official documentation site](https://developers.google.com/google-apps/groups-settings/get_started). diff --git a/gen/healthcare1-cli/Cargo.toml b/gen/healthcare1-cli/Cargo.toml index fd811b8b72..2b891cfa0a 100644 --- a/gen/healthcare1-cli/Cargo.toml +++ b/gen/healthcare1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-healthcare1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20221220" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Healthcare (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/healthcare1-cli" @@ -20,13 +20,13 @@ name = "healthcare1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-healthcare1] path = "../healthcare1" -version = "4.0.1+20220223" +version = "5.0.2+20221220" + diff --git a/gen/healthcare1-cli/README.md b/gen/healthcare1-cli/README.md index 58bff6ce70..caf28f7ccc 100644 --- a/gen/healthcare1-cli/README.md +++ b/gen/healthcare1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Healthcare* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Healthcare* API at revision *20221220*. The CLI is at version *5.0.2*. ```bash healthcare1 [options] diff --git a/gen/healthcare1-cli/mkdocs.yml b/gen/healthcare1-cli/mkdocs.yml index 161138cb14..a20cfe5562 100644 --- a/gen/healthcare1-cli/mkdocs.yml +++ b/gen/healthcare1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Healthcare v4.0.1+20220223 +site_name: Cloud Healthcare v5.0.2+20221220 site_url: http://byron.github.io/google-apis-rs/google-healthcare1-cli site_description: A complete library to interact with Cloud Healthcare (protocol v1) @@ -7,131 +7,132 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/healthcare1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-create.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions Create'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-delete.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions Delete'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-get.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions Get'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-list.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions List'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-patch.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions Patch'] -- ['projects_locations-datasets-consent-stores-check-data-access.md', 'Projects', 'Locations Datasets Consent Stores Check Data Access'] -- ['projects_locations-datasets-consent-stores-consent-artifacts-create.md', 'Projects', 'Locations Datasets Consent 
Stores Consent Artifacts Create'] -- ['projects_locations-datasets-consent-stores-consent-artifacts-delete.md', 'Projects', 'Locations Datasets Consent Stores Consent Artifacts Delete'] -- ['projects_locations-datasets-consent-stores-consent-artifacts-get.md', 'Projects', 'Locations Datasets Consent Stores Consent Artifacts Get'] -- ['projects_locations-datasets-consent-stores-consent-artifacts-list.md', 'Projects', 'Locations Datasets Consent Stores Consent Artifacts List'] -- ['projects_locations-datasets-consent-stores-consents-activate.md', 'Projects', 'Locations Datasets Consent Stores Consents Activate'] -- ['projects_locations-datasets-consent-stores-consents-create.md', 'Projects', 'Locations Datasets Consent Stores Consents Create'] -- ['projects_locations-datasets-consent-stores-consents-delete.md', 'Projects', 'Locations Datasets Consent Stores Consents Delete'] -- ['projects_locations-datasets-consent-stores-consents-delete-revision.md', 'Projects', 'Locations Datasets Consent Stores Consents Delete Revision'] -- ['projects_locations-datasets-consent-stores-consents-get.md', 'Projects', 'Locations Datasets Consent Stores Consents Get'] -- ['projects_locations-datasets-consent-stores-consents-list.md', 'Projects', 'Locations Datasets Consent Stores Consents List'] -- ['projects_locations-datasets-consent-stores-consents-list-revisions.md', 'Projects', 'Locations Datasets Consent Stores Consents List Revisions'] -- ['projects_locations-datasets-consent-stores-consents-patch.md', 'Projects', 'Locations Datasets Consent Stores Consents Patch'] -- ['projects_locations-datasets-consent-stores-consents-reject.md', 'Projects', 'Locations Datasets Consent Stores Consents Reject'] -- ['projects_locations-datasets-consent-stores-consents-revoke.md', 'Projects', 'Locations Datasets Consent Stores Consents Revoke'] -- ['projects_locations-datasets-consent-stores-create.md', 'Projects', 'Locations Datasets Consent Stores Create'] -- 
['projects_locations-datasets-consent-stores-delete.md', 'Projects', 'Locations Datasets Consent Stores Delete'] -- ['projects_locations-datasets-consent-stores-evaluate-user-consents.md', 'Projects', 'Locations Datasets Consent Stores Evaluate User Consents'] -- ['projects_locations-datasets-consent-stores-get.md', 'Projects', 'Locations Datasets Consent Stores Get'] -- ['projects_locations-datasets-consent-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Consent Stores Get Iam Policy'] -- ['projects_locations-datasets-consent-stores-list.md', 'Projects', 'Locations Datasets Consent Stores List'] -- ['projects_locations-datasets-consent-stores-patch.md', 'Projects', 'Locations Datasets Consent Stores Patch'] -- ['projects_locations-datasets-consent-stores-query-accessible-data.md', 'Projects', 'Locations Datasets Consent Stores Query Accessible Data'] -- ['projects_locations-datasets-consent-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Consent Stores Set Iam Policy'] -- ['projects_locations-datasets-consent-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Consent Stores Test Iam Permissions'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-archive.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings Archive'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-create.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings Create'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-delete.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings Delete'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-get.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings Get'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-list.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings List'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-patch.md', 
'Projects', 'Locations Datasets Consent Stores User Data Mappings Patch'] -- ['projects_locations-datasets-create.md', 'Projects', 'Locations Datasets Create'] -- ['projects_locations-datasets-deidentify.md', 'Projects', 'Locations Datasets Deidentify'] -- ['projects_locations-datasets-delete.md', 'Projects', 'Locations Datasets Delete'] -- ['projects_locations-datasets-dicom-stores-create.md', 'Projects', 'Locations Datasets Dicom Stores Create'] -- ['projects_locations-datasets-dicom-stores-deidentify.md', 'Projects', 'Locations Datasets Dicom Stores Deidentify'] -- ['projects_locations-datasets-dicom-stores-delete.md', 'Projects', 'Locations Datasets Dicom Stores Delete'] -- ['projects_locations-datasets-dicom-stores-export.md', 'Projects', 'Locations Datasets Dicom Stores Export'] -- ['projects_locations-datasets-dicom-stores-get.md', 'Projects', 'Locations Datasets Dicom Stores Get'] -- ['projects_locations-datasets-dicom-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Dicom Stores Get Iam Policy'] -- ['projects_locations-datasets-dicom-stores-import.md', 'Projects', 'Locations Datasets Dicom Stores Import'] -- ['projects_locations-datasets-dicom-stores-list.md', 'Projects', 'Locations Datasets Dicom Stores List'] -- ['projects_locations-datasets-dicom-stores-patch.md', 'Projects', 'Locations Datasets Dicom Stores Patch'] -- ['projects_locations-datasets-dicom-stores-search-for-instances.md', 'Projects', 'Locations Datasets Dicom Stores Search For Instances'] -- ['projects_locations-datasets-dicom-stores-search-for-series.md', 'Projects', 'Locations Datasets Dicom Stores Search For Series'] -- ['projects_locations-datasets-dicom-stores-search-for-studies.md', 'Projects', 'Locations Datasets Dicom Stores Search For Studies'] -- ['projects_locations-datasets-dicom-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Dicom Stores Set Iam Policy'] -- ['projects_locations-datasets-dicom-stores-store-instances.md', 'Projects', 'Locations Datasets 
Dicom Stores Store Instances'] -- ['projects_locations-datasets-dicom-stores-studies-delete.md', 'Projects', 'Locations Datasets Dicom Stores Studies Delete'] -- ['projects_locations-datasets-dicom-stores-studies-retrieve-metadata.md', 'Projects', 'Locations Datasets Dicom Stores Studies Retrieve Metadata'] -- ['projects_locations-datasets-dicom-stores-studies-retrieve-study.md', 'Projects', 'Locations Datasets Dicom Stores Studies Retrieve Study'] -- ['projects_locations-datasets-dicom-stores-studies-search-for-instances.md', 'Projects', 'Locations Datasets Dicom Stores Studies Search For Instances'] -- ['projects_locations-datasets-dicom-stores-studies-search-for-series.md', 'Projects', 'Locations Datasets Dicom Stores Studies Search For Series'] -- ['projects_locations-datasets-dicom-stores-studies-series-delete.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Delete'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-delete.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Delete'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-frames-retrieve-frames.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Frames Retrieve Frames'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-frames-retrieve-rendered.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Frames Retrieve Rendered'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-instance.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Instance'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-metadata.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Metadata'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-rendered.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Retrieve 
Rendered'] -- ['projects_locations-datasets-dicom-stores-studies-series-retrieve-metadata.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Retrieve Metadata'] -- ['projects_locations-datasets-dicom-stores-studies-series-retrieve-series.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Retrieve Series'] -- ['projects_locations-datasets-dicom-stores-studies-series-search-for-instances.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Search For Instances'] -- ['projects_locations-datasets-dicom-stores-studies-store-instances.md', 'Projects', 'Locations Datasets Dicom Stores Studies Store Instances'] -- ['projects_locations-datasets-dicom-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Dicom Stores Test Iam Permissions'] -- ['projects_locations-datasets-fhir-stores-create.md', 'Projects', 'Locations Datasets Fhir Stores Create'] -- ['projects_locations-datasets-fhir-stores-deidentify.md', 'Projects', 'Locations Datasets Fhir Stores Deidentify'] -- ['projects_locations-datasets-fhir-stores-delete.md', 'Projects', 'Locations Datasets Fhir Stores Delete'] -- ['projects_locations-datasets-fhir-stores-export.md', 'Projects', 'Locations Datasets Fhir Stores Export'] -- ['projects_locations-datasets-fhir-stores-fhir--patient-everything.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Patient Everything'] -- ['projects_locations-datasets-fhir-stores-fhir--resource-purge.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Resource Purge'] -- ['projects_locations-datasets-fhir-stores-fhir--resource-validate.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Resource Validate'] -- ['projects_locations-datasets-fhir-stores-fhir-capabilities.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Capabilities'] -- ['projects_locations-datasets-fhir-stores-fhir-create.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Create'] -- ['projects_locations-datasets-fhir-stores-fhir-delete.md', 'Projects', 
'Locations Datasets Fhir Stores Fhir Delete'] -- ['projects_locations-datasets-fhir-stores-fhir-execute-bundle.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Execute Bundle'] -- ['projects_locations-datasets-fhir-stores-fhir-history.md', 'Projects', 'Locations Datasets Fhir Stores Fhir History'] -- ['projects_locations-datasets-fhir-stores-fhir-patch.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Patch'] -- ['projects_locations-datasets-fhir-stores-fhir-read.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Read'] -- ['projects_locations-datasets-fhir-stores-fhir-search.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Search'] -- ['projects_locations-datasets-fhir-stores-fhir-search-type.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Search Type'] -- ['projects_locations-datasets-fhir-stores-fhir-update.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Update'] -- ['projects_locations-datasets-fhir-stores-fhir-vread.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Vread'] -- ['projects_locations-datasets-fhir-stores-get.md', 'Projects', 'Locations Datasets Fhir Stores Get'] -- ['projects_locations-datasets-fhir-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Fhir Stores Get Iam Policy'] -- ['projects_locations-datasets-fhir-stores-import.md', 'Projects', 'Locations Datasets Fhir Stores Import'] -- ['projects_locations-datasets-fhir-stores-list.md', 'Projects', 'Locations Datasets Fhir Stores List'] -- ['projects_locations-datasets-fhir-stores-patch.md', 'Projects', 'Locations Datasets Fhir Stores Patch'] -- ['projects_locations-datasets-fhir-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Fhir Stores Set Iam Policy'] -- ['projects_locations-datasets-fhir-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Fhir Stores Test Iam Permissions'] -- ['projects_locations-datasets-get.md', 'Projects', 'Locations Datasets Get'] -- ['projects_locations-datasets-get-iam-policy.md', 'Projects', 'Locations 
Datasets Get Iam Policy'] -- ['projects_locations-datasets-hl7-v2-stores-create.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Create'] -- ['projects_locations-datasets-hl7-v2-stores-delete.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Delete'] -- ['projects_locations-datasets-hl7-v2-stores-export.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Export'] -- ['projects_locations-datasets-hl7-v2-stores-get.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Get'] -- ['projects_locations-datasets-hl7-v2-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Get Iam Policy'] -- ['projects_locations-datasets-hl7-v2-stores-import.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Import'] -- ['projects_locations-datasets-hl7-v2-stores-list.md', 'Projects', 'Locations Datasets Hl7 V2 Stores List'] -- ['projects_locations-datasets-hl7-v2-stores-messages-create.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Create'] -- ['projects_locations-datasets-hl7-v2-stores-messages-delete.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Delete'] -- ['projects_locations-datasets-hl7-v2-stores-messages-get.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Get'] -- ['projects_locations-datasets-hl7-v2-stores-messages-ingest.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Ingest'] -- ['projects_locations-datasets-hl7-v2-stores-messages-list.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages List'] -- ['projects_locations-datasets-hl7-v2-stores-messages-patch.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Patch'] -- ['projects_locations-datasets-hl7-v2-stores-patch.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Patch'] -- ['projects_locations-datasets-hl7-v2-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Set Iam Policy'] -- ['projects_locations-datasets-hl7-v2-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Test Iam Permissions'] -- 
['projects_locations-datasets-list.md', 'Projects', 'Locations Datasets List'] -- ['projects_locations-datasets-operations-cancel.md', 'Projects', 'Locations Datasets Operations Cancel'] -- ['projects_locations-datasets-operations-get.md', 'Projects', 'Locations Datasets Operations Get'] -- ['projects_locations-datasets-operations-list.md', 'Projects', 'Locations Datasets Operations List'] -- ['projects_locations-datasets-patch.md', 'Projects', 'Locations Datasets Patch'] -- ['projects_locations-datasets-set-iam-policy.md', 'Projects', 'Locations Datasets Set Iam Policy'] -- ['projects_locations-datasets-test-iam-permissions.md', 'Projects', 'Locations Datasets Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-services-nlp-analyze-entities.md', 'Projects', 'Locations Services Nlp Analyze Entities'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Datasets Consent Stores Attribute Definitions Create': 'projects_locations-datasets-consent-stores-attribute-definitions-create.md' + - 'Locations Datasets Consent Stores Attribute Definitions Delete': 'projects_locations-datasets-consent-stores-attribute-definitions-delete.md' + - 'Locations Datasets Consent Stores Attribute Definitions Get': 'projects_locations-datasets-consent-stores-attribute-definitions-get.md' + - 'Locations Datasets Consent Stores Attribute Definitions List': 'projects_locations-datasets-consent-stores-attribute-definitions-list.md' + - 'Locations Datasets Consent Stores Attribute Definitions Patch': 'projects_locations-datasets-consent-stores-attribute-definitions-patch.md' + - 'Locations Datasets Consent Stores Check Data Access': 'projects_locations-datasets-consent-stores-check-data-access.md' + - 'Locations Datasets Consent Stores Consent Artifacts Create': 'projects_locations-datasets-consent-stores-consent-artifacts-create.md' + - 'Locations Datasets Consent Stores 
Consent Artifacts Delete': 'projects_locations-datasets-consent-stores-consent-artifacts-delete.md' + - 'Locations Datasets Consent Stores Consent Artifacts Get': 'projects_locations-datasets-consent-stores-consent-artifacts-get.md' + - 'Locations Datasets Consent Stores Consent Artifacts List': 'projects_locations-datasets-consent-stores-consent-artifacts-list.md' + - 'Locations Datasets Consent Stores Consents Activate': 'projects_locations-datasets-consent-stores-consents-activate.md' + - 'Locations Datasets Consent Stores Consents Create': 'projects_locations-datasets-consent-stores-consents-create.md' + - 'Locations Datasets Consent Stores Consents Delete': 'projects_locations-datasets-consent-stores-consents-delete.md' + - 'Locations Datasets Consent Stores Consents Delete Revision': 'projects_locations-datasets-consent-stores-consents-delete-revision.md' + - 'Locations Datasets Consent Stores Consents Get': 'projects_locations-datasets-consent-stores-consents-get.md' + - 'Locations Datasets Consent Stores Consents List': 'projects_locations-datasets-consent-stores-consents-list.md' + - 'Locations Datasets Consent Stores Consents List Revisions': 'projects_locations-datasets-consent-stores-consents-list-revisions.md' + - 'Locations Datasets Consent Stores Consents Patch': 'projects_locations-datasets-consent-stores-consents-patch.md' + - 'Locations Datasets Consent Stores Consents Reject': 'projects_locations-datasets-consent-stores-consents-reject.md' + - 'Locations Datasets Consent Stores Consents Revoke': 'projects_locations-datasets-consent-stores-consents-revoke.md' + - 'Locations Datasets Consent Stores Create': 'projects_locations-datasets-consent-stores-create.md' + - 'Locations Datasets Consent Stores Delete': 'projects_locations-datasets-consent-stores-delete.md' + - 'Locations Datasets Consent Stores Evaluate User Consents': 'projects_locations-datasets-consent-stores-evaluate-user-consents.md' + - 'Locations Datasets Consent Stores Get': 
'projects_locations-datasets-consent-stores-get.md' + - 'Locations Datasets Consent Stores Get Iam Policy': 'projects_locations-datasets-consent-stores-get-iam-policy.md' + - 'Locations Datasets Consent Stores List': 'projects_locations-datasets-consent-stores-list.md' + - 'Locations Datasets Consent Stores Patch': 'projects_locations-datasets-consent-stores-patch.md' + - 'Locations Datasets Consent Stores Query Accessible Data': 'projects_locations-datasets-consent-stores-query-accessible-data.md' + - 'Locations Datasets Consent Stores Set Iam Policy': 'projects_locations-datasets-consent-stores-set-iam-policy.md' + - 'Locations Datasets Consent Stores Test Iam Permissions': 'projects_locations-datasets-consent-stores-test-iam-permissions.md' + - 'Locations Datasets Consent Stores User Data Mappings Archive': 'projects_locations-datasets-consent-stores-user-data-mappings-archive.md' + - 'Locations Datasets Consent Stores User Data Mappings Create': 'projects_locations-datasets-consent-stores-user-data-mappings-create.md' + - 'Locations Datasets Consent Stores User Data Mappings Delete': 'projects_locations-datasets-consent-stores-user-data-mappings-delete.md' + - 'Locations Datasets Consent Stores User Data Mappings Get': 'projects_locations-datasets-consent-stores-user-data-mappings-get.md' + - 'Locations Datasets Consent Stores User Data Mappings List': 'projects_locations-datasets-consent-stores-user-data-mappings-list.md' + - 'Locations Datasets Consent Stores User Data Mappings Patch': 'projects_locations-datasets-consent-stores-user-data-mappings-patch.md' + - 'Locations Datasets Create': 'projects_locations-datasets-create.md' + - 'Locations Datasets Deidentify': 'projects_locations-datasets-deidentify.md' + - 'Locations Datasets Delete': 'projects_locations-datasets-delete.md' + - 'Locations Datasets Dicom Stores Create': 'projects_locations-datasets-dicom-stores-create.md' + - 'Locations Datasets Dicom Stores Deidentify': 
'projects_locations-datasets-dicom-stores-deidentify.md' + - 'Locations Datasets Dicom Stores Delete': 'projects_locations-datasets-dicom-stores-delete.md' + - 'Locations Datasets Dicom Stores Export': 'projects_locations-datasets-dicom-stores-export.md' + - 'Locations Datasets Dicom Stores Get': 'projects_locations-datasets-dicom-stores-get.md' + - 'Locations Datasets Dicom Stores Get Iam Policy': 'projects_locations-datasets-dicom-stores-get-iam-policy.md' + - 'Locations Datasets Dicom Stores Import': 'projects_locations-datasets-dicom-stores-import.md' + - 'Locations Datasets Dicom Stores List': 'projects_locations-datasets-dicom-stores-list.md' + - 'Locations Datasets Dicom Stores Patch': 'projects_locations-datasets-dicom-stores-patch.md' + - 'Locations Datasets Dicom Stores Search For Instances': 'projects_locations-datasets-dicom-stores-search-for-instances.md' + - 'Locations Datasets Dicom Stores Search For Series': 'projects_locations-datasets-dicom-stores-search-for-series.md' + - 'Locations Datasets Dicom Stores Search For Studies': 'projects_locations-datasets-dicom-stores-search-for-studies.md' + - 'Locations Datasets Dicom Stores Set Iam Policy': 'projects_locations-datasets-dicom-stores-set-iam-policy.md' + - 'Locations Datasets Dicom Stores Store Instances': 'projects_locations-datasets-dicom-stores-store-instances.md' + - 'Locations Datasets Dicom Stores Studies Delete': 'projects_locations-datasets-dicom-stores-studies-delete.md' + - 'Locations Datasets Dicom Stores Studies Retrieve Metadata': 'projects_locations-datasets-dicom-stores-studies-retrieve-metadata.md' + - 'Locations Datasets Dicom Stores Studies Retrieve Study': 'projects_locations-datasets-dicom-stores-studies-retrieve-study.md' + - 'Locations Datasets Dicom Stores Studies Search For Instances': 'projects_locations-datasets-dicom-stores-studies-search-for-instances.md' + - 'Locations Datasets Dicom Stores Studies Search For Series': 
'projects_locations-datasets-dicom-stores-studies-search-for-series.md' + - 'Locations Datasets Dicom Stores Studies Series Delete': 'projects_locations-datasets-dicom-stores-studies-series-delete.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Delete': 'projects_locations-datasets-dicom-stores-studies-series-instances-delete.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Frames Retrieve Frames': 'projects_locations-datasets-dicom-stores-studies-series-instances-frames-retrieve-frames.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Frames Retrieve Rendered': 'projects_locations-datasets-dicom-stores-studies-series-instances-frames-retrieve-rendered.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Instance': 'projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-instance.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Metadata': 'projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-metadata.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Rendered': 'projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-rendered.md' + - 'Locations Datasets Dicom Stores Studies Series Retrieve Metadata': 'projects_locations-datasets-dicom-stores-studies-series-retrieve-metadata.md' + - 'Locations Datasets Dicom Stores Studies Series Retrieve Series': 'projects_locations-datasets-dicom-stores-studies-series-retrieve-series.md' + - 'Locations Datasets Dicom Stores Studies Series Search For Instances': 'projects_locations-datasets-dicom-stores-studies-series-search-for-instances.md' + - 'Locations Datasets Dicom Stores Studies Store Instances': 'projects_locations-datasets-dicom-stores-studies-store-instances.md' + - 'Locations Datasets Dicom Stores Test Iam Permissions': 'projects_locations-datasets-dicom-stores-test-iam-permissions.md' + - 'Locations Datasets Fhir Stores Create': 
'projects_locations-datasets-fhir-stores-create.md' + - 'Locations Datasets Fhir Stores Deidentify': 'projects_locations-datasets-fhir-stores-deidentify.md' + - 'Locations Datasets Fhir Stores Delete': 'projects_locations-datasets-fhir-stores-delete.md' + - 'Locations Datasets Fhir Stores Export': 'projects_locations-datasets-fhir-stores-export.md' + - 'Locations Datasets Fhir Stores Fhir Patient Everything': 'projects_locations-datasets-fhir-stores-fhir--patient-everything.md' + - 'Locations Datasets Fhir Stores Fhir Resource Purge': 'projects_locations-datasets-fhir-stores-fhir--resource-purge.md' + - 'Locations Datasets Fhir Stores Fhir Resource Validate': 'projects_locations-datasets-fhir-stores-fhir--resource-validate.md' + - 'Locations Datasets Fhir Stores Fhir Capabilities': 'projects_locations-datasets-fhir-stores-fhir-capabilities.md' + - 'Locations Datasets Fhir Stores Fhir Create': 'projects_locations-datasets-fhir-stores-fhir-create.md' + - 'Locations Datasets Fhir Stores Fhir Delete': 'projects_locations-datasets-fhir-stores-fhir-delete.md' + - 'Locations Datasets Fhir Stores Fhir Execute Bundle': 'projects_locations-datasets-fhir-stores-fhir-execute-bundle.md' + - 'Locations Datasets Fhir Stores Fhir History': 'projects_locations-datasets-fhir-stores-fhir-history.md' + - 'Locations Datasets Fhir Stores Fhir Patch': 'projects_locations-datasets-fhir-stores-fhir-patch.md' + - 'Locations Datasets Fhir Stores Fhir Read': 'projects_locations-datasets-fhir-stores-fhir-read.md' + - 'Locations Datasets Fhir Stores Fhir Search': 'projects_locations-datasets-fhir-stores-fhir-search.md' + - 'Locations Datasets Fhir Stores Fhir Search Type': 'projects_locations-datasets-fhir-stores-fhir-search-type.md' + - 'Locations Datasets Fhir Stores Fhir Update': 'projects_locations-datasets-fhir-stores-fhir-update.md' + - 'Locations Datasets Fhir Stores Fhir Vread': 'projects_locations-datasets-fhir-stores-fhir-vread.md' + - 'Locations Datasets Fhir Stores Get': 
'projects_locations-datasets-fhir-stores-get.md' + - 'Locations Datasets Fhir Stores Get Iam Policy': 'projects_locations-datasets-fhir-stores-get-iam-policy.md' + - 'Locations Datasets Fhir Stores Import': 'projects_locations-datasets-fhir-stores-import.md' + - 'Locations Datasets Fhir Stores List': 'projects_locations-datasets-fhir-stores-list.md' + - 'Locations Datasets Fhir Stores Patch': 'projects_locations-datasets-fhir-stores-patch.md' + - 'Locations Datasets Fhir Stores Set Iam Policy': 'projects_locations-datasets-fhir-stores-set-iam-policy.md' + - 'Locations Datasets Fhir Stores Test Iam Permissions': 'projects_locations-datasets-fhir-stores-test-iam-permissions.md' + - 'Locations Datasets Get': 'projects_locations-datasets-get.md' + - 'Locations Datasets Get Iam Policy': 'projects_locations-datasets-get-iam-policy.md' + - 'Locations Datasets Hl7 V2 Stores Create': 'projects_locations-datasets-hl7-v2-stores-create.md' + - 'Locations Datasets Hl7 V2 Stores Delete': 'projects_locations-datasets-hl7-v2-stores-delete.md' + - 'Locations Datasets Hl7 V2 Stores Export': 'projects_locations-datasets-hl7-v2-stores-export.md' + - 'Locations Datasets Hl7 V2 Stores Get': 'projects_locations-datasets-hl7-v2-stores-get.md' + - 'Locations Datasets Hl7 V2 Stores Get Iam Policy': 'projects_locations-datasets-hl7-v2-stores-get-iam-policy.md' + - 'Locations Datasets Hl7 V2 Stores Import': 'projects_locations-datasets-hl7-v2-stores-import.md' + - 'Locations Datasets Hl7 V2 Stores List': 'projects_locations-datasets-hl7-v2-stores-list.md' + - 'Locations Datasets Hl7 V2 Stores Messages Create': 'projects_locations-datasets-hl7-v2-stores-messages-create.md' + - 'Locations Datasets Hl7 V2 Stores Messages Delete': 'projects_locations-datasets-hl7-v2-stores-messages-delete.md' + - 'Locations Datasets Hl7 V2 Stores Messages Get': 'projects_locations-datasets-hl7-v2-stores-messages-get.md' + - 'Locations Datasets Hl7 V2 Stores Messages Ingest': 
'projects_locations-datasets-hl7-v2-stores-messages-ingest.md' + - 'Locations Datasets Hl7 V2 Stores Messages List': 'projects_locations-datasets-hl7-v2-stores-messages-list.md' + - 'Locations Datasets Hl7 V2 Stores Messages Patch': 'projects_locations-datasets-hl7-v2-stores-messages-patch.md' + - 'Locations Datasets Hl7 V2 Stores Patch': 'projects_locations-datasets-hl7-v2-stores-patch.md' + - 'Locations Datasets Hl7 V2 Stores Set Iam Policy': 'projects_locations-datasets-hl7-v2-stores-set-iam-policy.md' + - 'Locations Datasets Hl7 V2 Stores Test Iam Permissions': 'projects_locations-datasets-hl7-v2-stores-test-iam-permissions.md' + - 'Locations Datasets List': 'projects_locations-datasets-list.md' + - 'Locations Datasets Operations Cancel': 'projects_locations-datasets-operations-cancel.md' + - 'Locations Datasets Operations Get': 'projects_locations-datasets-operations-get.md' + - 'Locations Datasets Operations List': 'projects_locations-datasets-operations-list.md' + - 'Locations Datasets Patch': 'projects_locations-datasets-patch.md' + - 'Locations Datasets Set Iam Policy': 'projects_locations-datasets-set-iam-policy.md' + - 'Locations Datasets Test Iam Permissions': 'projects_locations-datasets-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Services Nlp Analyze Entities': 'projects_locations-services-nlp-analyze-entities.md' theme: readthedocs diff --git a/gen/healthcare1-cli/src/client.rs b/gen/healthcare1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/healthcare1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use 
std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/healthcare1-cli/src/main.rs b/gen/healthcare1-cli/src/main.rs index b5ab7e283e..5e0e2c9bfe 100644 --- a/gen/healthcare1-cli/src/main.rs +++ b/gen/healthcare1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_healthcare1::{api, Error, oauth2}; +use google_healthcare1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -259,7 +258,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -356,7 +355,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -710,7 +709,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1108,7 +1107,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1170,7 +1169,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1270,7 +1269,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1783,7 +1782,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1842,7 +1841,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1937,7 +1936,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let 
mut found = false; @@ -2532,7 +2531,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2628,7 +2627,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2797,8 +2796,9 @@ where "config.fhir.default-keep-extensions" => Some(("config.fhir.defaultKeepExtensions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "config.image.text-redaction-mode" => Some(("config.image.textRedactionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-dataset" => Some(("destinationDataset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gcs-config-uri" => Some(("gcsConfigUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["config", "default-keep-extensions", "destination-dataset", "dicom", "fhir", "filter-profile", "image", "keep-list", "remove-list", "skip-id-redaction", "tags", "text-redaction-mode"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["config", "default-keep-extensions", "destination-dataset", "dicom", "fhir", "filter-profile", "gcs-config-uri", "image", "keep-list", "remove-list", "skip-id-redaction", "tags", "text-redaction-mode"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3032,8 +3032,9 @@ where "config.image.text-redaction-mode" => Some(("config.image.textRedactionMode", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-store" => Some(("destinationStore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "filter-config.resource-paths-gcs-uri" => Some(("filterConfig.resourcePathsGcsUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gcs-config-uri" => Some(("gcsConfigUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["config", "default-keep-extensions", "destination-store", "dicom", "fhir", "filter-config", "filter-profile", "image", "keep-list", "remove-list", "resource-paths-gcs-uri", "skip-id-redaction", "tags", "text-redaction-mode"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["config", "default-keep-extensions", "destination-store", "dicom", "fhir", "filter-config", "filter-profile", "gcs-config-uri", "image", "keep-list", "remove-list", "resource-paths-gcs-uri", "skip-id-redaction", "tags", "text-redaction-mode"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3293,7 +3294,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3437,7 +3438,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3531,7 +3532,7 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4883,6 +4884,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "complex-data-type-reference-parsing" => Some(("complexDataTypeReferenceParsing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "default-search-handling-strict" => Some(("defaultSearchHandlingStrict", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "disable-referential-integrity" => Some(("disableReferentialIntegrity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "disable-resource-versioning" => Some(("disableResourceVersioning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -4897,7 +4899,7 @@ where "validation-config.enabled-implementation-guides" => Some(("validationConfig.enabledImplementationGuides", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["default-search-handling-strict", "disable-fhirpath-validation", "disable-profile-validation", "disable-reference-type-validation", "disable-referential-integrity", "disable-required-field-validation", "disable-resource-versioning", "enable-update-create", "enabled-implementation-guides", "labels", "name", "notification-config", "pubsub-topic", "validation-config", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["complex-data-type-reference-parsing", "default-search-handling-strict", "disable-fhirpath-validation", "disable-profile-validation", "disable-reference-type-validation", "disable-referential-integrity", 
"disable-required-field-validation", "disable-resource-versioning", "enable-update-create", "enabled-implementation-guides", "labels", "name", "notification-config", "pubsub-topic", "validation-config", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4991,9 +4993,11 @@ where "config.fhir.default-keep-extensions" => Some(("config.fhir.defaultKeepExtensions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "config.image.text-redaction-mode" => Some(("config.image.textRedactionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-store" => Some(("destinationStore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gcs-config-uri" => Some(("gcsConfigUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-filter.resources.resources" => Some(("resourceFilter.resources.resources", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "skip-modified-resources" => Some(("skipModifiedResources", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["config", "default-keep-extensions", "destination-store", "dicom", "fhir", "filter-profile", "image", "keep-list", "remove-list", "resource-filter", "resources", "skip-id-redaction", "tags", "text-redaction-mode"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["config", "default-keep-extensions", "destination-store", "dicom", "fhir", "filter-profile", "gcs-config-uri", "image", "keep-list", "remove-list", "resource-filter", "resources", "skip-id-redaction", "skip-modified-resources", "tags", "text-redaction-mode"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5128,6 +5132,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = 
match &temp_cursor.to_string()[..] { + "-since" => Some(("_since", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "-type" => Some(("_type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bigquery-destination.dataset-uri" => Some(("bigqueryDestination.datasetUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bigquery-destination.force" => Some(("bigqueryDestination.force", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "bigquery-destination.schema-config.recursive-structure-depth" => Some(("bigqueryDestination.schemaConfig.recursiveStructureDepth", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5135,7 +5141,7 @@ where "bigquery-destination.write-disposition" => Some(("bigqueryDestination.writeDisposition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gcs-destination.uri-prefix" => Some(("gcsDestination.uriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bigquery-destination", "dataset-uri", "force", "gcs-destination", "recursive-structure-depth", "schema-config", "schema-type", "uri-prefix", "write-disposition"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["-since", "-type", "bigquery-destination", "dataset-uri", "force", "gcs-destination", "recursive-structure-depth", "schema-config", "schema-type", "uri-prefix", "write-disposition"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5217,7 +5223,7 @@ where call = call._page_token(value.unwrap_or("")); }, "-count" => { - call = call._count(arg_from_str(value.unwrap_or("-0"), err, "-count", "integer")); + call = call._count( value.map(|v| arg_from_str(v, err, "-count", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5697,7 +5703,7 @@ where call = 
call._page_token(value.unwrap_or("")); }, "-count" => { - call = call._count(arg_from_str(value.unwrap_or("-0"), err, "-count", "integer")); + call = call._count( value.map(|v| arg_from_str(v, err, "-count", "int32")).unwrap_or(-0)); }, "-at" => { call = call._at(value.unwrap_or("")); @@ -6254,7 +6260,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6399,7 +6405,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6474,6 +6480,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "complex-data-type-reference-parsing" => Some(("complexDataTypeReferenceParsing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "default-search-handling-strict" => Some(("defaultSearchHandlingStrict", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "disable-referential-integrity" => Some(("disableReferentialIntegrity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "disable-resource-versioning" => Some(("disableResourceVersioning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -6488,7 +6495,7 @@ where "validation-config.enabled-implementation-guides" => Some(("validationConfig.enabledImplementationGuides", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["default-search-handling-strict", "disable-fhirpath-validation", "disable-profile-validation", "disable-reference-type-validation", "disable-referential-integrity", "disable-required-field-validation", "disable-resource-versioning", "enable-update-create", "enabled-implementation-guides", "labels", "name", "notification-config", "pubsub-topic", "validation-config", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["complex-data-type-reference-parsing", "default-search-handling-strict", "disable-fhirpath-validation", "disable-profile-validation", "disable-reference-type-validation", "disable-referential-integrity", "disable-required-field-validation", "disable-resource-versioning", "enable-update-create", "enabled-implementation-guides", "labels", "name", "notification-config", "pubsub-topic", "validation-config", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6503,7 +6510,7 @@ where let (key, value) = parse_kv_arg(&*parg, 
err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6783,7 +6790,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7129,7 +7136,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7273,7 +7280,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -7632,7 +7639,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -7735,7 +7742,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); 
+ call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -7832,7 +7839,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8063,7 +8070,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8258,7 +8265,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8351,7 +8358,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8634,7 +8641,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -9828,7 +9835,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -9928,7 +9935,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -9956,7 +9963,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -10235,7 +10242,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-dicom-stores-deidentify", - Some(r##"De-identifies data from the source store and writes it to the destination store. The metadata field type is OperationMetadata. If the request is successful, the response field type is DeidentifyDicomStoreSummary. If errors occur, error is set. The LRO result may still be successful if de-identification fails for some DICOM instances. The output DICOM store will not contain these failed resources. Failed resource totals are tracked in Operation.metadata. 
Error details are also logged to Cloud Logging (see [Viewing error logs in Cloud Logging](/healthcare/docs/how-tos/logging))."##), + Some(r##"De-identifies data from the source store and writes it to the destination store. The metadata field type is OperationMetadata. If the request is successful, the response field type is DeidentifyDicomStoreSummary. If errors occur, error is set. The LRO result may still be successful if de-identification fails for some DICOM instances. The output DICOM store will not contain these failed resources. Failed resource totals are tracked in Operation.metadata. Error details are also logged to Cloud Logging (see [Viewing error logs in Cloud Logging](https://cloud.google.com/healthcare/docs/how-tos/logging))."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-dicom-stores-deidentify", vec![ (Some(r##"source-store"##), @@ -10340,7 +10347,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -10524,7 +10531,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -11040,7 +11047,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -11169,7 +11176,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir--patient-everything", - Some(r##"Retrieves a Patient resource and resources related to that patient. Implements the FHIR extended operation Patient-everything ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/patient-operations.html#everything), [STU3](http://hl7.org/implement/standards/fhir/STU3/patient-operations.html#everything), [R4](http://hl7.org/implement/standards/fhir/R4/patient-operations.html#everything)). On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the operation. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The resources in scope for the response are: * The patient resource itself. * All the resources directly referenced by the patient resource. * Resources directly referencing the patient resource that meet the inclusion criteria. The inclusion criteria are based on the membership rules in the patient compartment definition ([DSTU2](http://hl7.org/fhir/DSTU2/compartment-patient.html), [STU3](http://www.hl7.org/fhir/stu3/compartmentdefinition-patient.html), [R4](http://hl7.org/fhir/R4/compartmentdefinition-patient.html)), which details the eligible resource types and referencing search parameters. 
For samples that show how to call `Patient-everything`, see [Getting all patient compartment resources](/healthcare/docs/how-tos/fhir-resources#getting_all_patient_compartment_resources)."##), + Some(r##"Retrieves a Patient resource and resources related to that patient. Implements the FHIR extended operation Patient-everything ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/patient-operations.html#everything), [STU3](http://hl7.org/implement/standards/fhir/STU3/patient-operations.html#everything), [R4](http://hl7.org/implement/standards/fhir/R4/patient-operations.html#everything)). On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the operation. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The resources in scope for the response are: * The patient resource itself. * All the resources directly referenced by the patient resource. * Resources directly referencing the patient resource that meet the inclusion criteria. The inclusion criteria are based on the membership rules in the patient compartment definition ([DSTU2](http://hl7.org/fhir/DSTU2/compartment-patient.html), [STU3](http://www.hl7.org/fhir/stu3/compartmentdefinition-patient.html), [R4](http://hl7.org/fhir/R4/compartmentdefinition-patient.html)), which details the eligible resource types and referencing search parameters. 
For samples that show how to call `Patient-everything`, see [Getting all patient compartment resources](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#getting_all_patient_compartment_resources)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir--patient-everything", vec![ (Some(r##"name"##), @@ -11191,7 +11198,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir--resource-purge", - Some(r##"Deletes all the historical versions of a resource (excluding the current version) from the FHIR store. To remove all versions of a resource, first delete the current version and then call this method. This is not a FHIR standard operation. For samples that show how to call `Resource-purge`, see [Deleting historical versions of a FHIR resource](/healthcare/docs/how-tos/fhir-resources#deleting_historical_versions_of_a_fhir_resource)."##), + Some(r##"Deletes all the historical versions of a resource (excluding the current version) from the FHIR store. To remove all versions of a resource, first delete the current version and then call this method. This is not a FHIR standard operation. For samples that show how to call `Resource-purge`, see [Deleting historical versions of a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#deleting_historical_versions_of_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir--resource-purge", vec![ (Some(r##"name"##), @@ -11269,7 +11276,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-create", - Some(r##"Creates a FHIR resource. 
Implements the FHIR standard create interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#create), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#create), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#create)), which creates a new resource with a server-assigned resource ID. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. On success, the response body contains a JSON-encoded representation of the resource as it was created on the server, including the server-assigned resource ID and version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `create`, see [Creating a FHIR resource](/healthcare/docs/how-tos/fhir-resources#creating_a_fhir_resource)."##), + Some(r##"Creates a FHIR resource. Implements the FHIR standard create interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#create), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#create), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#create)), which creates a new resource with a server-assigned resource ID. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. On success, the response body contains a JSON-encoded representation of the resource as it was created on the server, including the server-assigned resource ID and version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. 
For samples that show how to call `create`, see [Creating a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#creating_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-create", vec![ (Some(r##"parent"##), @@ -11303,7 +11310,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-delete", - Some(r##"Deletes a FHIR resource. Implements the FHIR standard delete interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#delete), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#delete), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#delete)). Note: Unless resource versioning is disabled by setting the disable_resource_versioning flag on the FHIR store, the deleted resources will be moved to a history repository that can still be retrieved through vread and related methods, unless they are removed by the purge method. For samples that show how to call `delete`, see [Deleting a FHIR resource](/healthcare/docs/how-tos/fhir-resources#deleting_a_fhir_resource)."##), + Some(r##"Deletes a FHIR resource. Implements the FHIR standard delete interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#delete), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#delete), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#delete)). Note: Unless resource versioning is disabled by setting the disable_resource_versioning flag on the FHIR store, the deleted resources will be moved to a history repository that can still be retrieved through vread and related methods, unless they are removed by the purge method. 
For samples that show how to call `delete`, see [Deleting a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#deleting_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-delete", vec![ (Some(r##"name"##), @@ -11325,7 +11332,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-execute-bundle", - Some(r##"Executes all the requests in the given Bundle. Implements the FHIR standard batch/transaction interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#transaction), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#transaction), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#transaction)). Supports all interactions within a bundle, except search. This method accepts Bundles of type `batch` and `transaction`, processing them according to the batch processing rules ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.16.1), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.17.1), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#brules)) and transaction processing rules ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.16.2), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.17.2), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#trules)). The request body must contain a JSON-encoded FHIR `Bundle` resource, and the request headers must contain `Content-Type: application/fhir+json`. For a batch bundle or a successful transaction the response body will contain a JSON-encoded representation of a `Bundle` resource of type `batch-response` or `transaction-response` containing one entry for each entry in the request, with the outcome of processing the entry. 
In the case of an error for a transaction bundle, the response body will contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `executeBundle`, see [Managing FHIR resources using FHIR bundles](/healthcare/docs/how-tos/fhir-bundles)."##), + Some(r##"Executes all the requests in the given Bundle. Implements the FHIR standard batch/transaction interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#transaction), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#transaction), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#transaction)). Supports all interactions within a bundle, except search. This method accepts Bundles of type `batch` and `transaction`, processing them according to the batch processing rules ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.16.1), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.17.1), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#brules)) and transaction processing rules ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.16.2), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.17.2), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#trules)). The request body must contain a JSON-encoded FHIR `Bundle` resource, and the request headers must contain `Content-Type: application/fhir+json`. For a batch bundle or a successful transaction, the response body contains a JSON-encoded representation of a `Bundle` resource of type `batch-response` or `transaction-response` containing one entry for each entry in the request, with the outcome of processing the entry. 
In the case of an error for a transaction bundle, the response body contains a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. This method checks permissions for each request in the bundle. The `executeBundle` permission is required to call this method, but you must also grant sufficient permissions to execute the individual requests in the bundle. For example, if the bundle contains a request to create a FHIR resource, the caller must also have been granted the `healthcare.fhirResources.create` permission. You can use audit logs to view the permissions for `executeBundle` and each request in the bundle. For more information, see [Viewing Cloud Audit logs](https://cloud.google.com/healthcare-api/docs/how-tos/audit-logging). For samples that show how to call `executeBundle`, see [Managing FHIR resources using FHIR bundles](https://cloud.google.com/healthcare/docs/how-tos/fhir-bundles)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-execute-bundle", vec![ (Some(r##"parent"##), @@ -11353,7 +11360,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-history", - Some(r##"Lists all the versions of a resource (including the current version and deleted versions) from the FHIR store. Implements the per-resource form of the FHIR standard history interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#history), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#history), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#history)). On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `history`, containing the version history sorted from most recent to oldest versions. 
Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `history`, see [Listing FHIR resource versions](/healthcare/docs/how-tos/fhir-resources#listing_fhir_resource_versions)."##), + Some(r##"Lists all the versions of a resource (including the current version and deleted versions) from the FHIR store. Implements the per-resource form of the FHIR standard history interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#history), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#history), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#history)). On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `history`, containing the version history sorted from most recent to oldest versions. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `history`, see [Listing FHIR resource versions](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#listing_fhir_resource_versions)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-history", vec![ (Some(r##"name"##), @@ -11375,7 +11382,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-patch", - Some(r##"Updates part of an existing resource by applying the operations specified in a [JSON Patch](http://jsonpatch.com/) document. 
Implements the FHIR standard patch interaction ([STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#patch), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#patch)). DSTU2 doesn't define a patch method, but the server supports it in the same way it supports STU3. The request body must contain a JSON Patch document, and the request headers must contain `Content-Type: application/json-patch+json`. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `patch`, see [Patching a FHIR resource](/healthcare/docs/how-tos/fhir-resources#patching_a_fhir_resource)."##), + Some(r##"Updates part of an existing resource by applying the operations specified in a [JSON Patch](http://jsonpatch.com/) document. Implements the FHIR standard patch interaction ([STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#patch), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#patch)). DSTU2 doesn't define a patch method, but the server supports it in the same way it supports STU3. The request body must contain a JSON Patch document, and the request headers must contain `Content-Type: application/json-patch+json`. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. 
For samples that show how to call `patch`, see [Patching a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#patching_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-patch", vec![ (Some(r##"name"##), @@ -11403,7 +11410,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-read", - Some(r##"Gets the contents of a FHIR resource. Implements the FHIR standard read interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#read), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#read), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#read)). Also supports the FHIR standard conditional read interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#cread), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#cread), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#cread)) specified by supplying an `If-Modified-Since` header with a date/time value or an `If-None-Match` header with an ETag value. On success, the response body contains a JSON-encoded representation of the resource. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `read`, see [Getting a FHIR resource](/healthcare/docs/how-tos/fhir-resources#getting_a_fhir_resource)."##), + Some(r##"Gets the contents of a FHIR resource. Implements the FHIR standard read interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#read), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#read), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#read)). 
Also supports the FHIR standard conditional read interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#cread), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#cread), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#cread)) specified by supplying an `If-Modified-Since` header with a date/time value or an `If-None-Match` header with an ETag value. On success, the response body contains a JSON-encoded representation of the resource. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `read`, see [Getting a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#getting_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-read", vec![ (Some(r##"name"##), @@ -11425,7 +11432,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-search", - Some(r##"Searches for resources in the given FHIR store according to criteria specified as query parameters. Implements the FHIR standard search interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#search), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#search), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#search)) using the search semantics described in the FHIR Search specification ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/search.html), [STU3](http://hl7.org/implement/standards/fhir/STU3/search.html), [R4](http://hl7.org/implement/standards/fhir/R4/search.html)). Supports four methods of search defined by the specification: * `GET [base]?[parameters]` to search across all resources. * `GET [base]/[type]?[parameters]` to search resources of a specified type. 
* `POST [base]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method across all resources. * `POST [base]/[type]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method for the specified type. The `GET` and `POST` methods do not support compartment searches. The `POST` method does not support `application/x-www-form-urlencoded` search parameters. On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the search. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The server's capability statement, retrieved through capabilities, indicates what search parameters are supported on each FHIR resource. A list of all search parameters defined by the specification can be found in the FHIR Search Parameter Registry ([STU3](http://hl7.org/implement/standards/fhir/STU3/searchparameter-registry.html), [R4](http://hl7.org/implement/standards/fhir/R4/searchparameter-registry.html)). FHIR search parameters for DSTU2 can be found on each resource's definition page. Supported search modifiers: `:missing`, `:exact`, `:contains`, `:text`, `:in`, `:not-in`, `:above`, `:below`, `:[type]`, `:not`, and `:recurse`. Supported search result parameters: `_sort`, `_count`, `_include`, `_revinclude`, `_summary=text`, `_summary=data`, and `_elements`. The maximum number of search results returned defaults to 100, which can be overridden by the `_count` parameter up to a maximum limit of 1000. If there are additional results, the returned `Bundle` contains a link of `relation` "next", which has a `_page_token` parameter for an opaque pagination token that can be used to retrieve the next page. 
Resources with a total size larger than 5MB or a field count larger than 50,000 might not be fully searchable as the server might trim its generated search index in those cases. Note: FHIR resources are indexed asynchronously, so there might be a slight delay between the time a resource is created or changes and when the change is reflected in search results. For samples and detailed information, see [Searching for FHIR resources](/healthcare/docs/how-tos/fhir-search) and [Advanced FHIR search features](/healthcare/docs/how-tos/fhir-advanced-search)."##), + Some(r##"Searches for resources in the given FHIR store according to criteria specified as query parameters. Implements the FHIR standard search interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#search), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#search), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#search)) using the search semantics described in the FHIR Search specification ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/search.html), [STU3](http://hl7.org/implement/standards/fhir/STU3/search.html), [R4](http://hl7.org/implement/standards/fhir/R4/search.html)). Supports four methods of search defined by the specification: * `GET [base]?[parameters]` to search across all resources. * `GET [base]/[type]?[parameters]` to search resources of a specified type. * `POST [base]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method across all resources. * `POST [base]/[type]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method for the specified type. The `GET` and `POST` methods do not support compartment searches. The `POST` method does not support `application/x-www-form-urlencoded` search parameters. On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the search. 
Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The server's capability statement, retrieved through capabilities, indicates what search parameters are supported on each FHIR resource. A list of all search parameters defined by the specification can be found in the FHIR Search Parameter Registry ([STU3](http://hl7.org/implement/standards/fhir/STU3/searchparameter-registry.html), [R4](http://hl7.org/implement/standards/fhir/R4/searchparameter-registry.html)). FHIR search parameters for DSTU2 can be found on each resource's definition page. Supported search modifiers: `:missing`, `:exact`, `:contains`, `:text`, `:in`, `:not-in`, `:above`, `:below`, `:[type]`, `:not`, and `recurse` (DSTU2 and STU3) or `:iterate` (R4). Supported search result parameters: `_sort`, `_count`, `_include`, `_revinclude`, `_summary=text`, `_summary=data`, and `_elements`. The maximum number of search results returned defaults to 100, which can be overridden by the `_count` parameter up to a maximum limit of 1000. If there are additional results, the returned `Bundle` contains a link of `relation` "next", which has a `_page_token` parameter for an opaque pagination token that can be used to retrieve the next page. Resources with a total size larger than 5MB or a field count larger than 50,000 might not be fully searchable as the server might trim its generated search index in those cases. Note: FHIR resources are indexed asynchronously, so there might be a slight delay between the time a resource is created or changes and when the change is reflected in search results. 
For samples and detailed information, see [Searching for FHIR resources](https://cloud.google.com/healthcare/docs/how-tos/fhir-search) and [Advanced FHIR search features](https://cloud.google.com/healthcare/docs/how-tos/fhir-advanced-search)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-search", vec![ (Some(r##"parent"##), @@ -11453,7 +11460,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-search-type", - Some(r##"Searches for resources in the given FHIR store according to criteria specified as query parameters. Implements the FHIR standard search interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#search), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#search), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#search)) using the search semantics described in the FHIR Search specification ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/search.html), [STU3](http://hl7.org/implement/standards/fhir/STU3/search.html), [R4](http://hl7.org/implement/standards/fhir/R4/search.html)). Supports four methods of search defined by the specification: * `GET [base]?[parameters]` to search across all resources. * `GET [base]/[type]?[parameters]` to search resources of a specified type. * `POST [base]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method across all resources. * `POST [base]/[type]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method for the specified type. The `GET` and `POST` methods do not support compartment searches. The `POST` method does not support `application/x-www-form-urlencoded` search parameters. On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the search. 
Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The server's capability statement, retrieved through capabilities, indicates what search parameters are supported on each FHIR resource. A list of all search parameters defined by the specification can be found in the FHIR Search Parameter Registry ([STU3](http://hl7.org/implement/standards/fhir/STU3/searchparameter-registry.html), [R4](http://hl7.org/implement/standards/fhir/R4/searchparameter-registry.html)). FHIR search parameters for DSTU2 can be found on each resource's definition page. Supported search modifiers: `:missing`, `:exact`, `:contains`, `:text`, `:in`, `:not-in`, `:above`, `:below`, `:[type]`, `:not`, and `:recurse`. Supported search result parameters: `_sort`, `_count`, `_include`, `_revinclude`, `_summary=text`, `_summary=data`, and `_elements`. The maximum number of search results returned defaults to 100, which can be overridden by the `_count` parameter up to a maximum limit of 1000. If there are additional results, the returned `Bundle` contains a link of `relation` "next", which has a `_page_token` parameter for an opaque pagination token that can be used to retrieve the next page. Resources with a total size larger than 5MB or a field count larger than 50,000 might not be fully searchable as the server might trim its generated search index in those cases. Note: FHIR resources are indexed asynchronously, so there might be a slight delay between the time a resource is created or changes and when the change is reflected in search results. 
For samples and detailed information, see [Searching for FHIR resources](/healthcare/docs/how-tos/fhir-search) and [Advanced FHIR search features](/healthcare/docs/how-tos/fhir-advanced-search)."##), + Some(r##"Searches for resources in the given FHIR store according to criteria specified as query parameters. Implements the FHIR standard search interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#search), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#search), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#search)) using the search semantics described in the FHIR Search specification ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/search.html), [STU3](http://hl7.org/implement/standards/fhir/STU3/search.html), [R4](http://hl7.org/implement/standards/fhir/R4/search.html)). Supports four methods of search defined by the specification: * `GET [base]?[parameters]` to search across all resources. * `GET [base]/[type]?[parameters]` to search resources of a specified type. * `POST [base]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method across all resources. * `POST [base]/[type]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method for the specified type. The `GET` and `POST` methods do not support compartment searches. The `POST` method does not support `application/x-www-form-urlencoded` search parameters. On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the search. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The server's capability statement, retrieved through capabilities, indicates what search parameters are supported on each FHIR resource. 
A list of all search parameters defined by the specification can be found in the FHIR Search Parameter Registry ([STU3](http://hl7.org/implement/standards/fhir/STU3/searchparameter-registry.html), [R4](http://hl7.org/implement/standards/fhir/R4/searchparameter-registry.html)). FHIR search parameters for DSTU2 can be found on each resource's definition page. Supported search modifiers: `:missing`, `:exact`, `:contains`, `:text`, `:in`, `:not-in`, `:above`, `:below`, `:[type]`, `:not`, and `recurse` (DSTU2 and STU3) or `:iterate` (R4). Supported search result parameters: `_sort`, `_count`, `_include`, `_revinclude`, `_summary=text`, `_summary=data`, and `_elements`. The maximum number of search results returned defaults to 100, which can be overridden by the `_count` parameter up to a maximum limit of 1000. If there are additional results, the returned `Bundle` contains a link of `relation` "next", which has a `_page_token` parameter for an opaque pagination token that can be used to retrieve the next page. Resources with a total size larger than 5MB or a field count larger than 50,000 might not be fully searchable as the server might trim its generated search index in those cases. Note: FHIR resources are indexed asynchronously, so there might be a slight delay between the time a resource is created or changes and when the change is reflected in search results. For samples and detailed information, see [Searching for FHIR resources](https://cloud.google.com/healthcare/docs/how-tos/fhir-search) and [Advanced FHIR search features](https://cloud.google.com/healthcare/docs/how-tos/fhir-advanced-search)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-search-type", vec![ (Some(r##"parent"##), @@ -11487,7 +11494,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-update", - Some(r##"Updates the entire contents of a resource. 
Implements the FHIR standard update interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#update), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#update), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#update)). If the specified resource does not exist and the FHIR store has enable_update_create set, creates the resource with the client-specified ID. It is strongly advised not to include or encode any sensitive data such as patient identifiers in client-specified resource IDs. Those IDs are part of the FHIR resource path recorded in Cloud Audit Logs and Pub/Sub notifications. Those IDs can also be contained in reference fields within other resources. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. The resource must contain an `id` element having an identical value to the ID in the REST path of the request. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `update`, see [Updating a FHIR resource](/healthcare/docs/how-tos/fhir-resources#updating_a_fhir_resource)."##), + Some(r##"Updates the entire contents of a resource. Implements the FHIR standard update interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#update), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#update), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#update)). If the specified resource does not exist and the FHIR store has enable_update_create set, creates the resource with the client-specified ID. 
It is strongly advised not to include or encode any sensitive data such as patient identifiers in client-specified resource IDs. Those IDs are part of the FHIR resource path recorded in Cloud Audit Logs and Pub/Sub notifications. Those IDs can also be contained in reference fields within other resources. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. The resource must contain an `id` element having an identical value to the ID in the REST path of the request. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `update`, see [Updating a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#updating_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-update", vec![ (Some(r##"name"##), @@ -11515,7 +11522,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-vread", - Some(r##"Gets the contents of a version (current or historical) of a FHIR resource by version ID. Implements the FHIR standard vread interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#vread), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#vread), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#vread)). On success, the response body contains a JSON-encoded representation of the resource. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. 
If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `vread`, see [Retrieving a FHIR resource version](/healthcare/docs/how-tos/fhir-resources#retrieving_a_fhir_resource_version)."##), + Some(r##"Gets the contents of a version (current or historical) of a FHIR resource by version ID. Implements the FHIR standard vread interaction ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/http.html#vread), [STU3](http://hl7.org/implement/standards/fhir/STU3/http.html#vread), [R4](http://hl7.org/implement/standards/fhir/R4/http.html#vread)). On success, the response body contains a JSON-encoded representation of the resource. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `vread`, see [Retrieving a FHIR resource version](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#retrieving_a_fhir_resource_version)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_cli/projects_locations-datasets-fhir-stores-fhir-vread", vec![ (Some(r##"name"##), @@ -11564,7 +11571,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -11664,7 +11671,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -11692,7 +11699,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -11742,7 +11749,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -11864,7 +11871,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12058,7 +12065,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Resource name of the Message, of the form `projects/{project_id}/datasets/{dataset_id}/hl7V2Stores/{hl7_v2_store_id}/messages/{message_id}`. 
Assigned by the server."##), + Some(r##"Resource name of the Message, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/hl7V2Stores/{hl7_v2_store_id}/messages/{message_id}`. Assigned by the server."##), Some(true), Some(false)), @@ -12086,7 +12093,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Resource name of the HL7v2 store, of the form `projects/{project_id}/datasets/{dataset_id}/hl7V2Stores/{hl7v2_store_id}`."##), + Some(r##"Resource name of the HL7v2 store, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/hl7V2Stores/{hl7v2_store_id}`."##), Some(true), Some(false)), @@ -12114,7 +12121,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12142,7 +12149,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12292,7 +12299,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12320,7 +12327,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12420,7 +12427,7 @@ async fn main() { let mut app = App::new("healthcare1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20221220") .about("Manage, store, and access healthcare data in Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_healthcare1_cli") .arg(Arg::with_name("url") diff --git a/gen/healthcare1/Cargo.toml b/gen/healthcare1/Cargo.toml index d29eeddc94..00fe567fe7 100644 --- a/gen/healthcare1/Cargo.toml +++ b/gen/healthcare1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-healthcare1" -version = "5.0.2-beta-1+20221220" +version = "5.0.2+20221220" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Healthcare (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/healthcare1" homepage = "https://cloud.google.com/healthcare" -documentation = "https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220" +documentation = "https://docs.rs/google-healthcare1/5.0.2+20221220" license = "MIT" keywords = ["healthcare", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/healthcare1/README.md b/gen/healthcare1/README.md index 0b2537fa17..4f2c0f204c 100644 --- a/gen/healthcare1/README.md +++ b/gen/healthcare1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-healthcare1` library allows access to all features of the *Google Cloud Healthcare* service. -This documentation was generated from *Cloud Healthcare* crate version *5.0.2-beta-1+20221220*, where *20221220* is the exact revision of the *healthcare:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Healthcare* crate version *5.0.2+20221220*, where *20221220* is the exact revision of the *healthcare:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Healthcare* *v1* API can be found at the [official documentation site](https://cloud.google.com/healthcare). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/CloudHealthcare) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/CloudHealthcare) ... 
* projects - * [*locations datasets consent stores attribute definitions create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionCreateCall), [*locations datasets consent stores attribute definitions delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionDeleteCall), [*locations datasets consent stores attribute definitions get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionGetCall), [*locations datasets consent stores attribute definitions list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionListCall), [*locations datasets consent stores attribute definitions patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionPatchCall), [*locations datasets consent stores check data access*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreCheckDataAccesCall), [*locations datasets consent stores consent artifacts create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentArtifactCreateCall), [*locations datasets consent stores consent artifacts delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentArtifactDeleteCall), [*locations datasets consent stores consent artifacts get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentArtifactGetCall), [*locations datasets consent stores consent artifacts 
list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentArtifactListCall), [*locations datasets consent stores consents activate*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentActivateCall), [*locations datasets consent stores consents create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentCreateCall), [*locations datasets consent stores consents delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentDeleteCall), [*locations datasets consent stores consents delete revision*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentDeleteRevisionCall), [*locations datasets consent stores consents get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentGetCall), [*locations datasets consent stores consents list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentListCall), [*locations datasets consent stores consents list revisions*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentListRevisionCall), [*locations datasets consent stores consents patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentPatchCall), [*locations datasets consent stores consents reject*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentRejectCall), [*locations datasets consent stores consents 
revoke*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentRevokeCall), [*locations datasets consent stores create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreCreateCall), [*locations datasets consent stores delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreDeleteCall), [*locations datasets consent stores evaluate user consents*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreEvaluateUserConsentCall), [*locations datasets consent stores get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreGetCall), [*locations datasets consent stores get iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreGetIamPolicyCall), [*locations datasets consent stores list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreListCall), [*locations datasets consent stores patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStorePatchCall), [*locations datasets consent stores query accessible data*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreQueryAccessibleDataCall), [*locations datasets consent stores set iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreSetIamPolicyCall), [*locations datasets consent stores test iam permissions*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreTestIamPermissionCall), [*locations datasets 
consent stores user data mappings archive*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingArchiveCall), [*locations datasets consent stores user data mappings create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingCreateCall), [*locations datasets consent stores user data mappings delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingDeleteCall), [*locations datasets consent stores user data mappings get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingGetCall), [*locations datasets consent stores user data mappings list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingListCall), [*locations datasets consent stores user data mappings patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingPatchCall), [*locations datasets create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetCreateCall), [*locations datasets deidentify*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDeidentifyCall), [*locations datasets delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDeleteCall), [*locations datasets dicom stores create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreCreateCall), [*locations datasets dicom stores 
deidentify*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreDeidentifyCall), [*locations datasets dicom stores delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreDeleteCall), [*locations datasets dicom stores export*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreExportCall), [*locations datasets dicom stores get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreGetCall), [*locations datasets dicom stores get iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreGetIamPolicyCall), [*locations datasets dicom stores import*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreImportCall), [*locations datasets dicom stores list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreListCall), [*locations datasets dicom stores patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStorePatchCall), [*locations datasets dicom stores search for instances*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreSearchForInstanceCall), [*locations datasets dicom stores search for series*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreSearchForSeryCall), [*locations datasets dicom stores search for studies*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreSearchForStudyCall), [*locations datasets dicom stores set iam 
policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreSetIamPolicyCall), [*locations datasets dicom stores store instances*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStoreInstanceCall), [*locations datasets dicom stores studies delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudyDeleteCall), [*locations datasets dicom stores studies retrieve metadata*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudyRetrieveMetadataCall), [*locations datasets dicom stores studies retrieve study*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudyRetrieveStudyCall), [*locations datasets dicom stores studies search for instances*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySearchForInstanceCall), [*locations datasets dicom stores studies search for series*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySearchForSeryCall), [*locations datasets dicom stores studies series delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesDeleteCall), [*locations datasets dicom stores studies series instances delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceDeleteCall), [*locations datasets dicom stores studies series instances frames retrieve frames*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceFrameRetrieveFrameCall), [*locations datasets dicom stores 
studies series instances frames retrieve rendered*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceFrameRetrieveRenderedCall), [*locations datasets dicom stores studies series instances retrieve instance*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveInstanceCall), [*locations datasets dicom stores studies series instances retrieve metadata*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveMetadataCall), [*locations datasets dicom stores studies series instances retrieve rendered*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveRenderedCall), [*locations datasets dicom stores studies series retrieve metadata*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesRetrieveMetadataCall), [*locations datasets dicom stores studies series retrieve series*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesRetrieveSeryCall), [*locations datasets dicom stores studies series search for instances*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesSearchForInstanceCall), [*locations datasets dicom stores studies store instances*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudyStoreInstanceCall), [*locations datasets dicom stores test iam permissions*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreTestIamPermissionCall), [*locations datasets fhir stores 
create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreCreateCall), [*locations datasets fhir stores deidentify*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreDeidentifyCall), [*locations datasets fhir stores delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreDeleteCall), [*locations datasets fhir stores export*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreExportCall), [*locations datasets fhir stores fhir patient-everything*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirPatientEverythingCall), [*locations datasets fhir stores fhir resource-purge*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirResourcePurgeCall), [*locations datasets fhir stores fhir resource-validate*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirResourceValidateCall), [*locations datasets fhir stores fhir capabilities*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirCapabilityCall), [*locations datasets fhir stores fhir create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirCreateCall), [*locations datasets fhir stores fhir delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirDeleteCall), [*locations datasets fhir stores fhir execute bundle*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirExecuteBundleCall), [*locations datasets fhir stores fhir 
history*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirHistoryCall), [*locations datasets fhir stores fhir patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirPatchCall), [*locations datasets fhir stores fhir read*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirReadCall), [*locations datasets fhir stores fhir search*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirSearchCall), [*locations datasets fhir stores fhir search-type*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirSearchTypeCall), [*locations datasets fhir stores fhir update*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirUpdateCall), [*locations datasets fhir stores fhir vread*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirVreadCall), [*locations datasets fhir stores get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreGetCall), [*locations datasets fhir stores get iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreGetIamPolicyCall), [*locations datasets fhir stores import*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreImportCall), [*locations datasets fhir stores list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreListCall), [*locations datasets fhir stores 
patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStorePatchCall), [*locations datasets fhir stores set iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreSetIamPolicyCall), [*locations datasets fhir stores test iam permissions*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreTestIamPermissionCall), [*locations datasets get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetGetCall), [*locations datasets get iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetGetIamPolicyCall), [*locations datasets hl7 v2 stores create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreCreateCall), [*locations datasets hl7 v2 stores delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreDeleteCall), [*locations datasets hl7 v2 stores export*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreExportCall), [*locations datasets hl7 v2 stores get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreGetCall), [*locations datasets hl7 v2 stores get iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreGetIamPolicyCall), [*locations datasets hl7 v2 stores import*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreImportCall), [*locations datasets hl7 v2 stores list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreListCall), 
[*locations datasets hl7 v2 stores messages create*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageCreateCall), [*locations datasets hl7 v2 stores messages delete*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageDeleteCall), [*locations datasets hl7 v2 stores messages get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageGetCall), [*locations datasets hl7 v2 stores messages ingest*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageIngestCall), [*locations datasets hl7 v2 stores messages list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageListCall), [*locations datasets hl7 v2 stores messages patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessagePatchCall), [*locations datasets hl7 v2 stores patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StorePatchCall), [*locations datasets hl7 v2 stores set iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreSetIamPolicyCall), [*locations datasets hl7 v2 stores test iam permissions*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreTestIamPermissionCall), [*locations datasets list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetListCall), [*locations datasets operations cancel*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetOperationCancelCall), [*locations datasets operations 
get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetOperationGetCall), [*locations datasets operations list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetOperationListCall), [*locations datasets patch*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetPatchCall), [*locations datasets set iam policy*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetSetIamPolicyCall), [*locations datasets test iam permissions*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationDatasetTestIamPermissionCall), [*locations get*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationListCall) and [*locations services nlp analyze entities*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/api::ProjectLocationServiceNlpAnalyzeEntityCall) + * [*locations datasets consent stores attribute definitions create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionCreateCall), [*locations datasets consent stores attribute definitions delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionDeleteCall), [*locations datasets consent stores attribute definitions get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionGetCall), [*locations datasets consent stores attribute definitions 
list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionListCall), [*locations datasets consent stores attribute definitions patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionPatchCall), [*locations datasets consent stores check data access*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreCheckDataAccesCall), [*locations datasets consent stores consent artifacts create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentArtifactCreateCall), [*locations datasets consent stores consent artifacts delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentArtifactDeleteCall), [*locations datasets consent stores consent artifacts get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentArtifactGetCall), [*locations datasets consent stores consent artifacts list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentArtifactListCall), [*locations datasets consent stores consents activate*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentActivateCall), [*locations datasets consent stores consents create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentCreateCall), [*locations datasets consent stores consents delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentDeleteCall), [*locations datasets consent stores consents delete 
revision*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentDeleteRevisionCall), [*locations datasets consent stores consents get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentGetCall), [*locations datasets consent stores consents list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentListCall), [*locations datasets consent stores consents list revisions*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentListRevisionCall), [*locations datasets consent stores consents patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentPatchCall), [*locations datasets consent stores consents reject*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentRejectCall), [*locations datasets consent stores consents revoke*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreConsentRevokeCall), [*locations datasets consent stores create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreCreateCall), [*locations datasets consent stores delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreDeleteCall), [*locations datasets consent stores evaluate user consents*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreEvaluateUserConsentCall), [*locations datasets consent stores get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreGetCall), [*locations datasets consent stores get iam 
policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreGetIamPolicyCall), [*locations datasets consent stores list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreListCall), [*locations datasets consent stores patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStorePatchCall), [*locations datasets consent stores query accessible data*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreQueryAccessibleDataCall), [*locations datasets consent stores set iam policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreSetIamPolicyCall), [*locations datasets consent stores test iam permissions*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreTestIamPermissionCall), [*locations datasets consent stores user data mappings archive*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingArchiveCall), [*locations datasets consent stores user data mappings create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingCreateCall), [*locations datasets consent stores user data mappings delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingDeleteCall), [*locations datasets consent stores user data mappings get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingGetCall), [*locations datasets consent stores user data mappings 
list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingListCall), [*locations datasets consent stores user data mappings patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetConsentStoreUserDataMappingPatchCall), [*locations datasets create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetCreateCall), [*locations datasets deidentify*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDeidentifyCall), [*locations datasets delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDeleteCall), [*locations datasets dicom stores create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreCreateCall), [*locations datasets dicom stores deidentify*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreDeidentifyCall), [*locations datasets dicom stores delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreDeleteCall), [*locations datasets dicom stores export*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreExportCall), [*locations datasets dicom stores get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreGetCall), [*locations datasets dicom stores get iam policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreGetIamPolicyCall), [*locations datasets dicom stores import*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreImportCall), [*locations datasets dicom stores 
list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreListCall), [*locations datasets dicom stores patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStorePatchCall), [*locations datasets dicom stores search for instances*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreSearchForInstanceCall), [*locations datasets dicom stores search for series*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreSearchForSeryCall), [*locations datasets dicom stores search for studies*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreSearchForStudyCall), [*locations datasets dicom stores set iam policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreSetIamPolicyCall), [*locations datasets dicom stores store instances*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStoreInstanceCall), [*locations datasets dicom stores studies delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudyDeleteCall), [*locations datasets dicom stores studies retrieve metadata*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudyRetrieveMetadataCall), [*locations datasets dicom stores studies retrieve study*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudyRetrieveStudyCall), [*locations datasets dicom stores studies search for instances*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySearchForInstanceCall), [*locations datasets dicom stores studies search 
for series*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySearchForSeryCall), [*locations datasets dicom stores studies series delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesDeleteCall), [*locations datasets dicom stores studies series instances delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceDeleteCall), [*locations datasets dicom stores studies series instances frames retrieve frames*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceFrameRetrieveFrameCall), [*locations datasets dicom stores studies series instances frames retrieve rendered*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceFrameRetrieveRenderedCall), [*locations datasets dicom stores studies series instances retrieve instance*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveInstanceCall), [*locations datasets dicom stores studies series instances retrieve metadata*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveMetadataCall), [*locations datasets dicom stores studies series instances retrieve rendered*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveRenderedCall), [*locations datasets dicom stores studies series retrieve metadata*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesRetrieveMetadataCall), [*locations datasets dicom stores studies series retrieve 
series*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesRetrieveSeryCall), [*locations datasets dicom stores studies series search for instances*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudySeriesSearchForInstanceCall), [*locations datasets dicom stores studies store instances*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreStudyStoreInstanceCall), [*locations datasets dicom stores test iam permissions*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetDicomStoreTestIamPermissionCall), [*locations datasets fhir stores create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreCreateCall), [*locations datasets fhir stores deidentify*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreDeidentifyCall), [*locations datasets fhir stores delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreDeleteCall), [*locations datasets fhir stores export*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreExportCall), [*locations datasets fhir stores fhir patient-everything*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirPatientEverythingCall), [*locations datasets fhir stores fhir resource-purge*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirResourcePurgeCall), [*locations datasets fhir stores fhir resource-validate*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirResourceValidateCall), [*locations datasets fhir stores fhir 
capabilities*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirCapabilityCall), [*locations datasets fhir stores fhir create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirCreateCall), [*locations datasets fhir stores fhir delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirDeleteCall), [*locations datasets fhir stores fhir execute bundle*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirExecuteBundleCall), [*locations datasets fhir stores fhir history*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirHistoryCall), [*locations datasets fhir stores fhir patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirPatchCall), [*locations datasets fhir stores fhir read*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirReadCall), [*locations datasets fhir stores fhir search*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirSearchCall), [*locations datasets fhir stores fhir search-type*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirSearchTypeCall), [*locations datasets fhir stores fhir update*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirUpdateCall), [*locations datasets fhir stores fhir vread*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreFhirVreadCall), [*locations datasets fhir stores 
get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreGetCall), [*locations datasets fhir stores get iam policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreGetIamPolicyCall), [*locations datasets fhir stores import*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreImportCall), [*locations datasets fhir stores list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreListCall), [*locations datasets fhir stores patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStorePatchCall), [*locations datasets fhir stores set iam policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreSetIamPolicyCall), [*locations datasets fhir stores test iam permissions*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetFhirStoreTestIamPermissionCall), [*locations datasets get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetGetCall), [*locations datasets get iam policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetGetIamPolicyCall), [*locations datasets hl7 v2 stores create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreCreateCall), [*locations datasets hl7 v2 stores delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreDeleteCall), [*locations datasets hl7 v2 stores export*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreExportCall), [*locations datasets hl7 v2 stores 
get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreGetCall), [*locations datasets hl7 v2 stores get iam policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreGetIamPolicyCall), [*locations datasets hl7 v2 stores import*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreImportCall), [*locations datasets hl7 v2 stores list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreListCall), [*locations datasets hl7 v2 stores messages create*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageCreateCall), [*locations datasets hl7 v2 stores messages delete*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageDeleteCall), [*locations datasets hl7 v2 stores messages get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageGetCall), [*locations datasets hl7 v2 stores messages ingest*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageIngestCall), [*locations datasets hl7 v2 stores messages list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessageListCall), [*locations datasets hl7 v2 stores messages patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreMessagePatchCall), [*locations datasets hl7 v2 stores patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StorePatchCall), [*locations datasets hl7 v2 stores set iam 
policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreSetIamPolicyCall), [*locations datasets hl7 v2 stores test iam permissions*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetHl7V2StoreTestIamPermissionCall), [*locations datasets list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetListCall), [*locations datasets operations cancel*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetOperationCancelCall), [*locations datasets operations get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetOperationGetCall), [*locations datasets operations list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetOperationListCall), [*locations datasets patch*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetPatchCall), [*locations datasets set iam policy*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetSetIamPolicyCall), [*locations datasets test iam permissions*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationDatasetTestIamPermissionCall), [*locations get*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationListCall) and [*locations services nlp analyze entities*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/api::ProjectLocationServiceNlpAnalyzeEntityCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/CloudHealthcare)** +* **[Hub](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/CloudHealthcare)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::CallBuilder) -* **[Resources](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::CallBuilder) +* **[Resources](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::Part)** + * **[Parts](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -153,17 +153,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -173,29 +173,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::Delegate) to the -[Method Builder](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::Delegate) to the +[Method Builder](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::RequestValue) and -[decodable](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::RequestValue) and +[decodable](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-healthcare1/5.0.2-beta-1+20221220/google_healthcare1/client::RequestValue) are moved +* [request values](https://docs.rs/google-healthcare1/5.0.2+20221220/google_healthcare1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/healthcare1/src/api.rs b/gen/healthcare1/src/api.rs index 4e4f35bea2..35bb2f05be 100644 --- a/gen/healthcare1/src/api.rs +++ b/gen/healthcare1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudHealthcare { CloudHealthcare { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://healthcare.googleapis.com/".to_string(), _root_url: "https://healthcare.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> CloudHealthcare { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/healthcare1/src/client.rs b/gen/healthcare1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/healthcare1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/healthcare1/src/lib.rs b/gen/healthcare1/src/lib.rs index bb277f73fa..9a24aa6014 100644 --- a/gen/healthcare1/src/lib.rs +++ b/gen/healthcare1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Healthcare* crate version *5.0.2-beta-1+20221220*, where *20221220* is the exact revision of the *healthcare:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Healthcare* crate version *5.0.2+20221220*, where *20221220* is the exact revision of the *healthcare:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Healthcare* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/healthcare). diff --git a/gen/healthcare1_beta1-cli/Cargo.toml b/gen/healthcare1_beta1-cli/Cargo.toml index ec9cfe76fd..58b0f06f8f 100644 --- a/gen/healthcare1_beta1-cli/Cargo.toml +++ b/gen/healthcare1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-healthcare1_beta1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20221220" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Healthcare (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/healthcare1_beta1-cli" @@ -20,13 +20,13 @@ name = "healthcare1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-healthcare1_beta1] path = "../healthcare1_beta1" -version = "4.0.1+20220223" +version = "5.0.2+20221220" + diff --git a/gen/healthcare1_beta1-cli/README.md b/gen/healthcare1_beta1-cli/README.md index 2fda3354f5..178dd14155 100644 --- a/gen/healthcare1_beta1-cli/README.md +++ 
b/gen/healthcare1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Healthcare* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Healthcare* API at revision *20221220*. The CLI is at version *5.0.2*. ```bash healthcare1-beta1 [options] diff --git a/gen/healthcare1_beta1-cli/mkdocs.yml b/gen/healthcare1_beta1-cli/mkdocs.yml index 94b367fa41..4fc35db7df 100644 --- a/gen/healthcare1_beta1-cli/mkdocs.yml +++ b/gen/healthcare1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Healthcare v4.0.1+20220223 +site_name: Cloud Healthcare v5.0.2+20221220 site_url: http://byron.github.io/google-apis-rs/google-healthcare1_beta1-cli site_description: A complete library to interact with Cloud Healthcare (protocol v1beta1) @@ -7,155 +7,156 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/healthcare1_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-datasets-annotation-stores-annotations-create.md', 'Projects', 'Locations Datasets Annotation Stores Annotations Create'] -- ['projects_locations-datasets-annotation-stores-annotations-delete.md', 'Projects', 'Locations Datasets Annotation Stores Annotations Delete'] -- ['projects_locations-datasets-annotation-stores-annotations-get.md', 'Projects', 'Locations Datasets Annotation Stores Annotations Get'] -- ['projects_locations-datasets-annotation-stores-annotations-list.md', 'Projects', 'Locations Datasets Annotation Stores Annotations List'] -- ['projects_locations-datasets-annotation-stores-annotations-patch.md', 'Projects', 'Locations Datasets Annotation Stores Annotations Patch'] -- ['projects_locations-datasets-annotation-stores-create.md', 'Projects', 'Locations Datasets Annotation Stores Create'] -- ['projects_locations-datasets-annotation-stores-delete.md', 'Projects', 
'Locations Datasets Annotation Stores Delete'] -- ['projects_locations-datasets-annotation-stores-evaluate.md', 'Projects', 'Locations Datasets Annotation Stores Evaluate'] -- ['projects_locations-datasets-annotation-stores-export.md', 'Projects', 'Locations Datasets Annotation Stores Export'] -- ['projects_locations-datasets-annotation-stores-get.md', 'Projects', 'Locations Datasets Annotation Stores Get'] -- ['projects_locations-datasets-annotation-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Annotation Stores Get Iam Policy'] -- ['projects_locations-datasets-annotation-stores-import.md', 'Projects', 'Locations Datasets Annotation Stores Import'] -- ['projects_locations-datasets-annotation-stores-list.md', 'Projects', 'Locations Datasets Annotation Stores List'] -- ['projects_locations-datasets-annotation-stores-patch.md', 'Projects', 'Locations Datasets Annotation Stores Patch'] -- ['projects_locations-datasets-annotation-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Annotation Stores Set Iam Policy'] -- ['projects_locations-datasets-annotation-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Annotation Stores Test Iam Permissions'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-create.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions Create'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-delete.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions Delete'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-get.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions Get'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-list.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions List'] -- ['projects_locations-datasets-consent-stores-attribute-definitions-patch.md', 'Projects', 'Locations Datasets Consent Stores Attribute Definitions Patch'] -- 
['projects_locations-datasets-consent-stores-check-data-access.md', 'Projects', 'Locations Datasets Consent Stores Check Data Access'] -- ['projects_locations-datasets-consent-stores-consent-artifacts-create.md', 'Projects', 'Locations Datasets Consent Stores Consent Artifacts Create'] -- ['projects_locations-datasets-consent-stores-consent-artifacts-delete.md', 'Projects', 'Locations Datasets Consent Stores Consent Artifacts Delete'] -- ['projects_locations-datasets-consent-stores-consent-artifacts-get.md', 'Projects', 'Locations Datasets Consent Stores Consent Artifacts Get'] -- ['projects_locations-datasets-consent-stores-consent-artifacts-list.md', 'Projects', 'Locations Datasets Consent Stores Consent Artifacts List'] -- ['projects_locations-datasets-consent-stores-consents-activate.md', 'Projects', 'Locations Datasets Consent Stores Consents Activate'] -- ['projects_locations-datasets-consent-stores-consents-create.md', 'Projects', 'Locations Datasets Consent Stores Consents Create'] -- ['projects_locations-datasets-consent-stores-consents-delete.md', 'Projects', 'Locations Datasets Consent Stores Consents Delete'] -- ['projects_locations-datasets-consent-stores-consents-delete-revision.md', 'Projects', 'Locations Datasets Consent Stores Consents Delete Revision'] -- ['projects_locations-datasets-consent-stores-consents-get.md', 'Projects', 'Locations Datasets Consent Stores Consents Get'] -- ['projects_locations-datasets-consent-stores-consents-list.md', 'Projects', 'Locations Datasets Consent Stores Consents List'] -- ['projects_locations-datasets-consent-stores-consents-list-revisions.md', 'Projects', 'Locations Datasets Consent Stores Consents List Revisions'] -- ['projects_locations-datasets-consent-stores-consents-patch.md', 'Projects', 'Locations Datasets Consent Stores Consents Patch'] -- ['projects_locations-datasets-consent-stores-consents-reject.md', 'Projects', 'Locations Datasets Consent Stores Consents Reject'] -- 
['projects_locations-datasets-consent-stores-consents-revoke.md', 'Projects', 'Locations Datasets Consent Stores Consents Revoke'] -- ['projects_locations-datasets-consent-stores-create.md', 'Projects', 'Locations Datasets Consent Stores Create'] -- ['projects_locations-datasets-consent-stores-delete.md', 'Projects', 'Locations Datasets Consent Stores Delete'] -- ['projects_locations-datasets-consent-stores-evaluate-user-consents.md', 'Projects', 'Locations Datasets Consent Stores Evaluate User Consents'] -- ['projects_locations-datasets-consent-stores-get.md', 'Projects', 'Locations Datasets Consent Stores Get'] -- ['projects_locations-datasets-consent-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Consent Stores Get Iam Policy'] -- ['projects_locations-datasets-consent-stores-list.md', 'Projects', 'Locations Datasets Consent Stores List'] -- ['projects_locations-datasets-consent-stores-patch.md', 'Projects', 'Locations Datasets Consent Stores Patch'] -- ['projects_locations-datasets-consent-stores-query-accessible-data.md', 'Projects', 'Locations Datasets Consent Stores Query Accessible Data'] -- ['projects_locations-datasets-consent-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Consent Stores Set Iam Policy'] -- ['projects_locations-datasets-consent-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Consent Stores Test Iam Permissions'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-archive.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings Archive'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-create.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings Create'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-delete.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings Delete'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-get.md', 'Projects', 'Locations Datasets Consent Stores User Data 
Mappings Get'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-list.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings List'] -- ['projects_locations-datasets-consent-stores-user-data-mappings-patch.md', 'Projects', 'Locations Datasets Consent Stores User Data Mappings Patch'] -- ['projects_locations-datasets-create.md', 'Projects', 'Locations Datasets Create'] -- ['projects_locations-datasets-deidentify.md', 'Projects', 'Locations Datasets Deidentify'] -- ['projects_locations-datasets-delete.md', 'Projects', 'Locations Datasets Delete'] -- ['projects_locations-datasets-dicom-stores-create.md', 'Projects', 'Locations Datasets Dicom Stores Create'] -- ['projects_locations-datasets-dicom-stores-deidentify.md', 'Projects', 'Locations Datasets Dicom Stores Deidentify'] -- ['projects_locations-datasets-dicom-stores-delete.md', 'Projects', 'Locations Datasets Dicom Stores Delete'] -- ['projects_locations-datasets-dicom-stores-export.md', 'Projects', 'Locations Datasets Dicom Stores Export'] -- ['projects_locations-datasets-dicom-stores-get.md', 'Projects', 'Locations Datasets Dicom Stores Get'] -- ['projects_locations-datasets-dicom-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Dicom Stores Get Iam Policy'] -- ['projects_locations-datasets-dicom-stores-import.md', 'Projects', 'Locations Datasets Dicom Stores Import'] -- ['projects_locations-datasets-dicom-stores-list.md', 'Projects', 'Locations Datasets Dicom Stores List'] -- ['projects_locations-datasets-dicom-stores-patch.md', 'Projects', 'Locations Datasets Dicom Stores Patch'] -- ['projects_locations-datasets-dicom-stores-search-for-instances.md', 'Projects', 'Locations Datasets Dicom Stores Search For Instances'] -- ['projects_locations-datasets-dicom-stores-search-for-series.md', 'Projects', 'Locations Datasets Dicom Stores Search For Series'] -- ['projects_locations-datasets-dicom-stores-search-for-studies.md', 'Projects', 'Locations Datasets Dicom Stores Search 
For Studies'] -- ['projects_locations-datasets-dicom-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Dicom Stores Set Iam Policy'] -- ['projects_locations-datasets-dicom-stores-store-instances.md', 'Projects', 'Locations Datasets Dicom Stores Store Instances'] -- ['projects_locations-datasets-dicom-stores-studies-delete.md', 'Projects', 'Locations Datasets Dicom Stores Studies Delete'] -- ['projects_locations-datasets-dicom-stores-studies-retrieve-metadata.md', 'Projects', 'Locations Datasets Dicom Stores Studies Retrieve Metadata'] -- ['projects_locations-datasets-dicom-stores-studies-retrieve-study.md', 'Projects', 'Locations Datasets Dicom Stores Studies Retrieve Study'] -- ['projects_locations-datasets-dicom-stores-studies-search-for-instances.md', 'Projects', 'Locations Datasets Dicom Stores Studies Search For Instances'] -- ['projects_locations-datasets-dicom-stores-studies-search-for-series.md', 'Projects', 'Locations Datasets Dicom Stores Studies Search For Series'] -- ['projects_locations-datasets-dicom-stores-studies-series-delete.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Delete'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-delete.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Delete'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-frames-retrieve-frames.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Frames Retrieve Frames'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-frames-retrieve-rendered.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Frames Retrieve Rendered'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-instance.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Instance'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-metadata.md', 'Projects', 'Locations 
Datasets Dicom Stores Studies Series Instances Retrieve Metadata'] -- ['projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-rendered.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Rendered'] -- ['projects_locations-datasets-dicom-stores-studies-series-retrieve-metadata.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Retrieve Metadata'] -- ['projects_locations-datasets-dicom-stores-studies-series-retrieve-series.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Retrieve Series'] -- ['projects_locations-datasets-dicom-stores-studies-series-search-for-instances.md', 'Projects', 'Locations Datasets Dicom Stores Studies Series Search For Instances'] -- ['projects_locations-datasets-dicom-stores-studies-store-instances.md', 'Projects', 'Locations Datasets Dicom Stores Studies Store Instances'] -- ['projects_locations-datasets-dicom-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Dicom Stores Test Iam Permissions'] -- ['projects_locations-datasets-fhir-stores-configure-search.md', 'Projects', 'Locations Datasets Fhir Stores Configure Search'] -- ['projects_locations-datasets-fhir-stores-create.md', 'Projects', 'Locations Datasets Fhir Stores Create'] -- ['projects_locations-datasets-fhir-stores-deidentify.md', 'Projects', 'Locations Datasets Fhir Stores Deidentify'] -- ['projects_locations-datasets-fhir-stores-delete.md', 'Projects', 'Locations Datasets Fhir Stores Delete'] -- ['projects_locations-datasets-fhir-stores-export.md', 'Projects', 'Locations Datasets Fhir Stores Export'] -- ['projects_locations-datasets-fhir-stores-fhir--concept-map-search-translate.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Concept Map Search Translate'] -- ['projects_locations-datasets-fhir-stores-fhir--concept-map-translate.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Concept Map Translate'] -- ['projects_locations-datasets-fhir-stores-fhir--observation-lastn.md', 
'Projects', 'Locations Datasets Fhir Stores Fhir Observation Lastn'] -- ['projects_locations-datasets-fhir-stores-fhir--patient-everything.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Patient Everything'] -- ['projects_locations-datasets-fhir-stores-fhir--resource-purge.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Resource Purge'] -- ['projects_locations-datasets-fhir-stores-fhir--resource-validate.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Resource Validate'] -- ['projects_locations-datasets-fhir-stores-fhir-capabilities.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Capabilities'] -- ['projects_locations-datasets-fhir-stores-fhir-conditional-delete.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Conditional Delete'] -- ['projects_locations-datasets-fhir-stores-fhir-conditional-patch.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Conditional Patch'] -- ['projects_locations-datasets-fhir-stores-fhir-conditional-update.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Conditional Update'] -- ['projects_locations-datasets-fhir-stores-fhir-create.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Create'] -- ['projects_locations-datasets-fhir-stores-fhir-delete.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Delete'] -- ['projects_locations-datasets-fhir-stores-fhir-execute-bundle.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Execute Bundle'] -- ['projects_locations-datasets-fhir-stores-fhir-history.md', 'Projects', 'Locations Datasets Fhir Stores Fhir History'] -- ['projects_locations-datasets-fhir-stores-fhir-patch.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Patch'] -- ['projects_locations-datasets-fhir-stores-fhir-read.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Read'] -- ['projects_locations-datasets-fhir-stores-fhir-search.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Search'] -- ['projects_locations-datasets-fhir-stores-fhir-search-type.md', 'Projects', 'Locations 
Datasets Fhir Stores Fhir Search Type'] -- ['projects_locations-datasets-fhir-stores-fhir-update.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Update'] -- ['projects_locations-datasets-fhir-stores-fhir-vread.md', 'Projects', 'Locations Datasets Fhir Stores Fhir Vread'] -- ['projects_locations-datasets-fhir-stores-get.md', 'Projects', 'Locations Datasets Fhir Stores Get'] -- ['projects_locations-datasets-fhir-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Fhir Stores Get Iam Policy'] -- ['projects_locations-datasets-fhir-stores-import.md', 'Projects', 'Locations Datasets Fhir Stores Import'] -- ['projects_locations-datasets-fhir-stores-list.md', 'Projects', 'Locations Datasets Fhir Stores List'] -- ['projects_locations-datasets-fhir-stores-patch.md', 'Projects', 'Locations Datasets Fhir Stores Patch'] -- ['projects_locations-datasets-fhir-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Fhir Stores Set Iam Policy'] -- ['projects_locations-datasets-fhir-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Fhir Stores Test Iam Permissions'] -- ['projects_locations-datasets-get.md', 'Projects', 'Locations Datasets Get'] -- ['projects_locations-datasets-get-iam-policy.md', 'Projects', 'Locations Datasets Get Iam Policy'] -- ['projects_locations-datasets-hl7-v2-stores-create.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Create'] -- ['projects_locations-datasets-hl7-v2-stores-delete.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Delete'] -- ['projects_locations-datasets-hl7-v2-stores-export.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Export'] -- ['projects_locations-datasets-hl7-v2-stores-get.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Get'] -- ['projects_locations-datasets-hl7-v2-stores-get-iam-policy.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Get Iam Policy'] -- ['projects_locations-datasets-hl7-v2-stores-import.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Import'] -- 
['projects_locations-datasets-hl7-v2-stores-list.md', 'Projects', 'Locations Datasets Hl7 V2 Stores List'] -- ['projects_locations-datasets-hl7-v2-stores-messages-batch-get.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Batch Get'] -- ['projects_locations-datasets-hl7-v2-stores-messages-create.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Create'] -- ['projects_locations-datasets-hl7-v2-stores-messages-delete.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Delete'] -- ['projects_locations-datasets-hl7-v2-stores-messages-get.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Get'] -- ['projects_locations-datasets-hl7-v2-stores-messages-ingest.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Ingest'] -- ['projects_locations-datasets-hl7-v2-stores-messages-list.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages List'] -- ['projects_locations-datasets-hl7-v2-stores-messages-patch.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Messages Patch'] -- ['projects_locations-datasets-hl7-v2-stores-patch.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Patch'] -- ['projects_locations-datasets-hl7-v2-stores-set-iam-policy.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Set Iam Policy'] -- ['projects_locations-datasets-hl7-v2-stores-test-iam-permissions.md', 'Projects', 'Locations Datasets Hl7 V2 Stores Test Iam Permissions'] -- ['projects_locations-datasets-list.md', 'Projects', 'Locations Datasets List'] -- ['projects_locations-datasets-operations-cancel.md', 'Projects', 'Locations Datasets Operations Cancel'] -- ['projects_locations-datasets-operations-get.md', 'Projects', 'Locations Datasets Operations Get'] -- ['projects_locations-datasets-operations-list.md', 'Projects', 'Locations Datasets Operations List'] -- ['projects_locations-datasets-patch.md', 'Projects', 'Locations Datasets Patch'] -- ['projects_locations-datasets-set-iam-policy.md', 'Projects', 'Locations Datasets Set Iam Policy'] -- 
['projects_locations-datasets-test-iam-permissions.md', 'Projects', 'Locations Datasets Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-services-nlp-analyze-entities.md', 'Projects', 'Locations Services Nlp Analyze Entities'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Datasets Annotation Stores Annotations Create': 'projects_locations-datasets-annotation-stores-annotations-create.md' + - 'Locations Datasets Annotation Stores Annotations Delete': 'projects_locations-datasets-annotation-stores-annotations-delete.md' + - 'Locations Datasets Annotation Stores Annotations Get': 'projects_locations-datasets-annotation-stores-annotations-get.md' + - 'Locations Datasets Annotation Stores Annotations List': 'projects_locations-datasets-annotation-stores-annotations-list.md' + - 'Locations Datasets Annotation Stores Annotations Patch': 'projects_locations-datasets-annotation-stores-annotations-patch.md' + - 'Locations Datasets Annotation Stores Create': 'projects_locations-datasets-annotation-stores-create.md' + - 'Locations Datasets Annotation Stores Delete': 'projects_locations-datasets-annotation-stores-delete.md' + - 'Locations Datasets Annotation Stores Evaluate': 'projects_locations-datasets-annotation-stores-evaluate.md' + - 'Locations Datasets Annotation Stores Export': 'projects_locations-datasets-annotation-stores-export.md' + - 'Locations Datasets Annotation Stores Get': 'projects_locations-datasets-annotation-stores-get.md' + - 'Locations Datasets Annotation Stores Get Iam Policy': 'projects_locations-datasets-annotation-stores-get-iam-policy.md' + - 'Locations Datasets Annotation Stores Import': 'projects_locations-datasets-annotation-stores-import.md' + - 'Locations Datasets Annotation Stores List': 'projects_locations-datasets-annotation-stores-list.md' + - 'Locations Datasets Annotation Stores Patch': 
'projects_locations-datasets-annotation-stores-patch.md' + - 'Locations Datasets Annotation Stores Set Iam Policy': 'projects_locations-datasets-annotation-stores-set-iam-policy.md' + - 'Locations Datasets Annotation Stores Test Iam Permissions': 'projects_locations-datasets-annotation-stores-test-iam-permissions.md' + - 'Locations Datasets Consent Stores Attribute Definitions Create': 'projects_locations-datasets-consent-stores-attribute-definitions-create.md' + - 'Locations Datasets Consent Stores Attribute Definitions Delete': 'projects_locations-datasets-consent-stores-attribute-definitions-delete.md' + - 'Locations Datasets Consent Stores Attribute Definitions Get': 'projects_locations-datasets-consent-stores-attribute-definitions-get.md' + - 'Locations Datasets Consent Stores Attribute Definitions List': 'projects_locations-datasets-consent-stores-attribute-definitions-list.md' + - 'Locations Datasets Consent Stores Attribute Definitions Patch': 'projects_locations-datasets-consent-stores-attribute-definitions-patch.md' + - 'Locations Datasets Consent Stores Check Data Access': 'projects_locations-datasets-consent-stores-check-data-access.md' + - 'Locations Datasets Consent Stores Consent Artifacts Create': 'projects_locations-datasets-consent-stores-consent-artifacts-create.md' + - 'Locations Datasets Consent Stores Consent Artifacts Delete': 'projects_locations-datasets-consent-stores-consent-artifacts-delete.md' + - 'Locations Datasets Consent Stores Consent Artifacts Get': 'projects_locations-datasets-consent-stores-consent-artifacts-get.md' + - 'Locations Datasets Consent Stores Consent Artifacts List': 'projects_locations-datasets-consent-stores-consent-artifacts-list.md' + - 'Locations Datasets Consent Stores Consents Activate': 'projects_locations-datasets-consent-stores-consents-activate.md' + - 'Locations Datasets Consent Stores Consents Create': 'projects_locations-datasets-consent-stores-consents-create.md' + - 'Locations Datasets Consent Stores 
Consents Delete': 'projects_locations-datasets-consent-stores-consents-delete.md' + - 'Locations Datasets Consent Stores Consents Delete Revision': 'projects_locations-datasets-consent-stores-consents-delete-revision.md' + - 'Locations Datasets Consent Stores Consents Get': 'projects_locations-datasets-consent-stores-consents-get.md' + - 'Locations Datasets Consent Stores Consents List': 'projects_locations-datasets-consent-stores-consents-list.md' + - 'Locations Datasets Consent Stores Consents List Revisions': 'projects_locations-datasets-consent-stores-consents-list-revisions.md' + - 'Locations Datasets Consent Stores Consents Patch': 'projects_locations-datasets-consent-stores-consents-patch.md' + - 'Locations Datasets Consent Stores Consents Reject': 'projects_locations-datasets-consent-stores-consents-reject.md' + - 'Locations Datasets Consent Stores Consents Revoke': 'projects_locations-datasets-consent-stores-consents-revoke.md' + - 'Locations Datasets Consent Stores Create': 'projects_locations-datasets-consent-stores-create.md' + - 'Locations Datasets Consent Stores Delete': 'projects_locations-datasets-consent-stores-delete.md' + - 'Locations Datasets Consent Stores Evaluate User Consents': 'projects_locations-datasets-consent-stores-evaluate-user-consents.md' + - 'Locations Datasets Consent Stores Get': 'projects_locations-datasets-consent-stores-get.md' + - 'Locations Datasets Consent Stores Get Iam Policy': 'projects_locations-datasets-consent-stores-get-iam-policy.md' + - 'Locations Datasets Consent Stores List': 'projects_locations-datasets-consent-stores-list.md' + - 'Locations Datasets Consent Stores Patch': 'projects_locations-datasets-consent-stores-patch.md' + - 'Locations Datasets Consent Stores Query Accessible Data': 'projects_locations-datasets-consent-stores-query-accessible-data.md' + - 'Locations Datasets Consent Stores Set Iam Policy': 'projects_locations-datasets-consent-stores-set-iam-policy.md' + - 'Locations Datasets Consent Stores 
Test Iam Permissions': 'projects_locations-datasets-consent-stores-test-iam-permissions.md' + - 'Locations Datasets Consent Stores User Data Mappings Archive': 'projects_locations-datasets-consent-stores-user-data-mappings-archive.md' + - 'Locations Datasets Consent Stores User Data Mappings Create': 'projects_locations-datasets-consent-stores-user-data-mappings-create.md' + - 'Locations Datasets Consent Stores User Data Mappings Delete': 'projects_locations-datasets-consent-stores-user-data-mappings-delete.md' + - 'Locations Datasets Consent Stores User Data Mappings Get': 'projects_locations-datasets-consent-stores-user-data-mappings-get.md' + - 'Locations Datasets Consent Stores User Data Mappings List': 'projects_locations-datasets-consent-stores-user-data-mappings-list.md' + - 'Locations Datasets Consent Stores User Data Mappings Patch': 'projects_locations-datasets-consent-stores-user-data-mappings-patch.md' + - 'Locations Datasets Create': 'projects_locations-datasets-create.md' + - 'Locations Datasets Deidentify': 'projects_locations-datasets-deidentify.md' + - 'Locations Datasets Delete': 'projects_locations-datasets-delete.md' + - 'Locations Datasets Dicom Stores Create': 'projects_locations-datasets-dicom-stores-create.md' + - 'Locations Datasets Dicom Stores Deidentify': 'projects_locations-datasets-dicom-stores-deidentify.md' + - 'Locations Datasets Dicom Stores Delete': 'projects_locations-datasets-dicom-stores-delete.md' + - 'Locations Datasets Dicom Stores Export': 'projects_locations-datasets-dicom-stores-export.md' + - 'Locations Datasets Dicom Stores Get': 'projects_locations-datasets-dicom-stores-get.md' + - 'Locations Datasets Dicom Stores Get Iam Policy': 'projects_locations-datasets-dicom-stores-get-iam-policy.md' + - 'Locations Datasets Dicom Stores Import': 'projects_locations-datasets-dicom-stores-import.md' + - 'Locations Datasets Dicom Stores List': 'projects_locations-datasets-dicom-stores-list.md' + - 'Locations Datasets Dicom Stores 
Patch': 'projects_locations-datasets-dicom-stores-patch.md' + - 'Locations Datasets Dicom Stores Search For Instances': 'projects_locations-datasets-dicom-stores-search-for-instances.md' + - 'Locations Datasets Dicom Stores Search For Series': 'projects_locations-datasets-dicom-stores-search-for-series.md' + - 'Locations Datasets Dicom Stores Search For Studies': 'projects_locations-datasets-dicom-stores-search-for-studies.md' + - 'Locations Datasets Dicom Stores Set Iam Policy': 'projects_locations-datasets-dicom-stores-set-iam-policy.md' + - 'Locations Datasets Dicom Stores Store Instances': 'projects_locations-datasets-dicom-stores-store-instances.md' + - 'Locations Datasets Dicom Stores Studies Delete': 'projects_locations-datasets-dicom-stores-studies-delete.md' + - 'Locations Datasets Dicom Stores Studies Retrieve Metadata': 'projects_locations-datasets-dicom-stores-studies-retrieve-metadata.md' + - 'Locations Datasets Dicom Stores Studies Retrieve Study': 'projects_locations-datasets-dicom-stores-studies-retrieve-study.md' + - 'Locations Datasets Dicom Stores Studies Search For Instances': 'projects_locations-datasets-dicom-stores-studies-search-for-instances.md' + - 'Locations Datasets Dicom Stores Studies Search For Series': 'projects_locations-datasets-dicom-stores-studies-search-for-series.md' + - 'Locations Datasets Dicom Stores Studies Series Delete': 'projects_locations-datasets-dicom-stores-studies-series-delete.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Delete': 'projects_locations-datasets-dicom-stores-studies-series-instances-delete.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Frames Retrieve Frames': 'projects_locations-datasets-dicom-stores-studies-series-instances-frames-retrieve-frames.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Frames Retrieve Rendered': 'projects_locations-datasets-dicom-stores-studies-series-instances-frames-retrieve-rendered.md' + - 'Locations Datasets Dicom 
Stores Studies Series Instances Retrieve Instance': 'projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-instance.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Metadata': 'projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-metadata.md' + - 'Locations Datasets Dicom Stores Studies Series Instances Retrieve Rendered': 'projects_locations-datasets-dicom-stores-studies-series-instances-retrieve-rendered.md' + - 'Locations Datasets Dicom Stores Studies Series Retrieve Metadata': 'projects_locations-datasets-dicom-stores-studies-series-retrieve-metadata.md' + - 'Locations Datasets Dicom Stores Studies Series Retrieve Series': 'projects_locations-datasets-dicom-stores-studies-series-retrieve-series.md' + - 'Locations Datasets Dicom Stores Studies Series Search For Instances': 'projects_locations-datasets-dicom-stores-studies-series-search-for-instances.md' + - 'Locations Datasets Dicom Stores Studies Store Instances': 'projects_locations-datasets-dicom-stores-studies-store-instances.md' + - 'Locations Datasets Dicom Stores Test Iam Permissions': 'projects_locations-datasets-dicom-stores-test-iam-permissions.md' + - 'Locations Datasets Fhir Stores Configure Search': 'projects_locations-datasets-fhir-stores-configure-search.md' + - 'Locations Datasets Fhir Stores Create': 'projects_locations-datasets-fhir-stores-create.md' + - 'Locations Datasets Fhir Stores Deidentify': 'projects_locations-datasets-fhir-stores-deidentify.md' + - 'Locations Datasets Fhir Stores Delete': 'projects_locations-datasets-fhir-stores-delete.md' + - 'Locations Datasets Fhir Stores Export': 'projects_locations-datasets-fhir-stores-export.md' + - 'Locations Datasets Fhir Stores Fhir Concept Map Search Translate': 'projects_locations-datasets-fhir-stores-fhir--concept-map-search-translate.md' + - 'Locations Datasets Fhir Stores Fhir Concept Map Translate': 
'projects_locations-datasets-fhir-stores-fhir--concept-map-translate.md' + - 'Locations Datasets Fhir Stores Fhir Observation Lastn': 'projects_locations-datasets-fhir-stores-fhir--observation-lastn.md' + - 'Locations Datasets Fhir Stores Fhir Patient Everything': 'projects_locations-datasets-fhir-stores-fhir--patient-everything.md' + - 'Locations Datasets Fhir Stores Fhir Resource Purge': 'projects_locations-datasets-fhir-stores-fhir--resource-purge.md' + - 'Locations Datasets Fhir Stores Fhir Resource Validate': 'projects_locations-datasets-fhir-stores-fhir--resource-validate.md' + - 'Locations Datasets Fhir Stores Fhir Capabilities': 'projects_locations-datasets-fhir-stores-fhir-capabilities.md' + - 'Locations Datasets Fhir Stores Fhir Conditional Delete': 'projects_locations-datasets-fhir-stores-fhir-conditional-delete.md' + - 'Locations Datasets Fhir Stores Fhir Conditional Patch': 'projects_locations-datasets-fhir-stores-fhir-conditional-patch.md' + - 'Locations Datasets Fhir Stores Fhir Conditional Update': 'projects_locations-datasets-fhir-stores-fhir-conditional-update.md' + - 'Locations Datasets Fhir Stores Fhir Create': 'projects_locations-datasets-fhir-stores-fhir-create.md' + - 'Locations Datasets Fhir Stores Fhir Delete': 'projects_locations-datasets-fhir-stores-fhir-delete.md' + - 'Locations Datasets Fhir Stores Fhir Execute Bundle': 'projects_locations-datasets-fhir-stores-fhir-execute-bundle.md' + - 'Locations Datasets Fhir Stores Fhir History': 'projects_locations-datasets-fhir-stores-fhir-history.md' + - 'Locations Datasets Fhir Stores Fhir Patch': 'projects_locations-datasets-fhir-stores-fhir-patch.md' + - 'Locations Datasets Fhir Stores Fhir Read': 'projects_locations-datasets-fhir-stores-fhir-read.md' + - 'Locations Datasets Fhir Stores Fhir Search': 'projects_locations-datasets-fhir-stores-fhir-search.md' + - 'Locations Datasets Fhir Stores Fhir Search Type': 'projects_locations-datasets-fhir-stores-fhir-search-type.md' + - 'Locations 
Datasets Fhir Stores Fhir Update': 'projects_locations-datasets-fhir-stores-fhir-update.md' + - 'Locations Datasets Fhir Stores Fhir Vread': 'projects_locations-datasets-fhir-stores-fhir-vread.md' + - 'Locations Datasets Fhir Stores Get': 'projects_locations-datasets-fhir-stores-get.md' + - 'Locations Datasets Fhir Stores Get Iam Policy': 'projects_locations-datasets-fhir-stores-get-iam-policy.md' + - 'Locations Datasets Fhir Stores Import': 'projects_locations-datasets-fhir-stores-import.md' + - 'Locations Datasets Fhir Stores List': 'projects_locations-datasets-fhir-stores-list.md' + - 'Locations Datasets Fhir Stores Patch': 'projects_locations-datasets-fhir-stores-patch.md' + - 'Locations Datasets Fhir Stores Set Iam Policy': 'projects_locations-datasets-fhir-stores-set-iam-policy.md' + - 'Locations Datasets Fhir Stores Test Iam Permissions': 'projects_locations-datasets-fhir-stores-test-iam-permissions.md' + - 'Locations Datasets Get': 'projects_locations-datasets-get.md' + - 'Locations Datasets Get Iam Policy': 'projects_locations-datasets-get-iam-policy.md' + - 'Locations Datasets Hl7 V2 Stores Create': 'projects_locations-datasets-hl7-v2-stores-create.md' + - 'Locations Datasets Hl7 V2 Stores Delete': 'projects_locations-datasets-hl7-v2-stores-delete.md' + - 'Locations Datasets Hl7 V2 Stores Export': 'projects_locations-datasets-hl7-v2-stores-export.md' + - 'Locations Datasets Hl7 V2 Stores Get': 'projects_locations-datasets-hl7-v2-stores-get.md' + - 'Locations Datasets Hl7 V2 Stores Get Iam Policy': 'projects_locations-datasets-hl7-v2-stores-get-iam-policy.md' + - 'Locations Datasets Hl7 V2 Stores Import': 'projects_locations-datasets-hl7-v2-stores-import.md' + - 'Locations Datasets Hl7 V2 Stores List': 'projects_locations-datasets-hl7-v2-stores-list.md' + - 'Locations Datasets Hl7 V2 Stores Messages Batch Get': 'projects_locations-datasets-hl7-v2-stores-messages-batch-get.md' + - 'Locations Datasets Hl7 V2 Stores Messages Create': 
'projects_locations-datasets-hl7-v2-stores-messages-create.md' + - 'Locations Datasets Hl7 V2 Stores Messages Delete': 'projects_locations-datasets-hl7-v2-stores-messages-delete.md' + - 'Locations Datasets Hl7 V2 Stores Messages Get': 'projects_locations-datasets-hl7-v2-stores-messages-get.md' + - 'Locations Datasets Hl7 V2 Stores Messages Ingest': 'projects_locations-datasets-hl7-v2-stores-messages-ingest.md' + - 'Locations Datasets Hl7 V2 Stores Messages List': 'projects_locations-datasets-hl7-v2-stores-messages-list.md' + - 'Locations Datasets Hl7 V2 Stores Messages Patch': 'projects_locations-datasets-hl7-v2-stores-messages-patch.md' + - 'Locations Datasets Hl7 V2 Stores Patch': 'projects_locations-datasets-hl7-v2-stores-patch.md' + - 'Locations Datasets Hl7 V2 Stores Set Iam Policy': 'projects_locations-datasets-hl7-v2-stores-set-iam-policy.md' + - 'Locations Datasets Hl7 V2 Stores Test Iam Permissions': 'projects_locations-datasets-hl7-v2-stores-test-iam-permissions.md' + - 'Locations Datasets List': 'projects_locations-datasets-list.md' + - 'Locations Datasets Operations Cancel': 'projects_locations-datasets-operations-cancel.md' + - 'Locations Datasets Operations Get': 'projects_locations-datasets-operations-get.md' + - 'Locations Datasets Operations List': 'projects_locations-datasets-operations-list.md' + - 'Locations Datasets Patch': 'projects_locations-datasets-patch.md' + - 'Locations Datasets Set Iam Policy': 'projects_locations-datasets-set-iam-policy.md' + - 'Locations Datasets Test Iam Permissions': 'projects_locations-datasets-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Services Nlp Analyze Entities': 'projects_locations-services-nlp-analyze-entities.md' theme: readthedocs diff --git a/gen/healthcare1_beta1-cli/src/client.rs b/gen/healthcare1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- 
a/gen/healthcare1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => 
Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut 
fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/healthcare1_beta1-cli/src/main.rs b/gen/healthcare1_beta1-cli/src/main.rs index 22dc2f2b0d..740e9a6755 100644 --- a/gen/healthcare1_beta1-cli/src/main.rs +++ b/gen/healthcare1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_healthcare1_beta1::{api, Error, oauth2}; +use google_healthcare1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -257,7 +256,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -353,7 +352,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -786,7 +785,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -930,7 +929,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1023,7 +1022,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1452,7 +1451,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1549,7 +1548,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1903,7 +1902,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2301,7 +2300,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2363,7 +2362,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2463,7 +2462,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2976,7 +2975,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3035,7 +3034,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3130,7 +3129,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3725,7 +3724,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3821,7 +3820,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3989,11 +3988,30 @@ where "config.dicom.keep-list.tags" => Some(("config.dicom.keepList.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.dicom.remove-list.tags" => Some(("config.dicom.removeList.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.dicom.skip-id-redaction" => Some(("config.dicom.skipIdRedaction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.dicom-tag-config.options.clean-image.additional-info-types" => Some(("config.dicomTagConfig.options.cleanImage.additionalInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.dicom-tag-config.options.clean-image.exclude-info-types" => Some(("config.dicomTagConfig.options.cleanImage.excludeInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.dicom-tag-config.options.clean-image.text-redaction-mode" => Some(("config.dicomTagConfig.options.cleanImage.textRedactionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.dicom-tag-config.options.primary-ids" => Some(("config.dicomTagConfig.options.primaryIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.dicom-tag-config.profile-type" => Some(("config.dicomTagConfig.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"config.fhir.default-keep-extensions" => Some(("config.fhir.defaultKeepExtensions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.character-mask-config.masking-character" => Some(("config.fhirFieldConfig.options.characterMaskConfig.maskingCharacter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.crypto-hash-config.crypto-key" => Some(("config.fhirFieldConfig.options.cryptoHashConfig.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.crypto-hash-config.kms-wrapped.crypto-key" => Some(("config.fhirFieldConfig.options.cryptoHashConfig.kmsWrapped.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.crypto-hash-config.kms-wrapped.wrapped-key" => Some(("config.fhirFieldConfig.options.cryptoHashConfig.kmsWrapped.wrappedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.crypto-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.kms-wrapped.crypto-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.kmsWrapped.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.kms-wrapped.wrapped-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.kmsWrapped.wrappedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.profile-type" => Some(("config.fhirFieldConfig.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.image.additional-info-types" => Some(("config.image.additionalInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), + "config.image.exclude-info-types" => Some(("config.image.excludeInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.image.text-redaction-mode" => Some(("config.image.textRedactionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.operation-metadata.fhir-output.fhir-store" => Some(("config.operationMetadata.fhirOutput.fhirStore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.text.exclude-info-types" => Some(("config.text.excludeInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.text.profile-type" => Some(("config.text.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-dataset" => Some(("destinationDataset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gcs-config-uri" => Some(("gcsConfigUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotation", "annotation-store-name", "config", "default-keep-extensions", "destination-dataset", "dicom", "fhir", "filter-profile", "image", "keep-list", "remove-list", "skip-id-redaction", "store-quote", "tags", "text-redaction-mode"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-info-types", "annotation", "annotation-store-name", "character-mask-config", "clean-image", "config", "crypto-hash-config", "crypto-key", "date-shift-config", "default-keep-extensions", "destination-dataset", "dicom", "dicom-tag-config", "exclude-info-types", "fhir", "fhir-field-config", "fhir-output", "fhir-store", "filter-profile", "gcs-config-uri", "image", "keep-list", "kms-wrapped", "masking-character", "operation-metadata", "options", "primary-ids", "profile-type", "remove-list", "skip-id-redaction", "store-quote", "tags", "text", "text-redaction-mode", "wrapped-key"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4226,12 +4244,31 @@ where "config.dicom.keep-list.tags" => Some(("config.dicom.keepList.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.dicom.remove-list.tags" => Some(("config.dicom.removeList.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.dicom.skip-id-redaction" => Some(("config.dicom.skipIdRedaction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.dicom-tag-config.options.clean-image.additional-info-types" => Some(("config.dicomTagConfig.options.cleanImage.additionalInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.dicom-tag-config.options.clean-image.exclude-info-types" => Some(("config.dicomTagConfig.options.cleanImage.excludeInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.dicom-tag-config.options.clean-image.text-redaction-mode" => Some(("config.dicomTagConfig.options.cleanImage.textRedactionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.dicom-tag-config.options.primary-ids" => Some(("config.dicomTagConfig.options.primaryIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.dicom-tag-config.profile-type" => Some(("config.dicomTagConfig.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.fhir.default-keep-extensions" => Some(("config.fhir.defaultKeepExtensions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.character-mask-config.masking-character" => Some(("config.fhirFieldConfig.options.characterMaskConfig.maskingCharacter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.crypto-hash-config.crypto-key" => 
Some(("config.fhirFieldConfig.options.cryptoHashConfig.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.crypto-hash-config.kms-wrapped.crypto-key" => Some(("config.fhirFieldConfig.options.cryptoHashConfig.kmsWrapped.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.crypto-hash-config.kms-wrapped.wrapped-key" => Some(("config.fhirFieldConfig.options.cryptoHashConfig.kmsWrapped.wrappedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.crypto-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.kms-wrapped.crypto-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.kmsWrapped.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.kms-wrapped.wrapped-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.kmsWrapped.wrappedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.profile-type" => Some(("config.fhirFieldConfig.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.image.additional-info-types" => Some(("config.image.additionalInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.image.exclude-info-types" => Some(("config.image.excludeInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.image.text-redaction-mode" => Some(("config.image.textRedactionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.operation-metadata.fhir-output.fhir-store" => Some(("config.operationMetadata.fhirOutput.fhirStore", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "config.text.exclude-info-types" => Some(("config.text.excludeInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.text.profile-type" => Some(("config.text.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-store" => Some(("destinationStore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "filter-config.resource-paths-gcs-uri" => Some(("filterConfig.resourcePathsGcsUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gcs-config-uri" => Some(("gcsConfigUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotation", "annotation-store-name", "config", "default-keep-extensions", "destination-store", "dicom", "fhir", "filter-config", "filter-profile", "image", "keep-list", "remove-list", "resource-paths-gcs-uri", "skip-id-redaction", "store-quote", "tags", "text-redaction-mode"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-info-types", "annotation", "annotation-store-name", "character-mask-config", "clean-image", "config", "crypto-hash-config", "crypto-key", "date-shift-config", "default-keep-extensions", "destination-store", "dicom", "dicom-tag-config", "exclude-info-types", "fhir", "fhir-field-config", "fhir-output", "fhir-store", "filter-config", "filter-profile", "gcs-config-uri", "image", "keep-list", "kms-wrapped", "masking-character", "operation-metadata", "options", "primary-ids", "profile-type", "remove-list", "resource-paths-gcs-uri", "skip-id-redaction", "store-quote", "tags", "text", "text-redaction-mode", "wrapped-key"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4492,7 +4529,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4636,7 +4673,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4731,7 +4768,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6169,6 +6206,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "complex-data-type-reference-parsing" => Some(("complexDataTypeReferenceParsing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "default-search-handling-strict" => Some(("defaultSearchHandlingStrict", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "disable-referential-integrity" => Some(("disableReferentialIntegrity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "disable-resource-versioning" => Some(("disableResourceVersioning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -6184,7 +6222,7 @@ where "validation-config.enabled-implementation-guides" => Some(("validationConfig.enabledImplementationGuides", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["default-search-handling-strict", "disable-fhirpath-validation", "disable-profile-validation", "disable-reference-type-validation", "disable-referential-integrity", "disable-required-field-validation", "disable-resource-versioning", "enable-update-create", "enabled-implementation-guides", "labels", "name", "notification-config", "pubsub-topic", "send-for-bulk-import", "validation-config", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["complex-data-type-reference-parsing", "default-search-handling-strict", "disable-fhirpath-validation", "disable-profile-validation", "disable-reference-type-validation", "disable-referential-integrity", "disable-required-field-validation", "disable-resource-versioning", "enable-update-create", "enabled-implementation-guides", "labels", "name", "notification-config", "pubsub-topic", "send-for-bulk-import", "validation-config", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6277,12 +6315,32 
@@ where "config.dicom.keep-list.tags" => Some(("config.dicom.keepList.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.dicom.remove-list.tags" => Some(("config.dicom.removeList.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.dicom.skip-id-redaction" => Some(("config.dicom.skipIdRedaction", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.dicom-tag-config.options.clean-image.additional-info-types" => Some(("config.dicomTagConfig.options.cleanImage.additionalInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.dicom-tag-config.options.clean-image.exclude-info-types" => Some(("config.dicomTagConfig.options.cleanImage.excludeInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.dicom-tag-config.options.clean-image.text-redaction-mode" => Some(("config.dicomTagConfig.options.cleanImage.textRedactionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.dicom-tag-config.options.primary-ids" => Some(("config.dicomTagConfig.options.primaryIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.dicom-tag-config.profile-type" => Some(("config.dicomTagConfig.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.fhir.default-keep-extensions" => Some(("config.fhir.defaultKeepExtensions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.character-mask-config.masking-character" => Some(("config.fhirFieldConfig.options.characterMaskConfig.maskingCharacter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.crypto-hash-config.crypto-key" => Some(("config.fhirFieldConfig.options.cryptoHashConfig.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"config.fhir-field-config.options.crypto-hash-config.kms-wrapped.crypto-key" => Some(("config.fhirFieldConfig.options.cryptoHashConfig.kmsWrapped.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.crypto-hash-config.kms-wrapped.wrapped-key" => Some(("config.fhirFieldConfig.options.cryptoHashConfig.kmsWrapped.wrappedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.crypto-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.kms-wrapped.crypto-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.kmsWrapped.cryptoKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.options.date-shift-config.kms-wrapped.wrapped-key" => Some(("config.fhirFieldConfig.options.dateShiftConfig.kmsWrapped.wrappedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.fhir-field-config.profile-type" => Some(("config.fhirFieldConfig.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.image.additional-info-types" => Some(("config.image.additionalInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.image.exclude-info-types" => Some(("config.image.excludeInfoTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.image.text-redaction-mode" => Some(("config.image.textRedactionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.operation-metadata.fhir-output.fhir-store" => Some(("config.operationMetadata.fhirOutput.fhirStore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.text.exclude-info-types" => Some(("config.text.excludeInfoTypes", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Vec })), + "config.text.profile-type" => Some(("config.text.profileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination-store" => Some(("destinationStore", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gcs-config-uri" => Some(("gcsConfigUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-filter.resources.resources" => Some(("resourceFilter.resources.resources", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "skip-modified-resources" => Some(("skipModifiedResources", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotation", "annotation-store-name", "config", "default-keep-extensions", "destination-store", "dicom", "fhir", "filter-profile", "image", "keep-list", "remove-list", "resource-filter", "resources", "skip-id-redaction", "store-quote", "tags", "text-redaction-mode"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-info-types", "annotation", "annotation-store-name", "character-mask-config", "clean-image", "config", "crypto-hash-config", "crypto-key", "date-shift-config", "default-keep-extensions", "destination-store", "dicom", "dicom-tag-config", "exclude-info-types", "fhir", "fhir-field-config", "fhir-output", "fhir-store", "filter-profile", "gcs-config-uri", "image", "keep-list", "kms-wrapped", "masking-character", "operation-metadata", "options", "primary-ids", "profile-type", "remove-list", "resource-filter", "resources", "skip-id-redaction", "skip-modified-resources", "store-quote", "tags", "text", "text-redaction-mode", "wrapped-key"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6421,12 +6479,14 @@ where "-type" => Some(("_type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"bigquery-destination.dataset-uri" => Some(("bigqueryDestination.datasetUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bigquery-destination.force" => Some(("bigqueryDestination.force", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "bigquery-destination.schema-config.last-updated-partition-config.expiration-ms" => Some(("bigqueryDestination.schemaConfig.lastUpdatedPartitionConfig.expirationMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "bigquery-destination.schema-config.last-updated-partition-config.type" => Some(("bigqueryDestination.schemaConfig.lastUpdatedPartitionConfig.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bigquery-destination.schema-config.recursive-structure-depth" => Some(("bigqueryDestination.schemaConfig.recursiveStructureDepth", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bigquery-destination.schema-config.schema-type" => Some(("bigqueryDestination.schemaConfig.schemaType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bigquery-destination.write-disposition" => Some(("bigqueryDestination.writeDisposition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gcs-destination.uri-prefix" => Some(("gcsDestination.uriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["-since", "-type", "bigquery-destination", "dataset-uri", "force", "gcs-destination", "recursive-structure-depth", "schema-config", "schema-type", "uri-prefix", "write-disposition"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["-since", "-type", "bigquery-destination", "dataset-uri", "expiration-ms", "force", "gcs-destination", "last-updated-partition-config", "recursive-structure-depth", "schema-config", "schema-type", "type", "uri-prefix", "write-disposition"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6693,7 +6753,7 @@ where call = call._page_token(value.unwrap_or("")); }, "-count" => { - call = call._count(arg_from_str(value.unwrap_or("-0"), err, "-count", "integer")); + call = call._count( value.map(|v| arg_from_str(v, err, "-count", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7397,7 +7457,7 @@ where call = call._page_token(value.unwrap_or("")); }, "-count" => { - call = call._count(arg_from_str(value.unwrap_or("-0"), err, "-count", "integer")); + call = call._count( value.map(|v| arg_from_str(v, err, "-count", "int32")).unwrap_or(-0)); }, "-at" => { call = call._at(value.unwrap_or("")); @@ -7954,7 +8014,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8099,7 +8159,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8174,6 +8234,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "complex-data-type-reference-parsing" => Some(("complexDataTypeReferenceParsing", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "default-search-handling-strict" => Some(("defaultSearchHandlingStrict", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "disable-referential-integrity" => Some(("disableReferentialIntegrity", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "disable-resource-versioning" => Some(("disableResourceVersioning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -8189,7 +8250,7 @@ where "validation-config.enabled-implementation-guides" => Some(("validationConfig.enabledImplementationGuides", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["default-search-handling-strict", "disable-fhirpath-validation", "disable-profile-validation", "disable-reference-type-validation", "disable-referential-integrity", "disable-required-field-validation", "disable-resource-versioning", "enable-update-create", "enabled-implementation-guides", "labels", "name", "notification-config", "pubsub-topic", "send-for-bulk-import", "validation-config", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["complex-data-type-reference-parsing", "default-search-handling-strict", "disable-fhirpath-validation", "disable-profile-validation", "disable-reference-type-validation", "disable-referential-integrity", "disable-required-field-validation", "disable-resource-versioning", "enable-update-create", "enabled-implementation-guides", "labels", "name", "notification-config", "pubsub-topic", "send-for-bulk-import", "validation-config", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -8204,7 +8265,7 @@ 
where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8484,7 +8545,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8832,7 +8893,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8976,7 +9037,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -9394,7 +9455,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -9497,7 +9558,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - 
call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -9596,7 +9657,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -9827,7 +9888,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10022,7 +10083,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -10115,7 +10176,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -10398,7 +10459,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -11320,7 +11381,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy 
is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -11420,7 +11481,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -11448,7 +11509,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12070,7 +12131,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12170,7 +12231,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12198,7 +12259,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12582,7 +12643,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -12766,7 +12827,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -13282,7 +13343,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -13505,7 +13566,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir--patient-everything", - Some(r##"Retrieves a Patient resource and resources related to that patient. Implements the FHIR extended operation Patient-everything ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/patient-operations.html#everything), [STU3](https://hl7.org/implement/standards/fhir/STU3/patient-operations.html#everything), [R4](https://hl7.org/implement/standards/fhir/R4/patient-operations.html#everything)). On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the operation. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The resources in scope for the response are: * The patient resource itself. * All the resources directly referenced by the patient resource. * Resources directly referencing the patient resource that meet the inclusion criteria. The inclusion criteria are based on the membership rules in the patient compartment definition ([DSTU2](https://hl7.org/fhir/DSTU2/compartment-patient.html), [STU3](http://www.hl7.org/fhir/stu3/compartmentdefinition-patient.html), [R4](https://hl7.org/fhir/R4/compartmentdefinition-patient.html)), which details the eligible resource types and referencing search parameters. For samples that show how to call `Patient-everything`, see [Getting all patient compartment resources](/healthcare/docs/how-tos/fhir-resources#getting_all_patient_compartment_resources)."##), + Some(r##"Retrieves a Patient resource and resources related to that patient. 
Implements the FHIR extended operation Patient-everything ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/patient-operations.html#everything), [STU3](https://hl7.org/implement/standards/fhir/STU3/patient-operations.html#everything), [R4](https://hl7.org/implement/standards/fhir/R4/patient-operations.html#everything)). On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the operation. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The resources in scope for the response are: * The patient resource itself. * All the resources directly referenced by the patient resource. * Resources directly referencing the patient resource that meet the inclusion criteria. The inclusion criteria are based on the membership rules in the patient compartment definition ([DSTU2](https://hl7.org/fhir/DSTU2/compartment-patient.html), [STU3](http://www.hl7.org/fhir/stu3/compartmentdefinition-patient.html), [R4](https://hl7.org/fhir/R4/compartmentdefinition-patient.html)), which details the eligible resource types and referencing search parameters. For samples that show how to call `Patient-everything`, see [Getting all patient compartment resources](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#getting_all_patient_compartment_resources)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir--patient-everything", vec![ (Some(r##"name"##), @@ -13527,7 +13588,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir--resource-purge", - Some(r##"Deletes all the historical versions of a resource (excluding the current version) from the FHIR store. 
To remove all versions of a resource, first delete the current version and then call this method. This is not a FHIR standard operation. For samples that show how to call `Resource-purge`, see [Deleting historical versions of a FHIR resource](/healthcare/docs/how-tos/fhir-resources#deleting_historical_versions_of_a_fhir_resource)."##), + Some(r##"Deletes all the historical versions of a resource (excluding the current version) from the FHIR store. To remove all versions of a resource, first delete the current version and then call this method. This is not a FHIR standard operation. For samples that show how to call `Resource-purge`, see [Deleting historical versions of a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#deleting_historical_versions_of_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir--resource-purge", vec![ (Some(r##"name"##), @@ -13605,7 +13666,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-conditional-delete", - Some(r##"Deletes FHIR resources that match a search query. Implements the FHIR standard conditional delete interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.12.1), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.13.1), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#3.1.0.7.1)). If multiple resources match, all matching resources are deleted. Search terms are provided as query parameters following the same pattern as the search method. Note: Unless resource versioning is disabled by setting the disable_resource_versioning flag on the FHIR store, the deleted resources are moved to a history repository that can still be retrieved through vread and related methods, unless they are removed by the purge method. 
This method requires the`healthcare.fhirStores.searchResources` and `healthcare.fhirResources.delete` permissions on the parent FHIR store. For samples that show how to call `conditionalDelete`, see [Conditionally deleting a FHIR resource](/healthcare/docs/how-tos/fhir-resources#conditionally_deleting_a_fhir_resource)."##), + Some(r##"Deletes FHIR resources that match a search query. Implements the FHIR standard conditional delete interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.12.1), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.13.1), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#3.1.0.7.1)). If multiple resources match, all matching resources are deleted. Search terms are provided as query parameters following the same pattern as the search method. Not all FHIR resources that match the search query might be deleted because, by default, a maximum of 100 FHIR resources can be deleted. The number of FHIR resources that can be deleted depends on the page size of the returned resources, which you can control using the `_count` query parameter. Even when using `_count`, you can delete a maximum 1,000 FHIR resources per each call of `conditionalDelete`. Note: Unless resource versioning is disabled by setting the disable_resource_versioning flag on the FHIR store, the deleted resources are moved to a history repository that can still be retrieved through vread and related methods, unless they are removed by the purge method. This method requires the`healthcare.fhirStores.searchResources` and `healthcare.fhirResources.delete` permissions on the parent FHIR store. 
For samples that show how to call `conditionalDelete`, see [Conditionally deleting a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#conditionally_deleting_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-conditional-delete", vec![ (Some(r##"parent"##), @@ -13633,7 +13694,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-conditional-patch", - Some(r##"If a resource is found based on the search criteria specified in the query parameters, updates part of that resource by applying the operations specified in a [JSON Patch](http://jsonpatch.com/) document. Implements the FHIR standard conditional patch interaction ([STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#patch), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#patch)). DSTU2 doesn't define a conditional patch method, but the server supports it in the same way it supports STU3. Search terms are provided as query parameters following the same pattern as the search method. If the search criteria identify more than one match, the request returns a `412 Precondition Failed` error. The request body must contain a JSON Patch document, and the request headers must contain `Content-Type: application/json-patch+json`. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. This method requires the`healthcare.fhirStores.searchResources` permission on the parent FHIR store and the `healthcare.fhirResources.patch` permission on the requested FHIR store resource. 
For samples that show how to call `conditionalPatch`, see [Conditionally patching a FHIR resource](/healthcare/docs/how-tos/fhir-resources#conditionally_patching_a_fhir_resource)."##), + Some(r##"If a resource is found based on the search criteria specified in the query parameters, updates part of that resource by applying the operations specified in a [JSON Patch](http://jsonpatch.com/) document. Implements the FHIR standard conditional patch interaction ([STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#patch), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#patch)). DSTU2 doesn't define a conditional patch method, but the server supports it in the same way it supports STU3. Search terms are provided as query parameters following the same pattern as the search method. If the search criteria identify more than one match, the request returns a `412 Precondition Failed` error. The request body must contain a JSON Patch document, and the request headers must contain `Content-Type: application/json-patch+json`. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. This method requires the`healthcare.fhirStores.searchResources` permission on the parent FHIR store and the `healthcare.fhirResources.patch` permission on the requested FHIR store resource. 
For samples that show how to call `conditionalPatch`, see [Conditionally patching a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#conditionally_patching_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-conditional-patch", vec![ (Some(r##"parent"##), @@ -13667,7 +13728,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-conditional-update", - Some(r##"If a resource is found based on the search criteria specified in the query parameters, updates the entire contents of that resource. Implements the FHIR standard conditional update interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.10.2), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#cond-update), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#cond-update)). Search terms are provided as query parameters following the same pattern as the search method. If the search criteria identify more than one match, the request returns a `412 Precondition Failed` error. If the search criteria identify zero matches, and the supplied resource body contains an `id`, and the FHIR store has enable_update_create set, creates the resource with the client-specified ID. It is strongly advised not to include or encode any sensitive data such as patient identifiers in client-specified resource IDs. Those IDs are part of the FHIR resource path recorded in Cloud Audit Logs and Pub/Sub notifications. Those IDs can also be contained in reference fields within other resources. If the search criteria identify zero matches, and the supplied resource body does not contain an `id`, the resource is created with a server-assigned ID as per the create method. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. 
On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. This method requires the`healthcare.fhirStores.searchResources` and `healthcare.fhirResources.update` permissions on the parent FHIR store. For samples that show how to call `conditionalUpdate`, see [Conditionally updating a FHIR resource](/healthcare/docs/how-tos/fhir-resources#conditionally_updating_a_fhir_resource)."##), + Some(r##"If a resource is found based on the search criteria specified in the query parameters, updates the entire contents of that resource. Implements the FHIR standard conditional update interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.10.2), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#cond-update), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#cond-update)). Search terms are provided as query parameters following the same pattern as the search method. If the search criteria identify more than one match, the request returns a `412 Precondition Failed` error. If the search criteria identify zero matches, and the supplied resource body contains an `id`, and the FHIR store has enable_update_create set, creates the resource with the client-specified ID. It is strongly advised not to include or encode any sensitive data such as patient identifiers in client-specified resource IDs. Those IDs are part of the FHIR resource path recorded in Cloud Audit Logs and Pub/Sub notifications. Those IDs can also be contained in reference fields within other resources. 
If the search criteria identify zero matches, and the supplied resource body does not contain an `id`, the resource is created with a server-assigned ID as per the create method. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. This method requires the`healthcare.fhirStores.searchResources` and `healthcare.fhirResources.update` permissions on the parent FHIR store. For samples that show how to call `conditionalUpdate`, see [Conditionally updating a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#conditionally_updating_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-conditional-update", vec![ (Some(r##"parent"##), @@ -13701,7 +13762,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-create", - Some(r##"Creates a FHIR resource. Implements the FHIR standard create interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#create), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#create), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#create)), which creates a new resource with a server-assigned resource ID. 
Also supports the FHIR standard conditional create interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#ccreate), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#ccreate), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#ccreate)), specified by supplying an `If-None-Exist` header containing a FHIR search query. If no resources match this search query, the server processes the create operation as normal. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. On success, the response body contains a JSON-encoded representation of the resource as it was created on the server, including the server-assigned resource ID and version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `create`, see [Creating a FHIR resource](/healthcare/docs/how-tos/fhir-resources#creating_a_fhir_resource)."##), + Some(r##"Creates a FHIR resource. Implements the FHIR standard create interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#create), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#create), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#create)), which creates a new resource with a server-assigned resource ID. Also supports the FHIR standard conditional create interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#ccreate), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#ccreate), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#ccreate)), specified by supplying an `If-None-Exist` header containing a FHIR search query. If no resources match this search query, the server processes the create operation as normal. 
The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. On success, the response body contains a JSON-encoded representation of the resource as it was created on the server, including the server-assigned resource ID and version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `create`, see [Creating a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#creating_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-create", vec![ (Some(r##"parent"##), @@ -13735,7 +13796,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-delete", - Some(r##"Deletes a FHIR resource. Implements the FHIR standard delete interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#delete), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#delete), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#delete)). Note: Unless resource versioning is disabled by setting the disable_resource_versioning flag on the FHIR store, the deleted resources are moved to a history repository that can still be retrieved through vread and related methods, unless they are removed by the purge method. For samples that show how to call `delete`, see [Deleting a FHIR resource](/healthcare/docs/how-tos/fhir-resources#deleting_a_fhir_resource)."##), + Some(r##"Deletes a FHIR resource. 
Implements the FHIR standard delete interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#delete), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#delete), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#delete)). Note: Unless resource versioning is disabled by setting the disable_resource_versioning flag on the FHIR store, the deleted resources are moved to a history repository that can still be retrieved through vread and related methods, unless they are removed by the purge method. For samples that show how to call `delete`, see [Deleting a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#deleting_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-delete", vec![ (Some(r##"name"##), @@ -13757,7 +13818,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-execute-bundle", - Some(r##"Executes all the requests in the given Bundle. Implements the FHIR standard batch/transaction interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#transaction), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#transaction), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#transaction)). Supports all interactions within a bundle, except search. This method accepts Bundles of type `batch` and `transaction`, processing them according to the batch processing rules ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.16.1), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.17.1), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#brules)) and transaction processing rules ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.16.2), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.17.2), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#trules)). 
The request body must contain a JSON-encoded FHIR `Bundle` resource, and the request headers must contain `Content-Type: application/fhir+json`. For a batch bundle or a successful transaction the response body contains a JSON-encoded representation of a `Bundle` resource of type `batch-response` or `transaction-response` containing one entry for each entry in the request, with the outcome of processing the entry. In the case of an error for a transaction bundle, the response body contains a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. This method requires permission for executing the requests in the bundle. The `executeBundle` permission grants permission to execute the request in the bundle but you must grant sufficient permissions to execute the individual requests in the bundle. For example, if the bundle contains a `create` request, you must have permission to execute the `create` request. Logging is available for the `executeBundle` permission. For samples that show how to call `executeBundle`, see [Managing FHIR resources using FHIR bundles](/healthcare/docs/how-tos/fhir-bundles)."##), + Some(r##"Executes all the requests in the given Bundle. Implements the FHIR standard batch/transaction interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#transaction), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#transaction), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#transaction)). Supports all interactions within a bundle, except search. 
This method accepts Bundles of type `batch` and `transaction`, processing them according to the batch processing rules ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.16.1), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.17.1), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#brules)) and transaction processing rules ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#2.1.0.16.2), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#2.21.0.17.2), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#trules)). The request body must contain a JSON-encoded FHIR `Bundle` resource, and the request headers must contain `Content-Type: application/fhir+json`. For a batch bundle or a successful transaction, the response body contains a JSON-encoded representation of a `Bundle` resource of type `batch-response` or `transaction-response` containing one entry for each entry in the request, with the outcome of processing the entry. In the case of an error for a transaction bundle, the response body contains a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. This method checks permissions for each request in the bundle. The `executeBundle` permission is required to call this method, but you must also grant sufficient permissions to execute the individual requests in the bundle. For example, if the bundle contains a request to create a FHIR resource, the caller must also have been granted the `healthcare.fhirResources.create` permission. You can use audit logs to view the permissions for `executeBundle` and each request in the bundle. For more information, see [Viewing Cloud Audit logs](https://cloud.google.com/healthcare-api/docs/how-tos/audit-logging). 
For samples that show how to call `executeBundle`, see [Managing FHIR resources using FHIR bundles](https://cloud.google.com/healthcare/docs/how-tos/fhir-bundles)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-execute-bundle", vec![ (Some(r##"parent"##), @@ -13785,7 +13846,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-history", - Some(r##"Lists all the versions of a resource (including the current version and deleted versions) from the FHIR store. Implements the per-resource form of the FHIR standard history interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#history), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#history), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#history)). On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `history`, containing the version history sorted from most recent to oldest versions. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `history`, see [Listing FHIR resource versions](/healthcare/docs/how-tos/fhir-resources#listing_fhir_resource_versions)."##), + Some(r##"Lists all the versions of a resource (including the current version and deleted versions) from the FHIR store. Implements the per-resource form of the FHIR standard history interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#history), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#history), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#history)). 
On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `history`, containing the version history sorted from most recent to oldest versions. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `history`, see [Listing FHIR resource versions](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#listing_fhir_resource_versions)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-history", vec![ (Some(r##"name"##), @@ -13807,7 +13868,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-patch", - Some(r##"Updates part of an existing resource by applying the operations specified in a [JSON Patch](http://jsonpatch.com/) document. Implements the FHIR standard patch interaction ([STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#patch), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#patch)). DSTU2 doesn't define a patch method, but the server supports it in the same way it supports STU3. The request body must contain a JSON Patch document, and the request headers must contain `Content-Type: application/json-patch+json`. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. 
For samples that show how to call `patch`, see [Patching a FHIR resource](/healthcare/docs/how-tos/fhir-resources#patching_a_fhir_resource)."##), + Some(r##"Updates part of an existing resource by applying the operations specified in a [JSON Patch](http://jsonpatch.com/) document. Implements the FHIR standard patch interaction ([STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#patch), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#patch)). DSTU2 doesn't define a patch method, but the server supports it in the same way it supports STU3. The request body must contain a JSON Patch document, and the request headers must contain `Content-Type: application/json-patch+json`. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `patch`, see [Patching a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#patching_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-patch", vec![ (Some(r##"name"##), @@ -13835,7 +13896,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-read", - Some(r##"Gets the contents of a FHIR resource. Implements the FHIR standard read interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#read), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#read), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#read)). 
Also supports the FHIR standard conditional read interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#cread), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#cread), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#cread)) specified by supplying an `If-Modified-Since` header with a date/time value or an `If-None-Match` header with an ETag value. On success, the response body contains a JSON-encoded representation of the resource. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `read`, see [Getting a FHIR resource](/healthcare/docs/how-tos/fhir-resources#getting_a_fhir_resource)."##), + Some(r##"Gets the contents of a FHIR resource. Implements the FHIR standard read interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#read), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#read), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#read)). Also supports the FHIR standard conditional read interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#cread), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#cread), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#cread)) specified by supplying an `If-Modified-Since` header with a date/time value or an `If-None-Match` header with an ETag value. On success, the response body contains a JSON-encoded representation of the resource. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. 
For samples that show how to call `read`, see [Getting a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#getting_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-read", vec![ (Some(r##"name"##), @@ -13857,7 +13918,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-search", - Some(r##"Searches for resources in the given FHIR store according to criteria specified as query parameters. Implements the FHIR standard search interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#search), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#search), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#search)) using the search semantics described in the FHIR Search specification ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/search.html), [STU3](https://hl7.org/implement/standards/fhir/STU3/search.html), [R4](https://hl7.org/implement/standards/fhir/R4/search.html)). Supports four methods of search defined by the specification: * `GET [base]?[parameters]` to search across all resources. * `GET [base]/[type]?[parameters]` to search resources of a specified type. * `POST [base]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method across all resources. * `POST [base]/[type]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method for the specified type. The `GET` and `POST` methods do not support compartment searches. The `POST` method does not support `application/x-www-form-urlencoded` search parameters. On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the search. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. 
If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The server's capability statement, retrieved through capabilities, indicates what search parameters are supported on each FHIR resource. A list of all search parameters defined by the specification can be found in the FHIR Search Parameter Registry ([STU3](https://hl7.org/implement/standards/fhir/STU3/searchparameter-registry.html), [R4](https://hl7.org/implement/standards/fhir/R4/searchparameter-registry.html)). FHIR search parameters for DSTU2 can be found on each resource's definition page. Supported search modifiers: `:missing`, `:exact`, `:contains`, `:text`, `:in`, `:not-in`, `:above`, `:below`, `:[type]`, `:not`, and `:recurse`. Supported search result parameters: `_sort`, `_count`, `_include`, `_revinclude`, `_summary=text`, `_summary=data`, and `_elements`. The maximum number of search results returned defaults to 100, which can be overridden by the `_count` parameter up to a maximum limit of 1000. If there are additional results, the returned `Bundle` contains a link of `relation` "next", which has a `_page_token` parameter for an opaque pagination token that can be used to retrieve the next page. Resources with a total size larger than 5MB or a field count larger than 50,000 might not be fully searchable as the server might trim its generated search index in those cases. Note: FHIR resources are indexed asynchronously, so there might be a slight delay between the time a resource is created or changes and when the change is reflected in search results. For samples and detailed information, see [Searching for FHIR resources](/healthcare/docs/how-tos/fhir-search) and [Advanced FHIR search features](/healthcare/docs/how-tos/fhir-advanced-search)."##), + Some(r##"Searches for resources in the given FHIR store according to criteria specified as query parameters. 
Implements the FHIR standard search interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#search), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#search), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#search)) using the search semantics described in the FHIR Search specification ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/search.html), [STU3](https://hl7.org/implement/standards/fhir/STU3/search.html), [R4](https://hl7.org/implement/standards/fhir/R4/search.html)). Supports four methods of search defined by the specification: * `GET [base]?[parameters]` to search across all resources. * `GET [base]/[type]?[parameters]` to search resources of a specified type. * `POST [base]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method across all resources. * `POST [base]/[type]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method for the specified type. The `GET` and `POST` methods do not support compartment searches. The `POST` method does not support `application/x-www-form-urlencoded` search parameters. On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the search. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The server's capability statement, retrieved through capabilities, indicates what search parameters are supported on each FHIR resource. A list of all search parameters defined by the specification can be found in the FHIR Search Parameter Registry ([STU3](https://hl7.org/implement/standards/fhir/STU3/searchparameter-registry.html), [R4](https://hl7.org/implement/standards/fhir/R4/searchparameter-registry.html)). 
FHIR search parameters for DSTU2 can be found on each resource's definition page. Supported search modifiers: `:missing`, `:exact`, `:contains`, `:text`, `:in`, `:not-in`, `:above`, `:below`, `:[type]`, `:not`, and `recurse` (DSTU2 and STU3) or `:iterate` (R4). Supported search result parameters: `_sort`, `_count`, `_include`, `_revinclude`, `_summary=text`, `_summary=data`, and `_elements`. The maximum number of search results returned defaults to 100, which can be overridden by the `_count` parameter up to a maximum limit of 1000. If there are additional results, the returned `Bundle` contains a link of `relation` "next", which has a `_page_token` parameter for an opaque pagination token that can be used to retrieve the next page. Resources with a total size larger than 5MB or a field count larger than 50,000 might not be fully searchable as the server might trim its generated search index in those cases. Note: FHIR resources are indexed asynchronously, so there might be a slight delay between the time a resource is created or changes and when the change is reflected in search results. For samples and detailed information, see [Searching for FHIR resources](https://cloud.google.com/healthcare/docs/how-tos/fhir-search) and [Advanced FHIR search features](https://cloud.google.com/healthcare/docs/how-tos/fhir-advanced-search)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-search", vec![ (Some(r##"parent"##), @@ -13885,7 +13946,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-search-type", - Some(r##"Searches for resources in the given FHIR store according to criteria specified as query parameters. 
Implements the FHIR standard search interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#search), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#search), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#search)) using the search semantics described in the FHIR Search specification ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/search.html), [STU3](https://hl7.org/implement/standards/fhir/STU3/search.html), [R4](https://hl7.org/implement/standards/fhir/R4/search.html)). Supports four methods of search defined by the specification: * `GET [base]?[parameters]` to search across all resources. * `GET [base]/[type]?[parameters]` to search resources of a specified type. * `POST [base]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method across all resources. * `POST [base]/[type]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method for the specified type. The `GET` and `POST` methods do not support compartment searches. The `POST` method does not support `application/x-www-form-urlencoded` search parameters. On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the search. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The server's capability statement, retrieved through capabilities, indicates what search parameters are supported on each FHIR resource. A list of all search parameters defined by the specification can be found in the FHIR Search Parameter Registry ([STU3](https://hl7.org/implement/standards/fhir/STU3/searchparameter-registry.html), [R4](https://hl7.org/implement/standards/fhir/R4/searchparameter-registry.html)). 
FHIR search parameters for DSTU2 can be found on each resource's definition page. Supported search modifiers: `:missing`, `:exact`, `:contains`, `:text`, `:in`, `:not-in`, `:above`, `:below`, `:[type]`, `:not`, and `:recurse`. Supported search result parameters: `_sort`, `_count`, `_include`, `_revinclude`, `_summary=text`, `_summary=data`, and `_elements`. The maximum number of search results returned defaults to 100, which can be overridden by the `_count` parameter up to a maximum limit of 1000. If there are additional results, the returned `Bundle` contains a link of `relation` "next", which has a `_page_token` parameter for an opaque pagination token that can be used to retrieve the next page. Resources with a total size larger than 5MB or a field count larger than 50,000 might not be fully searchable as the server might trim its generated search index in those cases. Note: FHIR resources are indexed asynchronously, so there might be a slight delay between the time a resource is created or changes and when the change is reflected in search results. For samples and detailed information, see [Searching for FHIR resources](/healthcare/docs/how-tos/fhir-search) and [Advanced FHIR search features](/healthcare/docs/how-tos/fhir-advanced-search)."##), + Some(r##"Searches for resources in the given FHIR store according to criteria specified as query parameters. Implements the FHIR standard search interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#search), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#search), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#search)) using the search semantics described in the FHIR Search specification ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/search.html), [STU3](https://hl7.org/implement/standards/fhir/STU3/search.html), [R4](https://hl7.org/implement/standards/fhir/R4/search.html)). 
Supports four methods of search defined by the specification: * `GET [base]?[parameters]` to search across all resources. * `GET [base]/[type]?[parameters]` to search resources of a specified type. * `POST [base]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method across all resources. * `POST [base]/[type]/_search?[parameters]` as an alternate form having the same semantics as the `GET` method for the specified type. The `GET` and `POST` methods do not support compartment searches. The `POST` method does not support `application/x-www-form-urlencoded` search parameters. On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the search. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. The server's capability statement, retrieved through capabilities, indicates what search parameters are supported on each FHIR resource. A list of all search parameters defined by the specification can be found in the FHIR Search Parameter Registry ([STU3](https://hl7.org/implement/standards/fhir/STU3/searchparameter-registry.html), [R4](https://hl7.org/implement/standards/fhir/R4/searchparameter-registry.html)). FHIR search parameters for DSTU2 can be found on each resource's definition page. Supported search modifiers: `:missing`, `:exact`, `:contains`, `:text`, `:in`, `:not-in`, `:above`, `:below`, `:[type]`, `:not`, and `recurse` (DSTU2 and STU3) or `:iterate` (R4). Supported search result parameters: `_sort`, `_count`, `_include`, `_revinclude`, `_summary=text`, `_summary=data`, and `_elements`. The maximum number of search results returned defaults to 100, which can be overridden by the `_count` parameter up to a maximum limit of 1000. 
If there are additional results, the returned `Bundle` contains a link of `relation` "next", which has a `_page_token` parameter for an opaque pagination token that can be used to retrieve the next page. Resources with a total size larger than 5MB or a field count larger than 50,000 might not be fully searchable as the server might trim its generated search index in those cases. Note: FHIR resources are indexed asynchronously, so there might be a slight delay between the time a resource is created or changes and when the change is reflected in search results. For samples and detailed information, see [Searching for FHIR resources](https://cloud.google.com/healthcare/docs/how-tos/fhir-search) and [Advanced FHIR search features](https://cloud.google.com/healthcare/docs/how-tos/fhir-advanced-search)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-search-type", vec![ (Some(r##"parent"##), @@ -13919,7 +13980,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-update", - Some(r##"Updates the entire contents of a resource. Implements the FHIR standard update interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#update), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#update), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#update)). If the specified resource does not exist and the FHIR store has enable_update_create set, creates the resource with the client-specified ID. It is strongly advised not to include or encode any sensitive data such as patient identifiers in client-specified resource IDs. Those IDs are part of the FHIR resource path recorded in Cloud Audit Logs and Pub/Sub notifications. Those IDs can also be contained in reference fields within other resources. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. 
The resource must contain an `id` element having an identical value to the ID in the REST path of the request. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `update`, see [Updating a FHIR resource](/healthcare/docs/how-tos/fhir-resources#updating_a_fhir_resource)."##), + Some(r##"Updates the entire contents of a resource. Implements the FHIR standard update interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#update), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#update), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#update)). If the specified resource does not exist and the FHIR store has enable_update_create set, creates the resource with the client-specified ID. It is strongly advised not to include or encode any sensitive data such as patient identifiers in client-specified resource IDs. Those IDs are part of the FHIR resource path recorded in Cloud Audit Logs and Pub/Sub notifications. Those IDs can also be contained in reference fields within other resources. The request body must contain a JSON-encoded FHIR resource, and the request headers must contain `Content-Type: application/fhir+json`. The resource must contain an `id` element having an identical value to the ID in the REST path of the request. On success, the response body contains a JSON-encoded representation of the updated resource, including the server-assigned version ID. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. 
If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `update`, see [Updating a FHIR resource](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#updating_a_fhir_resource)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-update", vec![ (Some(r##"name"##), @@ -13947,7 +14008,7 @@ async fn main() { Some(false)), ]), ("locations-datasets-fhir-stores-fhir-vread", - Some(r##"Gets the contents of a version (current or historical) of a FHIR resource by version ID. Implements the FHIR standard vread interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#vread), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#vread), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#vread)). On success, the response body contains a JSON-encoded representation of the resource. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `vread`, see [Retrieving a FHIR resource version](/healthcare/docs/how-tos/fhir-resources#retrieving_a_fhir_resource_version)."##), + Some(r##"Gets the contents of a version (current or historical) of a FHIR resource by version ID. Implements the FHIR standard vread interaction ([DSTU2](https://hl7.org/implement/standards/fhir/DSTU2/http.html#vread), [STU3](https://hl7.org/implement/standards/fhir/STU3/http.html#vread), [R4](https://hl7.org/implement/standards/fhir/R4/http.html#vread)). On success, the response body contains a JSON-encoded representation of the resource. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. 
If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. For samples that show how to call `vread`, see [Retrieving a FHIR resource version](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#retrieving_a_fhir_resource_version)."##), "Details at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli/projects_locations-datasets-fhir-stores-fhir-vread", vec![ (Some(r##"name"##), @@ -13996,7 +14057,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14096,7 +14157,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14124,7 +14185,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14174,7 +14235,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14296,7 +14357,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14512,7 +14573,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Resource name of the Message, of the form `projects/{project_id}/datasets/{dataset_id}/hl7V2Stores/{hl7_v2_store_id}/messages/{message_id}`. Assigned by the server."##), + Some(r##"Resource name of the Message, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/hl7V2Stores/{hl7_v2_store_id}/messages/{message_id}`. Assigned by the server."##), Some(true), Some(false)), @@ -14540,7 +14601,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Resource name of the HL7v2 store, of the form `projects/{project_id}/datasets/{dataset_id}/hl7V2Stores/{hl7v2_store_id}`."##), + Some(r##"Resource name of the HL7v2 store, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/hl7V2Stores/{hl7v2_store_id}`."##), Some(true), Some(false)), @@ -14568,7 +14629,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14596,7 +14657,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14746,7 +14807,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14774,7 +14835,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -14874,7 +14935,7 @@ async fn main() { let mut app = App::new("healthcare1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20221220") .about("Manage, store, and access healthcare data in Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_healthcare1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/healthcare1_beta1/Cargo.toml b/gen/healthcare1_beta1/Cargo.toml index 7353eccad9..bb7cbf9872 100644 --- a/gen/healthcare1_beta1/Cargo.toml +++ b/gen/healthcare1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-healthcare1_beta1" -version = "5.0.2-beta-1+20221220" +version = "5.0.2+20221220" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Healthcare (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/healthcare1_beta1" homepage = "https://cloud.google.com/healthcare" -documentation = "https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220" +documentation = "https://docs.rs/google-healthcare1_beta1/5.0.2+20221220" license = "MIT" keywords = ["healthcare", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/healthcare1_beta1/README.md b/gen/healthcare1_beta1/README.md index 91319d2527..55dbcd2a3a 100644 --- a/gen/healthcare1_beta1/README.md +++ b/gen/healthcare1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-healthcare1_beta1` library allows access to all features of the *Google Cloud Healthcare* service. -This documentation was generated from *Cloud Healthcare* crate version *5.0.2-beta-1+20221220*, where *20221220* is the exact revision of the *healthcare:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Healthcare* crate version *5.0.2+20221220*, where *20221220* is the exact revision of the *healthcare:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Healthcare* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/healthcare). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/CloudHealthcare) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/CloudHealthcare) ... * projects - * [*locations datasets annotation stores annotations create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationCreateCall), [*locations datasets annotation stores annotations delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationDeleteCall), [*locations datasets annotation stores annotations get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationGetCall), [*locations datasets annotation stores annotations list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationListCall), [*locations datasets annotation stores annotations patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationPatchCall), [*locations datasets annotation stores 
create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreCreateCall), [*locations datasets annotation stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreDeleteCall), [*locations datasets annotation stores evaluate*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreEvaluateCall), [*locations datasets annotation stores export*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreExportCall), [*locations datasets annotation stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreGetCall), [*locations datasets annotation stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreGetIamPolicyCall), [*locations datasets annotation stores import*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreImportCall), [*locations datasets annotation stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreListCall), [*locations datasets annotation stores patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStorePatchCall), [*locations datasets annotation stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreSetIamPolicyCall), [*locations datasets annotation stores test iam 
permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreTestIamPermissionCall), [*locations datasets consent stores attribute definitions create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionCreateCall), [*locations datasets consent stores attribute definitions delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionDeleteCall), [*locations datasets consent stores attribute definitions get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionGetCall), [*locations datasets consent stores attribute definitions list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionListCall), [*locations datasets consent stores attribute definitions patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionPatchCall), [*locations datasets consent stores check data access*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreCheckDataAccesCall), [*locations datasets consent stores consent artifacts create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentArtifactCreateCall), [*locations datasets consent stores consent artifacts delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentArtifactDeleteCall), [*locations datasets consent stores consent artifacts 
get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentArtifactGetCall), [*locations datasets consent stores consent artifacts list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentArtifactListCall), [*locations datasets consent stores consents activate*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentActivateCall), [*locations datasets consent stores consents create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentCreateCall), [*locations datasets consent stores consents delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentDeleteCall), [*locations datasets consent stores consents delete revision*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentDeleteRevisionCall), [*locations datasets consent stores consents get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentGetCall), [*locations datasets consent stores consents list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentListCall), [*locations datasets consent stores consents list revisions*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentListRevisionCall), [*locations datasets consent stores consents 
patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentPatchCall), [*locations datasets consent stores consents reject*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentRejectCall), [*locations datasets consent stores consents revoke*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentRevokeCall), [*locations datasets consent stores create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreCreateCall), [*locations datasets consent stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreDeleteCall), [*locations datasets consent stores evaluate user consents*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreEvaluateUserConsentCall), [*locations datasets consent stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreGetCall), [*locations datasets consent stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreGetIamPolicyCall), [*locations datasets consent stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreListCall), [*locations datasets consent stores patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStorePatchCall), [*locations datasets consent stores query accessible 
data*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreQueryAccessibleDataCall), [*locations datasets consent stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreSetIamPolicyCall), [*locations datasets consent stores test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreTestIamPermissionCall), [*locations datasets consent stores user data mappings archive*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingArchiveCall), [*locations datasets consent stores user data mappings create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingCreateCall), [*locations datasets consent stores user data mappings delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingDeleteCall), [*locations datasets consent stores user data mappings get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingGetCall), [*locations datasets consent stores user data mappings list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingListCall), [*locations datasets consent stores user data mappings patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingPatchCall), [*locations datasets 
create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetCreateCall), [*locations datasets deidentify*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDeidentifyCall), [*locations datasets delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDeleteCall), [*locations datasets dicom stores create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreCreateCall), [*locations datasets dicom stores deidentify*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreDeidentifyCall), [*locations datasets dicom stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreDeleteCall), [*locations datasets dicom stores export*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreExportCall), [*locations datasets dicom stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreGetCall), [*locations datasets dicom stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreGetIamPolicyCall), [*locations datasets dicom stores import*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreImportCall), [*locations datasets dicom stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreListCall), [*locations datasets dicom stores 
patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStorePatchCall), [*locations datasets dicom stores search for instances*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreSearchForInstanceCall), [*locations datasets dicom stores search for series*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreSearchForSeryCall), [*locations datasets dicom stores search for studies*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreSearchForStudyCall), [*locations datasets dicom stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreSetIamPolicyCall), [*locations datasets dicom stores store instances*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStoreInstanceCall), [*locations datasets dicom stores studies delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudyDeleteCall), [*locations datasets dicom stores studies retrieve metadata*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudyRetrieveMetadataCall), [*locations datasets dicom stores studies retrieve study*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudyRetrieveStudyCall), [*locations datasets dicom stores studies search for instances*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySearchForInstanceCall), [*locations 
datasets dicom stores studies search for series*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySearchForSeryCall), [*locations datasets dicom stores studies series delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesDeleteCall), [*locations datasets dicom stores studies series instances delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceDeleteCall), [*locations datasets dicom stores studies series instances frames retrieve frames*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceFrameRetrieveFrameCall), [*locations datasets dicom stores studies series instances frames retrieve rendered*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceFrameRetrieveRenderedCall), [*locations datasets dicom stores studies series instances retrieve instance*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveInstanceCall), [*locations datasets dicom stores studies series instances retrieve metadata*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveMetadataCall), [*locations datasets dicom stores studies series instances retrieve rendered*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveRenderedCall), [*locations datasets dicom stores studies series retrieve 
metadata*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesRetrieveMetadataCall), [*locations datasets dicom stores studies series retrieve series*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesRetrieveSeryCall), [*locations datasets dicom stores studies series search for instances*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesSearchForInstanceCall), [*locations datasets dicom stores studies store instances*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudyStoreInstanceCall), [*locations datasets dicom stores test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreTestIamPermissionCall), [*locations datasets fhir stores configure search*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreConfigureSearchCall), [*locations datasets fhir stores create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreCreateCall), [*locations datasets fhir stores deidentify*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreDeidentifyCall), [*locations datasets fhir stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreDeleteCall), [*locations datasets fhir stores export*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreExportCall), [*locations datasets fhir stores fhir 
concept map-search-translate*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConceptMapSearchTranslateCall), [*locations datasets fhir stores fhir concept map-translate*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConceptMapTranslateCall), [*locations datasets fhir stores fhir observation-lastn*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirObservationLastnCall), [*locations datasets fhir stores fhir patient-everything*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirPatientEverythingCall), [*locations datasets fhir stores fhir resource-purge*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirResourcePurgeCall), [*locations datasets fhir stores fhir resource-validate*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirResourceValidateCall), [*locations datasets fhir stores fhir capabilities*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirCapabilityCall), [*locations datasets fhir stores fhir conditional delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConditionalDeleteCall), [*locations datasets fhir stores fhir conditional patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConditionalPatchCall), [*locations datasets fhir stores fhir conditional 
update*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConditionalUpdateCall), [*locations datasets fhir stores fhir create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirCreateCall), [*locations datasets fhir stores fhir delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirDeleteCall), [*locations datasets fhir stores fhir execute bundle*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirExecuteBundleCall), [*locations datasets fhir stores fhir history*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirHistoryCall), [*locations datasets fhir stores fhir patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirPatchCall), [*locations datasets fhir stores fhir read*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirReadCall), [*locations datasets fhir stores fhir search*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirSearchCall), [*locations datasets fhir stores fhir search-type*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirSearchTypeCall), [*locations datasets fhir stores fhir update*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirUpdateCall), [*locations datasets fhir stores fhir 
vread*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirVreadCall), [*locations datasets fhir stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreGetCall), [*locations datasets fhir stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreGetIamPolicyCall), [*locations datasets fhir stores import*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreImportCall), [*locations datasets fhir stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreListCall), [*locations datasets fhir stores patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStorePatchCall), [*locations datasets fhir stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreSetIamPolicyCall), [*locations datasets fhir stores test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreTestIamPermissionCall), [*locations datasets get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetGetCall), [*locations datasets get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetGetIamPolicyCall), [*locations datasets hl7 v2 stores create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreCreateCall), [*locations datasets hl7 v2 stores 
delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreDeleteCall), [*locations datasets hl7 v2 stores export*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreExportCall), [*locations datasets hl7 v2 stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreGetCall), [*locations datasets hl7 v2 stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreGetIamPolicyCall), [*locations datasets hl7 v2 stores import*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreImportCall), [*locations datasets hl7 v2 stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreListCall), [*locations datasets hl7 v2 stores messages batch get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageBatchGetCall), [*locations datasets hl7 v2 stores messages create*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageCreateCall), [*locations datasets hl7 v2 stores messages delete*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageDeleteCall), [*locations datasets hl7 v2 stores messages get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageGetCall), [*locations datasets hl7 v2 stores messages 
ingest*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageIngestCall), [*locations datasets hl7 v2 stores messages list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageListCall), [*locations datasets hl7 v2 stores messages patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessagePatchCall), [*locations datasets hl7 v2 stores patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StorePatchCall), [*locations datasets hl7 v2 stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreSetIamPolicyCall), [*locations datasets hl7 v2 stores test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreTestIamPermissionCall), [*locations datasets list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetListCall), [*locations datasets operations cancel*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetOperationCancelCall), [*locations datasets operations get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetOperationGetCall), [*locations datasets operations list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetOperationListCall), [*locations datasets patch*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetPatchCall), [*locations datasets set iam 
policy*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetSetIamPolicyCall), [*locations datasets test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetTestIamPermissionCall), [*locations get*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationListCall) and [*locations services nlp analyze entities*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/api::ProjectLocationServiceNlpAnalyzeEntityCall) + * [*locations datasets annotation stores annotations create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationCreateCall), [*locations datasets annotation stores annotations delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationDeleteCall), [*locations datasets annotation stores annotations get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationGetCall), [*locations datasets annotation stores annotations list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationListCall), [*locations datasets annotation stores annotations patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreAnnotationPatchCall), [*locations datasets annotation stores create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreCreateCall), 
[*locations datasets annotation stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreDeleteCall), [*locations datasets annotation stores evaluate*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreEvaluateCall), [*locations datasets annotation stores export*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreExportCall), [*locations datasets annotation stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreGetCall), [*locations datasets annotation stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreGetIamPolicyCall), [*locations datasets annotation stores import*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreImportCall), [*locations datasets annotation stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreListCall), [*locations datasets annotation stores patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStorePatchCall), [*locations datasets annotation stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreSetIamPolicyCall), [*locations datasets annotation stores test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetAnnotationStoreTestIamPermissionCall), [*locations datasets consent stores attribute definitions 
create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionCreateCall), [*locations datasets consent stores attribute definitions delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionDeleteCall), [*locations datasets consent stores attribute definitions get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionGetCall), [*locations datasets consent stores attribute definitions list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionListCall), [*locations datasets consent stores attribute definitions patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreAttributeDefinitionPatchCall), [*locations datasets consent stores check data access*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreCheckDataAccesCall), [*locations datasets consent stores consent artifacts create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentArtifactCreateCall), [*locations datasets consent stores consent artifacts delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentArtifactDeleteCall), [*locations datasets consent stores consent artifacts get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentArtifactGetCall), [*locations datasets consent stores consent artifacts 
list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentArtifactListCall), [*locations datasets consent stores consents activate*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentActivateCall), [*locations datasets consent stores consents create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentCreateCall), [*locations datasets consent stores consents delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentDeleteCall), [*locations datasets consent stores consents delete revision*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentDeleteRevisionCall), [*locations datasets consent stores consents get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentGetCall), [*locations datasets consent stores consents list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentListCall), [*locations datasets consent stores consents list revisions*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentListRevisionCall), [*locations datasets consent stores consents patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentPatchCall), [*locations datasets consent stores consents reject*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentRejectCall), [*locations datasets consent stores consents 
revoke*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreConsentRevokeCall), [*locations datasets consent stores create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreCreateCall), [*locations datasets consent stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreDeleteCall), [*locations datasets consent stores evaluate user consents*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreEvaluateUserConsentCall), [*locations datasets consent stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreGetCall), [*locations datasets consent stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreGetIamPolicyCall), [*locations datasets consent stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreListCall), [*locations datasets consent stores patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStorePatchCall), [*locations datasets consent stores query accessible data*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreQueryAccessibleDataCall), [*locations datasets consent stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreSetIamPolicyCall), [*locations datasets consent stores test iam 
permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreTestIamPermissionCall), [*locations datasets consent stores user data mappings archive*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingArchiveCall), [*locations datasets consent stores user data mappings create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingCreateCall), [*locations datasets consent stores user data mappings delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingDeleteCall), [*locations datasets consent stores user data mappings get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingGetCall), [*locations datasets consent stores user data mappings list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingListCall), [*locations datasets consent stores user data mappings patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetConsentStoreUserDataMappingPatchCall), [*locations datasets create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetCreateCall), [*locations datasets deidentify*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDeidentifyCall), [*locations datasets delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDeleteCall), [*locations datasets dicom stores 
create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreCreateCall), [*locations datasets dicom stores deidentify*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreDeidentifyCall), [*locations datasets dicom stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreDeleteCall), [*locations datasets dicom stores export*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreExportCall), [*locations datasets dicom stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreGetCall), [*locations datasets dicom stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreGetIamPolicyCall), [*locations datasets dicom stores import*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreImportCall), [*locations datasets dicom stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreListCall), [*locations datasets dicom stores patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStorePatchCall), [*locations datasets dicom stores search for instances*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreSearchForInstanceCall), [*locations datasets dicom stores search for series*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreSearchForSeryCall), [*locations datasets dicom stores search for 
studies*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreSearchForStudyCall), [*locations datasets dicom stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreSetIamPolicyCall), [*locations datasets dicom stores store instances*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStoreInstanceCall), [*locations datasets dicom stores studies delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudyDeleteCall), [*locations datasets dicom stores studies retrieve metadata*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudyRetrieveMetadataCall), [*locations datasets dicom stores studies retrieve study*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudyRetrieveStudyCall), [*locations datasets dicom stores studies search for instances*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySearchForInstanceCall), [*locations datasets dicom stores studies search for series*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySearchForSeryCall), [*locations datasets dicom stores studies series delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesDeleteCall), [*locations datasets dicom stores studies series instances delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceDeleteCall), [*locations datasets dicom stores 
studies series instances frames retrieve frames*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceFrameRetrieveFrameCall), [*locations datasets dicom stores studies series instances frames retrieve rendered*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceFrameRetrieveRenderedCall), [*locations datasets dicom stores studies series instances retrieve instance*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveInstanceCall), [*locations datasets dicom stores studies series instances retrieve metadata*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveMetadataCall), [*locations datasets dicom stores studies series instances retrieve rendered*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesInstanceRetrieveRenderedCall), [*locations datasets dicom stores studies series retrieve metadata*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesRetrieveMetadataCall), [*locations datasets dicom stores studies series retrieve series*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesRetrieveSeryCall), [*locations datasets dicom stores studies series search for instances*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudySeriesSearchForInstanceCall), [*locations datasets dicom stores studies store 
instances*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreStudyStoreInstanceCall), [*locations datasets dicom stores test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetDicomStoreTestIamPermissionCall), [*locations datasets fhir stores configure search*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreConfigureSearchCall), [*locations datasets fhir stores create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreCreateCall), [*locations datasets fhir stores deidentify*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreDeidentifyCall), [*locations datasets fhir stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreDeleteCall), [*locations datasets fhir stores export*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreExportCall), [*locations datasets fhir stores fhir concept map-search-translate*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConceptMapSearchTranslateCall), [*locations datasets fhir stores fhir concept map-translate*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConceptMapTranslateCall), [*locations datasets fhir stores fhir observation-lastn*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirObservationLastnCall), [*locations datasets fhir stores fhir 
patient-everything*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirPatientEverythingCall), [*locations datasets fhir stores fhir resource-purge*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirResourcePurgeCall), [*locations datasets fhir stores fhir resource-validate*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirResourceValidateCall), [*locations datasets fhir stores fhir capabilities*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirCapabilityCall), [*locations datasets fhir stores fhir conditional delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConditionalDeleteCall), [*locations datasets fhir stores fhir conditional patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConditionalPatchCall), [*locations datasets fhir stores fhir conditional update*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirConditionalUpdateCall), [*locations datasets fhir stores fhir create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirCreateCall), [*locations datasets fhir stores fhir delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirDeleteCall), [*locations datasets fhir stores fhir execute bundle*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirExecuteBundleCall), [*locations datasets fhir stores fhir 
history*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirHistoryCall), [*locations datasets fhir stores fhir patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirPatchCall), [*locations datasets fhir stores fhir read*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirReadCall), [*locations datasets fhir stores fhir search*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirSearchCall), [*locations datasets fhir stores fhir search-type*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirSearchTypeCall), [*locations datasets fhir stores fhir update*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirUpdateCall), [*locations datasets fhir stores fhir vread*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreFhirVreadCall), [*locations datasets fhir stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreGetCall), [*locations datasets fhir stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreGetIamPolicyCall), [*locations datasets fhir stores import*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreImportCall), [*locations datasets fhir stores list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreListCall), [*locations datasets fhir stores 
patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStorePatchCall), [*locations datasets fhir stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreSetIamPolicyCall), [*locations datasets fhir stores test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetFhirStoreTestIamPermissionCall), [*locations datasets get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetGetCall), [*locations datasets get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetGetIamPolicyCall), [*locations datasets hl7 v2 stores create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreCreateCall), [*locations datasets hl7 v2 stores delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreDeleteCall), [*locations datasets hl7 v2 stores export*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreExportCall), [*locations datasets hl7 v2 stores get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreGetCall), [*locations datasets hl7 v2 stores get iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreGetIamPolicyCall), [*locations datasets hl7 v2 stores import*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreImportCall), [*locations datasets hl7 v2 stores 
list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreListCall), [*locations datasets hl7 v2 stores messages batch get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageBatchGetCall), [*locations datasets hl7 v2 stores messages create*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageCreateCall), [*locations datasets hl7 v2 stores messages delete*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageDeleteCall), [*locations datasets hl7 v2 stores messages get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageGetCall), [*locations datasets hl7 v2 stores messages ingest*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageIngestCall), [*locations datasets hl7 v2 stores messages list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessageListCall), [*locations datasets hl7 v2 stores messages patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreMessagePatchCall), [*locations datasets hl7 v2 stores patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StorePatchCall), [*locations datasets hl7 v2 stores set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreSetIamPolicyCall), [*locations datasets hl7 v2 stores test iam 
permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetHl7V2StoreTestIamPermissionCall), [*locations datasets list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetListCall), [*locations datasets operations cancel*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetOperationCancelCall), [*locations datasets operations get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetOperationGetCall), [*locations datasets operations list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetOperationListCall), [*locations datasets patch*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetPatchCall), [*locations datasets set iam policy*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetSetIamPolicyCall), [*locations datasets test iam permissions*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationDatasetTestIamPermissionCall), [*locations get*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationListCall) and [*locations services nlp analyze entities*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/api::ProjectLocationServiceNlpAnalyzeEntityCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/CloudHealthcare)** +* **[Hub](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/CloudHealthcare)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -158,17 +158,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -178,29 +178,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-healthcare1_beta1/5.0.2-beta-1+20221220/google_healthcare1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-healthcare1_beta1/5.0.2+20221220/google_healthcare1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/healthcare1_beta1/src/api.rs b/gen/healthcare1_beta1/src/api.rs index 66a94edab1..c16f6313d6 100644 --- a/gen/healthcare1_beta1/src/api.rs +++ b/gen/healthcare1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudHealthcare { CloudHealthcare { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://healthcare.googleapis.com/".to_string(), _root_url: "https://healthcare.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> CloudHealthcare { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/healthcare1_beta1/src/client.rs b/gen/healthcare1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/healthcare1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/healthcare1_beta1/src/lib.rs b/gen/healthcare1_beta1/src/lib.rs index 70472af16e..d7b3ce5c2c 100644 --- a/gen/healthcare1_beta1/src/lib.rs +++ b/gen/healthcare1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Healthcare* crate version *5.0.2-beta-1+20221220*, where *20221220* is the exact revision of the *healthcare:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Healthcare* crate version *5.0.2+20221220*, where *20221220* is the exact revision of the *healthcare:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Healthcare* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/healthcare). diff --git a/gen/iam1-cli/Cargo.toml b/gen/iam1-cli/Cargo.toml index ca506798da..72a2a754cd 100644 --- a/gen/iam1-cli/Cargo.toml +++ b/gen/iam1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-iam1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Iam (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/iam1-cli" @@ -20,13 +20,13 @@ name = "iam1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-iam1] path = "../iam1" -version = "4.0.1+20220224" +version = "5.0.2+20230112" + diff --git a/gen/iam1-cli/README.md b/gen/iam1-cli/README.md index fc92707031..6c284f79cd 100644 --- a/gen/iam1-cli/README.md +++ b/gen/iam1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Iam* API at 
revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Iam* API at revision *20230112*. The CLI is at version *5.0.2*. ```bash iam1 [options] @@ -34,6 +34,7 @@ iam1 [options] query-auditable-services (-r )... [-p ]... [-o ] locations workforce-pools-operations-get [-p ]... [-o ] + workforce-pools-providers-keys-operations-get [-p ]... [-o ] workforce-pools-providers-operations-get [-p ]... [-o ] workforce-pools-subjects-operations-get [-p ]... [-o ] organizations diff --git a/gen/iam1-cli/mkdocs.yml b/gen/iam1-cli/mkdocs.yml index 05b980b26c..29152ffdec 100644 --- a/gen/iam1-cli/mkdocs.yml +++ b/gen/iam1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Iam v4.0.1+20220224 +site_name: Iam v5.0.2+20230112 site_url: http://byron.github.io/google-apis-rs/google-iam1-cli site_description: A complete library to interact with Iam (protocol v1) @@ -7,65 +7,72 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/iam1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['iam-policies_lint-policy.md', 'Iam Policies', 'Lint Policy'] -- ['iam-policies_query-auditable-services.md', 'Iam Policies', 'Query Auditable Services'] -- ['locations_workforce-pools-operations-get.md', 'Locations', 'Workforce Pools Operations Get'] -- ['locations_workforce-pools-providers-operations-get.md', 'Locations', 'Workforce Pools Providers Operations Get'] -- ['locations_workforce-pools-subjects-operations-get.md', 'Locations', 'Workforce Pools Subjects Operations Get'] -- ['organizations_roles-create.md', 'Organizations', 'Roles Create'] -- ['organizations_roles-delete.md', 'Organizations', 'Roles Delete'] -- ['organizations_roles-get.md', 'Organizations', 'Roles Get'] -- ['organizations_roles-list.md', 'Organizations', 'Roles List'] -- ['organizations_roles-patch.md', 'Organizations', 'Roles Patch'] -- ['organizations_roles-undelete.md', 'Organizations', 'Roles Undelete'] -- ['permissions_query-testable-permissions.md', 
'Permissions', 'Query Testable Permissions'] -- ['projects_locations-workload-identity-pools-create.md', 'Projects', 'Locations Workload Identity Pools Create'] -- ['projects_locations-workload-identity-pools-delete.md', 'Projects', 'Locations Workload Identity Pools Delete'] -- ['projects_locations-workload-identity-pools-get.md', 'Projects', 'Locations Workload Identity Pools Get'] -- ['projects_locations-workload-identity-pools-list.md', 'Projects', 'Locations Workload Identity Pools List'] -- ['projects_locations-workload-identity-pools-operations-get.md', 'Projects', 'Locations Workload Identity Pools Operations Get'] -- ['projects_locations-workload-identity-pools-patch.md', 'Projects', 'Locations Workload Identity Pools Patch'] -- ['projects_locations-workload-identity-pools-providers-create.md', 'Projects', 'Locations Workload Identity Pools Providers Create'] -- ['projects_locations-workload-identity-pools-providers-delete.md', 'Projects', 'Locations Workload Identity Pools Providers Delete'] -- ['projects_locations-workload-identity-pools-providers-get.md', 'Projects', 'Locations Workload Identity Pools Providers Get'] -- ['projects_locations-workload-identity-pools-providers-keys-operations-get.md', 'Projects', 'Locations Workload Identity Pools Providers Keys Operations Get'] -- ['projects_locations-workload-identity-pools-providers-list.md', 'Projects', 'Locations Workload Identity Pools Providers List'] -- ['projects_locations-workload-identity-pools-providers-operations-get.md', 'Projects', 'Locations Workload Identity Pools Providers Operations Get'] -- ['projects_locations-workload-identity-pools-providers-patch.md', 'Projects', 'Locations Workload Identity Pools Providers Patch'] -- ['projects_locations-workload-identity-pools-providers-undelete.md', 'Projects', 'Locations Workload Identity Pools Providers Undelete'] -- ['projects_locations-workload-identity-pools-undelete.md', 'Projects', 'Locations Workload Identity Pools Undelete'] -- 
['projects_roles-create.md', 'Projects', 'Roles Create'] -- ['projects_roles-delete.md', 'Projects', 'Roles Delete'] -- ['projects_roles-get.md', 'Projects', 'Roles Get'] -- ['projects_roles-list.md', 'Projects', 'Roles List'] -- ['projects_roles-patch.md', 'Projects', 'Roles Patch'] -- ['projects_roles-undelete.md', 'Projects', 'Roles Undelete'] -- ['projects_service-accounts-create.md', 'Projects', 'Service Accounts Create'] -- ['projects_service-accounts-delete.md', 'Projects', 'Service Accounts Delete'] -- ['projects_service-accounts-disable.md', 'Projects', 'Service Accounts Disable'] -- ['projects_service-accounts-enable.md', 'Projects', 'Service Accounts Enable'] -- ['projects_service-accounts-get.md', 'Projects', 'Service Accounts Get'] -- ['projects_service-accounts-get-iam-policy.md', 'Projects', 'Service Accounts Get Iam Policy'] -- ['projects_service-accounts-keys-create.md', 'Projects', 'Service Accounts Keys Create'] -- ['projects_service-accounts-keys-delete.md', 'Projects', 'Service Accounts Keys Delete'] -- ['projects_service-accounts-keys-disable.md', 'Projects', 'Service Accounts Keys Disable'] -- ['projects_service-accounts-keys-enable.md', 'Projects', 'Service Accounts Keys Enable'] -- ['projects_service-accounts-keys-get.md', 'Projects', 'Service Accounts Keys Get'] -- ['projects_service-accounts-keys-list.md', 'Projects', 'Service Accounts Keys List'] -- ['projects_service-accounts-keys-upload.md', 'Projects', 'Service Accounts Keys Upload'] -- ['projects_service-accounts-list.md', 'Projects', 'Service Accounts List'] -- ['projects_service-accounts-patch.md', 'Projects', 'Service Accounts Patch'] -- ['projects_service-accounts-set-iam-policy.md', 'Projects', 'Service Accounts Set Iam Policy'] -- ['projects_service-accounts-sign-blob.md', 'Projects', 'Service Accounts Sign Blob'] -- ['projects_service-accounts-sign-jwt.md', 'Projects', 'Service Accounts Sign Jwt'] -- ['projects_service-accounts-test-iam-permissions.md', 'Projects', 'Service 
Accounts Test Iam Permissions'] -- ['projects_service-accounts-undelete.md', 'Projects', 'Service Accounts Undelete'] -- ['projects_service-accounts-update.md', 'Projects', 'Service Accounts Update'] -- ['roles_get.md', 'Roles', 'Get'] -- ['roles_list.md', 'Roles', 'List'] -- ['roles_query-grantable-roles.md', 'Roles', 'Query Grantable Roles'] +nav: +- Home: 'index.md' +- 'Iam Policies': + - 'Lint Policy': 'iam-policies_lint-policy.md' + - 'Query Auditable Services': 'iam-policies_query-auditable-services.md' +- 'Locations': + - 'Workforce Pools Operations Get': 'locations_workforce-pools-operations-get.md' + - 'Workforce Pools Providers Keys Operations Get': 'locations_workforce-pools-providers-keys-operations-get.md' + - 'Workforce Pools Providers Operations Get': 'locations_workforce-pools-providers-operations-get.md' + - 'Workforce Pools Subjects Operations Get': 'locations_workforce-pools-subjects-operations-get.md' +- 'Organizations': + - 'Roles Create': 'organizations_roles-create.md' + - 'Roles Delete': 'organizations_roles-delete.md' + - 'Roles Get': 'organizations_roles-get.md' + - 'Roles List': 'organizations_roles-list.md' + - 'Roles Patch': 'organizations_roles-patch.md' + - 'Roles Undelete': 'organizations_roles-undelete.md' +- 'Permissions': + - 'Query Testable Permissions': 'permissions_query-testable-permissions.md' +- 'Projects': + - 'Locations Workload Identity Pools Create': 'projects_locations-workload-identity-pools-create.md' + - 'Locations Workload Identity Pools Delete': 'projects_locations-workload-identity-pools-delete.md' + - 'Locations Workload Identity Pools Get': 'projects_locations-workload-identity-pools-get.md' + - 'Locations Workload Identity Pools List': 'projects_locations-workload-identity-pools-list.md' + - 'Locations Workload Identity Pools Operations Get': 'projects_locations-workload-identity-pools-operations-get.md' + - 'Locations Workload Identity Pools Patch': 'projects_locations-workload-identity-pools-patch.md' + - 
'Locations Workload Identity Pools Providers Create': 'projects_locations-workload-identity-pools-providers-create.md' + - 'Locations Workload Identity Pools Providers Delete': 'projects_locations-workload-identity-pools-providers-delete.md' + - 'Locations Workload Identity Pools Providers Get': 'projects_locations-workload-identity-pools-providers-get.md' + - 'Locations Workload Identity Pools Providers Keys Operations Get': 'projects_locations-workload-identity-pools-providers-keys-operations-get.md' + - 'Locations Workload Identity Pools Providers List': 'projects_locations-workload-identity-pools-providers-list.md' + - 'Locations Workload Identity Pools Providers Operations Get': 'projects_locations-workload-identity-pools-providers-operations-get.md' + - 'Locations Workload Identity Pools Providers Patch': 'projects_locations-workload-identity-pools-providers-patch.md' + - 'Locations Workload Identity Pools Providers Undelete': 'projects_locations-workload-identity-pools-providers-undelete.md' + - 'Locations Workload Identity Pools Undelete': 'projects_locations-workload-identity-pools-undelete.md' + - 'Roles Create': 'projects_roles-create.md' + - 'Roles Delete': 'projects_roles-delete.md' + - 'Roles Get': 'projects_roles-get.md' + - 'Roles List': 'projects_roles-list.md' + - 'Roles Patch': 'projects_roles-patch.md' + - 'Roles Undelete': 'projects_roles-undelete.md' + - 'Service Accounts Create': 'projects_service-accounts-create.md' + - 'Service Accounts Delete': 'projects_service-accounts-delete.md' + - 'Service Accounts Disable': 'projects_service-accounts-disable.md' + - 'Service Accounts Enable': 'projects_service-accounts-enable.md' + - 'Service Accounts Get': 'projects_service-accounts-get.md' + - 'Service Accounts Get Iam Policy': 'projects_service-accounts-get-iam-policy.md' + - 'Service Accounts Keys Create': 'projects_service-accounts-keys-create.md' + - 'Service Accounts Keys Delete': 'projects_service-accounts-keys-delete.md' + - 'Service 
Accounts Keys Disable': 'projects_service-accounts-keys-disable.md' + - 'Service Accounts Keys Enable': 'projects_service-accounts-keys-enable.md' + - 'Service Accounts Keys Get': 'projects_service-accounts-keys-get.md' + - 'Service Accounts Keys List': 'projects_service-accounts-keys-list.md' + - 'Service Accounts Keys Upload': 'projects_service-accounts-keys-upload.md' + - 'Service Accounts List': 'projects_service-accounts-list.md' + - 'Service Accounts Patch': 'projects_service-accounts-patch.md' + - 'Service Accounts Set Iam Policy': 'projects_service-accounts-set-iam-policy.md' + - 'Service Accounts Sign Blob': 'projects_service-accounts-sign-blob.md' + - 'Service Accounts Sign Jwt': 'projects_service-accounts-sign-jwt.md' + - 'Service Accounts Test Iam Permissions': 'projects_service-accounts-test-iam-permissions.md' + - 'Service Accounts Undelete': 'projects_service-accounts-undelete.md' + - 'Service Accounts Update': 'projects_service-accounts-update.md' +- 'Roles': + - 'Get': 'roles_get.md' + - 'List': 'roles_list.md' + - 'Query Grantable Roles': 'roles_query-grantable-roles.md' theme: readthedocs diff --git a/gen/iam1-cli/src/client.rs b/gen/iam1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/iam1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - 
Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/iam1-cli/src/main.rs b/gen/iam1-cli/src/main.rs index 16efe2a8e4..be0234f444 100644 --- a/gen/iam1-cli/src/main.rs +++ b/gen/iam1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_iam1::{api, Error, oauth2}; +use google_iam1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -277,6 +276,58 @@ where } } + async fn _locations_workforce_pools_providers_keys_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.locations().workforce_pools_providers_keys_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _locations_workforce_pools_providers_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.locations().workforce_pools_providers_operations_get(opt.value_of("name").unwrap_or("")); @@ -480,7 +531,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "etag" => { - call = call.etag(value.unwrap_or("")); + call = call.etag( value.map(|v| arg_from_str(v, err, "etag", "byte")).unwrap_or(b"hello world")); }, _ => { let mut found = false; @@ -591,13 +642,13 @@ where call = call.view(value.unwrap_or("")); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -692,7 +743,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1117,13 +1168,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1268,7 +1319,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1349,9 +1400,10 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "oidc.allowed-audiences" => Some(("oidc.allowedAudiences", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "oidc.issuer-uri" => Some(("oidc.issuerUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "saml.idp-metadata-xml" => Some(("saml.idpMetadataXml", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "allowed-audiences", "attribute-condition", "attribute-mapping", "aws", "description", "disabled", "display-name", "issuer-uri", "name", "oidc", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "allowed-audiences", "attribute-condition", 
"attribute-mapping", "aws", "description", "disabled", "display-name", "idp-metadata-xml", "issuer-uri", "name", "oidc", "saml", "state"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1578,13 +1630,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1717,9 +1769,10 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "oidc.allowed-audiences" => Some(("oidc.allowedAudiences", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "oidc.issuer-uri" => Some(("oidc.issuerUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "saml.idp-metadata-xml" => Some(("saml.idpMetadataXml", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "allowed-audiences", "attribute-condition", "attribute-mapping", "aws", "description", "disabled", "display-name", "issuer-uri", "name", "oidc", "state"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "allowed-audiences", "attribute-condition", "attribute-mapping", "aws", "description", "disabled", "display-name", "idp-metadata-xml", "issuer-uri", "name", "oidc", "saml", "state"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1734,7 +1787,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2050,7 +2103,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "etag" => { - call = call.etag(value.unwrap_or("")); + call = call.etag( value.map(|v| arg_from_str(v, err, "etag", "byte")).unwrap_or(b"hello world")); }, _ => { let mut found = false; @@ -2161,13 +2214,13 @@ where call = call.view(value.unwrap_or("")); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2262,7 +2315,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2769,7 +2822,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( 
value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3331,7 +3384,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4055,7 +4108,7 @@ where call = call.view(value.unwrap_or("")); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "parent" => { call = call.parent(value.unwrap_or("")); @@ -4064,7 +4117,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4225,6 +4278,9 @@ where ("workforce-pools-operations-get", Some(opt)) => { call_result = self._locations_workforce_pools_operations_get(opt, dry_run, &mut err).await; }, + ("workforce-pools-providers-keys-operations-get", Some(opt)) => { + call_result = self._locations_workforce_pools_providers_keys_operations_get(opt, dry_run, &mut err).await; + }, ("workforce-pools-providers-operations-get", Some(opt)) => { call_result = self._locations_workforce_pools_providers_operations_get(opt, dry_run, &mut err).await; }, @@ -4545,7 +4601,7 @@ async fn main() { ]), ]), - ("locations", "methods: 'workforce-pools-operations-get', 'workforce-pools-providers-operations-get' and 'workforce-pools-subjects-operations-get'", vec![ + ("locations", "methods: 'workforce-pools-operations-get', 'workforce-pools-providers-keys-operations-get', 'workforce-pools-providers-operations-get' 
and 'workforce-pools-subjects-operations-get'", vec![ ("workforce-pools-operations-get", Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##), "Details at http://byron.github.io/google-apis-rs/google_iam1_cli/locations_workforce-pools-operations-get", @@ -4562,6 +4618,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("workforce-pools-providers-keys-operations-get", + Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##), + "Details at http://byron.github.io/google-apis-rs/google_iam1_cli/locations_workforce-pools-providers-keys-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5343,7 +5421,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. The resource name of the service account. 
Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5365,7 +5443,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"The resource name of the service account. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. 
For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5393,7 +5471,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"The resource name of the service account. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5421,7 +5499,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. The resource name of the service account. 
Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5443,7 +5521,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5465,7 +5543,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. The resource name of the service account. 
Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5493,7 +5571,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the service account key in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}/keys/{key}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. The resource name of the service account key. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}/keys/{KEY_ID}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}/keys/{KEY_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}/keys/{KEY_ID}` * `projects/-/serviceAccounts/{UNIQUE_ID}/keys/{KEY_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. 
For example, if you try to access the service account key `projects/-/serviceAccounts/fake@example.com/keys/fake-key`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5510,12 +5588,12 @@ async fn main() { Some(false)), ]), ("service-accounts-keys-disable", - Some(r##"Disable a ServiceAccountKey. A disabled service account key can be enabled through EnableServiceAccountKey."##), + Some(r##"Disable a ServiceAccountKey. A disabled service account key can be re-enabled with EnableServiceAccountKey."##), "Details at http://byron.github.io/google-apis-rs/google_iam1_cli/projects_service-accounts-keys-disable", vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the service account key in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}/keys/{key}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. The resource name of the service account key. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}/keys/{KEY_ID}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}/keys/{KEY_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}/keys/{KEY_ID}` * `projects/-/serviceAccounts/{UNIQUE_ID}/keys/{KEY_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. 
For example, if you try to access the service account key `projects/-/serviceAccounts/fake@example.com/keys/fake-key`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5543,7 +5621,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the service account key in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}/keys/{key}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. The resource name of the service account key. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}/keys/{KEY_ID}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}/keys/{KEY_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}/keys/{KEY_ID}` * `projects/-/serviceAccounts/{UNIQUE_ID}/keys/{KEY_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account key `projects/-/serviceAccounts/fake@example.com/keys/fake-key`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5571,7 +5649,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the service account key in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}/keys/{key}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. The resource name of the service account key. 
Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}/keys/{KEY_ID}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}/keys/{KEY_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}/keys/{KEY_ID}` * `projects/-/serviceAccounts/{UNIQUE_ID}/keys/{KEY_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account key `projects/-/serviceAccounts/fake@example.com/keys/fake-key`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5593,7 +5671,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID`, will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. The resource name of the service account. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. 
For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5610,12 +5688,12 @@ async fn main() { Some(false)), ]), ("service-accounts-keys-upload", - Some(r##"Creates a ServiceAccountKey, using a public key that you provide."##), + Some(r##"Uploads the public key portion of a key pair that you manage, and associates the public key with a ServiceAccount. After you upload the public key, you can use the private key from the key pair as a service account key."##), "Details at http://byron.github.io/google-apis-rs/google_iam1_cli/projects_service-accounts-keys-upload", vec![ (Some(r##"name"##), None, - Some(r##"The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"The resource name of the service account key. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5665,7 +5743,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The resource name of the service account. 
Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to get the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), + Some(r##"The resource name of the service account. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5693,7 +5771,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5721,7 +5799,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Deprecated. 
[Migrate to Service Account Credentials API](https://cloud.google.com/iam/help/credentials/migrate-api). The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. Deprecated. [Migrate to Service Account Credentials API](https://cloud.google.com/iam/help/credentials/migrate-api). The resource name of the service account. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5749,7 +5827,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Deprecated. [Migrate to Service Account Credentials API](https://cloud.google.com/iam/help/credentials/migrate-api). The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account. The `ACCOUNT` value can be the `email` address or the `unique_id` of the service account."##), + Some(r##"Required. Deprecated. [Migrate to Service Account Credentials API](https://cloud.google.com/iam/help/credentials/migrate-api). The resource name of the service account. 
Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5777,7 +5855,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5805,7 +5883,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The resource name of the service account in the following format: `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_UNIQUE_ID}`. Using `-` as a wildcard for the `PROJECT_ID` will infer the project from the account."##), + Some(r##"The resource name of the service account. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. 
For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5833,7 +5911,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The resource name of the service account. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. For example, if you try to get the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), + Some(r##"The resource name of the service account. Use one of the following formats: * `projects/{PROJECT_ID}/serviceAccounts/{EMAIL_ADDRESS}` * `projects/{PROJECT_ID}/serviceAccounts/{UNIQUE_ID}` As an alternative, you can use the `-` wildcard character instead of the project ID: * `projects/-/serviceAccounts/{EMAIL_ADDRESS}` * `projects/-/serviceAccounts/{UNIQUE_ID}` When possible, avoid using the `-` wildcard character, because it can cause response messages to contain misleading error codes. 
For example, if you try to access the service account `projects/-/serviceAccounts/fake@example.com`, which does not exist, the response contains an HTTP `403 Forbidden` error instead of a `404 Not Found` error."##), Some(true), Some(false)), @@ -5924,7 +6002,7 @@ async fn main() { let mut app = App::new("iam1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230112") .about("Manages identity and access control for Google Cloud Platform resources, including the creation of service accounts, which you can use to authenticate to Google and make API calls. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_iam1_cli") .arg(Arg::with_name("url") diff --git a/gen/iam1/Cargo.toml b/gen/iam1/Cargo.toml index c68360c0ff..38e64d931a 100644 --- a/gen/iam1/Cargo.toml +++ b/gen/iam1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-iam1" -version = "5.0.2-beta-1+20230112" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Iam (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/iam1" homepage = "https://cloud.google.com/iam/" -documentation = "https://docs.rs/google-iam1/5.0.2-beta-1+20230112" +documentation = "https://docs.rs/google-iam1/5.0.2+20230112" license = "MIT" keywords = ["iam", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/iam1/README.md b/gen/iam1/README.md index d0a5674fb6..e063b88311 100644 --- a/gen/iam1/README.md +++ b/gen/iam1/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-iam1` library allows access to all features of the *Google Iam* service. -This documentation was generated from *Iam* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *iam:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Iam* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *iam:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Iam* *v1* API can be found at the [official documentation site](https://cloud.google.com/iam/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/Iam) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/Iam) ... * iam policies - * [*lint policy*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::IamPolicyLintPolicyCall) and [*query auditable services*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::IamPolicyQueryAuditableServiceCall) + * [*lint policy*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::IamPolicyLintPolicyCall) and [*query auditable services*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::IamPolicyQueryAuditableServiceCall) * locations - * [*workforce pools operations get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::LocationWorkforcePoolOperationGetCall), [*workforce pools providers keys operations get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::LocationWorkforcePoolProviderKeyOperationGetCall), [*workforce pools providers operations get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::LocationWorkforcePoolProviderOperationGetCall) and [*workforce pools subjects operations get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::LocationWorkforcePoolSubjectOperationGetCall) + * [*workforce pools operations get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::LocationWorkforcePoolOperationGetCall), [*workforce pools providers keys operations 
get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::LocationWorkforcePoolProviderKeyOperationGetCall), [*workforce pools providers operations get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::LocationWorkforcePoolProviderOperationGetCall) and [*workforce pools subjects operations get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::LocationWorkforcePoolSubjectOperationGetCall) * organizations - * [*roles create*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::OrganizationRoleCreateCall), [*roles delete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::OrganizationRoleDeleteCall), [*roles get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::OrganizationRoleGetCall), [*roles list*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::OrganizationRoleListCall), [*roles patch*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::OrganizationRolePatchCall) and [*roles undelete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::OrganizationRoleUndeleteCall) -* [permissions](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::Permission) - * [*query testable permissions*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::PermissionQueryTestablePermissionCall) + * [*roles create*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::OrganizationRoleCreateCall), [*roles delete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::OrganizationRoleDeleteCall), [*roles get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::OrganizationRoleGetCall), [*roles list*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::OrganizationRoleListCall), [*roles patch*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::OrganizationRolePatchCall) and [*roles undelete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::OrganizationRoleUndeleteCall) +* 
[permissions](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::Permission) + * [*query testable permissions*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::PermissionQueryTestablePermissionCall) * projects - * [*locations workload identity pools create*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolCreateCall), [*locations workload identity pools delete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolDeleteCall), [*locations workload identity pools get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolGetCall), [*locations workload identity pools list*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolListCall), [*locations workload identity pools operations get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolOperationGetCall), [*locations workload identity pools patch*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolPatchCall), [*locations workload identity pools providers create*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderCreateCall), [*locations workload identity pools providers delete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderDeleteCall), [*locations workload identity pools providers get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderGetCall), [*locations workload identity pools providers keys operations get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderKeyOperationGetCall), [*locations workload identity pools providers 
list*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderListCall), [*locations workload identity pools providers operations get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderOperationGetCall), [*locations workload identity pools providers patch*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderPatchCall), [*locations workload identity pools providers undelete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderUndeleteCall), [*locations workload identity pools undelete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolUndeleteCall), [*roles create*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectRoleCreateCall), [*roles delete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectRoleDeleteCall), [*roles get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectRoleGetCall), [*roles list*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectRoleListCall), [*roles patch*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectRolePatchCall), [*roles undelete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectRoleUndeleteCall), [*service accounts create*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountCreateCall), [*service accounts delete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountDeleteCall), [*service accounts disable*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountDisableCall), [*service accounts enable*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountEnableCall), 
[*service accounts get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountGetCall), [*service accounts get iam policy*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountGetIamPolicyCall), [*service accounts keys create*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountKeyCreateCall), [*service accounts keys delete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountKeyDeleteCall), [*service accounts keys disable*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountKeyDisableCall), [*service accounts keys enable*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountKeyEnableCall), [*service accounts keys get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountKeyGetCall), [*service accounts keys list*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountKeyListCall), [*service accounts keys upload*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountKeyUploadCall), [*service accounts list*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountListCall), [*service accounts patch*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountPatchCall), [*service accounts set iam policy*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountSetIamPolicyCall), [*service accounts sign blob*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountSignBlobCall), [*service accounts sign jwt*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountSignJwtCall), [*service accounts test iam 
permissions*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountTestIamPermissionCall), [*service accounts undelete*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountUndeleteCall) and [*service accounts update*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::ProjectServiceAccountUpdateCall) -* [roles](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::Role) - * [*get*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::RoleGetCall), [*list*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::RoleListCall) and [*query grantable roles*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/api::RoleQueryGrantableRoleCall) + * [*locations workload identity pools create*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolCreateCall), [*locations workload identity pools delete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolDeleteCall), [*locations workload identity pools get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolGetCall), [*locations workload identity pools list*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolListCall), [*locations workload identity pools operations get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolOperationGetCall), [*locations workload identity pools patch*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolPatchCall), [*locations workload identity pools providers create*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderCreateCall), [*locations workload identity pools providers 
delete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderDeleteCall), [*locations workload identity pools providers get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderGetCall), [*locations workload identity pools providers keys operations get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderKeyOperationGetCall), [*locations workload identity pools providers list*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderListCall), [*locations workload identity pools providers operations get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderOperationGetCall), [*locations workload identity pools providers patch*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderPatchCall), [*locations workload identity pools providers undelete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolProviderUndeleteCall), [*locations workload identity pools undelete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectLocationWorkloadIdentityPoolUndeleteCall), [*roles create*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectRoleCreateCall), [*roles delete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectRoleDeleteCall), [*roles get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectRoleGetCall), [*roles list*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectRoleListCall), [*roles patch*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectRolePatchCall), [*roles undelete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectRoleUndeleteCall), [*service accounts 
create*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountCreateCall), [*service accounts delete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountDeleteCall), [*service accounts disable*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountDisableCall), [*service accounts enable*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountEnableCall), [*service accounts get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountGetCall), [*service accounts get iam policy*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountGetIamPolicyCall), [*service accounts keys create*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountKeyCreateCall), [*service accounts keys delete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountKeyDeleteCall), [*service accounts keys disable*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountKeyDisableCall), [*service accounts keys enable*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountKeyEnableCall), [*service accounts keys get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountKeyGetCall), [*service accounts keys list*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountKeyListCall), [*service accounts keys upload*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountKeyUploadCall), [*service accounts list*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountListCall), [*service accounts patch*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountPatchCall), [*service accounts set iam policy*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountSetIamPolicyCall), 
[*service accounts sign blob*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountSignBlobCall), [*service accounts sign jwt*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountSignJwtCall), [*service accounts test iam permissions*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountTestIamPermissionCall), [*service accounts undelete*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountUndeleteCall) and [*service accounts update*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::ProjectServiceAccountUpdateCall) +* [roles](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::Role) + * [*get*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::RoleGetCall), [*list*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::RoleListCall) and [*query grantable roles*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/api::RoleQueryGrantableRoleCall) @@ -33,17 +33,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/Iam)** +* **[Hub](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/Iam)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::CallBuilder) -* **[Resources](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::CallBuilder) +* **[Resources](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::Part)** + * **[Parts](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -149,17 +149,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -169,29 +169,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::Delegate) to the -[Method Builder](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::Delegate) to the +[Method Builder](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::RequestValue) and -[decodable](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::RequestValue) and +[decodable](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-iam1/5.0.2-beta-1+20230112/google_iam1/client::RequestValue) are moved +* [request values](https://docs.rs/google-iam1/5.0.2+20230112/google_iam1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/iam1/src/api.rs b/gen/iam1/src/api.rs index 4fa11ee538..09a6e69e58 100644 --- a/gen/iam1/src/api.rs +++ b/gen/iam1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Iam { Iam { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://iam.googleapis.com/".to_string(), _root_url: "https://iam.googleapis.com/".to_string(), } @@ -152,7 +152,7 @@ impl<'a, S> Iam { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/iam1/src/client.rs b/gen/iam1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/iam1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/iam1/src/lib.rs b/gen/iam1/src/lib.rs index 4bb7a789ce..2586acfbc6 100644 --- a/gen/iam1/src/lib.rs +++ b/gen/iam1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Iam* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *iam:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Iam* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *iam:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Iam* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/iam/). diff --git a/gen/iamcredentials1-cli/Cargo.toml b/gen/iamcredentials1-cli/Cargo.toml index 03632c62dc..6abc75e875 100644 --- a/gen/iamcredentials1-cli/Cargo.toml +++ b/gen/iamcredentials1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-iamcredentials1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with IAM Credentials (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/iamcredentials1-cli" @@ -20,13 +20,13 @@ name = "iamcredentials1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-iamcredentials1] path = "../iamcredentials1" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/iamcredentials1-cli/README.md b/gen/iamcredentials1-cli/README.md index dec710fb55..cc9abacaf1 100644 --- a/gen/iamcredentials1-cli/README.md +++ b/gen/iamcredentials1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *IAM Credentials* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *IAM Credentials* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash iamcredentials1 [options] diff --git a/gen/iamcredentials1-cli/mkdocs.yml b/gen/iamcredentials1-cli/mkdocs.yml index d4ac01c8dd..3c30039252 100644 --- a/gen/iamcredentials1-cli/mkdocs.yml +++ b/gen/iamcredentials1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: IAM Credentials v4.0.1+20220225 +site_name: IAM Credentials v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-iamcredentials1-cli site_description: A complete library to interact with IAM Credentials (protocol v1) @@ -7,12 +7,13 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/iamcredentials1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_service-accounts-generate-access-token.md', 'Projects', 'Service Accounts Generate Access Token'] -- ['projects_service-accounts-generate-id-token.md', 'Projects', 'Service Accounts Generate Id Token'] -- ['projects_service-accounts-sign-blob.md', 'Projects', 'Service Accounts Sign Blob'] -- ['projects_service-accounts-sign-jwt.md', 'Projects', 'Service Accounts Sign Jwt'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Service Accounts Generate Access Token': 'projects_service-accounts-generate-access-token.md' + - 'Service Accounts Generate Id Token': 'projects_service-accounts-generate-id-token.md' + - 'Service Accounts Sign Blob': 'projects_service-accounts-sign-blob.md' + - 'Service Accounts Sign Jwt': 'projects_service-accounts-sign-jwt.md' theme: readthedocs diff --git a/gen/iamcredentials1-cli/src/client.rs b/gen/iamcredentials1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/iamcredentials1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 
'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - 
-arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/iamcredentials1-cli/src/main.rs b/gen/iamcredentials1-cli/src/main.rs index 352d0acd64..fe9823bef1 100644 --- a/gen/iamcredentials1-cli/src/main.rs +++ b/gen/iamcredentials1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_iamcredentials1::{api, Error, oauth2}; +use google_iamcredentials1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -614,7 +613,7 @@ async fn main() { let mut app = App::new("iamcredentials1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("Creates short-lived credentials for impersonating IAM service accounts. To enable this API, you must enable the IAM API (iam.googleapis.com). 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_iamcredentials1_cli") .arg(Arg::with_name("url") diff --git a/gen/iamcredentials1/Cargo.toml b/gen/iamcredentials1/Cargo.toml index 5b007a9d32..f26591c1d1 100644 --- a/gen/iamcredentials1/Cargo.toml +++ b/gen/iamcredentials1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-iamcredentials1" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with IAM Credentials (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/iamcredentials1" homepage = "https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials" -documentation = "https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-iamcredentials1/5.0.2+20230113" license = "MIT" keywords = ["iamcredentials", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/iamcredentials1/README.md b/gen/iamcredentials1/README.md index 40e522e4b5..f4d797ffe8 100644 --- a/gen/iamcredentials1/README.md +++ b/gen/iamcredentials1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-iamcredentials1` library allows access to all features of the *Google IAM Credentials* service. -This documentation was generated from *IAM Credentials* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *iamcredentials:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *IAM Credentials* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *iamcredentials:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *IAM Credentials* *v1* API can be found at the [official documentation site](https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/IAMCredentials) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/IAMCredentials) ... * projects - * [*service accounts generate access token*](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/api::ProjectServiceAccountGenerateAccessTokenCall), [*service accounts generate id token*](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/api::ProjectServiceAccountGenerateIdTokenCall), [*service accounts sign blob*](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/api::ProjectServiceAccountSignBlobCall) and [*service accounts sign jwt*](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/api::ProjectServiceAccountSignJwtCall) + * [*service accounts generate access token*](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/api::ProjectServiceAccountGenerateAccessTokenCall), [*service accounts generate id token*](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/api::ProjectServiceAccountGenerateIdTokenCall), [*service accounts sign blob*](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/api::ProjectServiceAccountSignBlobCall) and [*service accounts sign jwt*](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/api::ProjectServiceAccountSignJwtCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary 
items: -* **[Hub](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/IAMCredentials)** +* **[Hub](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/IAMCredentials)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::CallBuilder) -* **[Resources](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::CallBuilder) +* **[Resources](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::Part)** + * **[Parts](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::Delegate) to the -[Method Builder](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::Delegate) to the +[Method Builder](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::RequestValue) and -[decodable](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::RequestValue) and +[decodable](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-iamcredentials1/5.0.2-beta-1+20230113/google_iamcredentials1/client::RequestValue) are moved +* [request values](https://docs.rs/google-iamcredentials1/5.0.2+20230113/google_iamcredentials1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/iamcredentials1/src/api.rs b/gen/iamcredentials1/src/api.rs index 499fcde587..2b9533a7fc 100644 --- a/gen/iamcredentials1/src/api.rs +++ b/gen/iamcredentials1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> IAMCredentials { IAMCredentials { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://iamcredentials.googleapis.com/".to_string(), _root_url: "https://iamcredentials.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> IAMCredentials { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/iamcredentials1/src/client.rs b/gen/iamcredentials1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/iamcredentials1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/iamcredentials1/src/lib.rs b/gen/iamcredentials1/src/lib.rs index 92e6838be1..2477dc23cf 100644 --- a/gen/iamcredentials1/src/lib.rs +++ b/gen/iamcredentials1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *IAM Credentials* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *iamcredentials:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *IAM Credentials* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *iamcredentials:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *IAM Credentials* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials). diff --git a/gen/iap1-cli/Cargo.toml b/gen/iap1-cli/Cargo.toml index fb5ce187be..84a9e24c22 100644 --- a/gen/iap1-cli/Cargo.toml +++ b/gen/iap1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-iap1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud IAP (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/iap1-cli" @@ -20,13 +20,13 @@ name = "iap1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-iap1] path = "../iap1" -version = "4.0.1+20220225" +version = "5.0.2+20230118" + diff --git a/gen/iap1-cli/README.md b/gen/iap1-cli/README.md index 0ee9a6f5dd..c9a595bdd7 100644 --- a/gen/iap1-cli/README.md +++ b/gen/iap1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Cloud IAP* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud IAP* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash iap1 [options] @@ -44,6 +44,11 @@ iap1 [options] brands-identity-aware-proxy-clients-list [-p ]... [-o ] brands-identity-aware-proxy-clients-reset-secret (-r )... [-p ]... [-o ] brands-list [-p ]... [-o ] + iap-tunnel-locations-dest-groups-create (-r )... [-p ]... [-o ] + iap-tunnel-locations-dest-groups-delete [-p ]... [-o ] + iap-tunnel-locations-dest-groups-get [-p ]... [-o ] + iap-tunnel-locations-dest-groups-list [-p ]... [-o ] + iap-tunnel-locations-dest-groups-patch (-r )... [-p ]... [-o ] iap1 --help Configuration: diff --git a/gen/iap1-cli/mkdocs.yml b/gen/iap1-cli/mkdocs.yml index e794f0a961..91e4ec1cdb 100644 --- a/gen/iap1-cli/mkdocs.yml +++ b/gen/iap1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud IAP v4.0.1+20220225 +site_name: Cloud IAP v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-iap1-cli site_description: A complete library to interact with Cloud IAP (protocol v1) @@ -7,21 +7,28 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/iap1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['methods_get-iam-policy.md', 'Methods', 'Get Iam Policy'] -- ['methods_get-iap-settings.md', 'Methods', 'Get Iap Settings'] -- ['methods_set-iam-policy.md', 'Methods', 'Set Iam Policy'] -- ['methods_test-iam-permissions.md', 'Methods', 'Test Iam Permissions'] -- ['methods_update-iap-settings.md', 'Methods', 'Update Iap Settings'] -- ['projects_brands-create.md', 'Projects', 'Brands Create'] -- ['projects_brands-get.md', 'Projects', 'Brands Get'] -- ['projects_brands-identity-aware-proxy-clients-create.md', 'Projects', 'Brands Identity Aware Proxy Clients Create'] -- ['projects_brands-identity-aware-proxy-clients-delete.md', 'Projects', 'Brands Identity Aware Proxy Clients 
Delete'] -- ['projects_brands-identity-aware-proxy-clients-get.md', 'Projects', 'Brands Identity Aware Proxy Clients Get'] -- ['projects_brands-identity-aware-proxy-clients-list.md', 'Projects', 'Brands Identity Aware Proxy Clients List'] -- ['projects_brands-identity-aware-proxy-clients-reset-secret.md', 'Projects', 'Brands Identity Aware Proxy Clients Reset Secret'] -- ['projects_brands-list.md', 'Projects', 'Brands List'] +nav: +- Home: 'index.md' +- 'Methods': + - 'Get Iam Policy': 'methods_get-iam-policy.md' + - 'Get Iap Settings': 'methods_get-iap-settings.md' + - 'Set Iam Policy': 'methods_set-iam-policy.md' + - 'Test Iam Permissions': 'methods_test-iam-permissions.md' + - 'Update Iap Settings': 'methods_update-iap-settings.md' +- 'Projects': + - 'Brands Create': 'projects_brands-create.md' + - 'Brands Get': 'projects_brands-get.md' + - 'Brands Identity Aware Proxy Clients Create': 'projects_brands-identity-aware-proxy-clients-create.md' + - 'Brands Identity Aware Proxy Clients Delete': 'projects_brands-identity-aware-proxy-clients-delete.md' + - 'Brands Identity Aware Proxy Clients Get': 'projects_brands-identity-aware-proxy-clients-get.md' + - 'Brands Identity Aware Proxy Clients List': 'projects_brands-identity-aware-proxy-clients-list.md' + - 'Brands Identity Aware Proxy Clients Reset Secret': 'projects_brands-identity-aware-proxy-clients-reset-secret.md' + - 'Brands List': 'projects_brands-list.md' + - 'Iap Tunnel Locations Dest Groups Create': 'projects_iap-tunnel-locations-dest-groups-create.md' + - 'Iap Tunnel Locations Dest Groups Delete': 'projects_iap-tunnel-locations-dest-groups-delete.md' + - 'Iap Tunnel Locations Dest Groups Get': 'projects_iap-tunnel-locations-dest-groups-get.md' + - 'Iap Tunnel Locations Dest Groups List': 'projects_iap-tunnel-locations-dest-groups-list.md' + - 'Iap Tunnel Locations Dest Groups Patch': 'projects_iap-tunnel-locations-dest-groups-patch.md' theme: readthedocs diff --git a/gen/iap1-cli/src/client.rs 
b/gen/iap1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/iap1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/iap1-cli/src/main.rs b/gen/iap1-cli/src/main.rs index b5a588d29a..2c7b768ac6 100644 --- a/gen/iap1-cli/src/main.rs +++ b/gen/iap1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_iap1::{api, Error, oauth2}; +use google_iap1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -382,6 +381,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "access-settings.allowed-domains-settings.domains" => Some(("accessSettings.allowedDomainsSettings.domains", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "access-settings.allowed-domains-settings.enable" => Some(("accessSettings.allowedDomainsSettings.enable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "access-settings.cors-settings.allow-http-options" => Some(("accessSettings.corsSettings.allowHttpOptions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "access-settings.gcip-settings.login-page-uri" => Some(("accessSettings.gcipSettings.loginPageUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access-settings.gcip-settings.tenant-ids" => Some(("accessSettings.gcipSettings.tenantIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -400,11 +401,15 @@ where "access-settings.reauth-settings.policy-type" => Some(("accessSettings.reauthSettings.policyType", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "application-settings.access-denied-page-settings.access-denied-page-uri" => Some(("applicationSettings.accessDeniedPageSettings.accessDeniedPageUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "application-settings.access-denied-page-settings.generate-troubleshooting-uri" => Some(("applicationSettings.accessDeniedPageSettings.generateTroubleshootingUri", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "application-settings.access-denied-page-settings.remediation-token-generation-enabled" => Some(("applicationSettings.accessDeniedPageSettings.remediationTokenGenerationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "application-settings.attribute-propagation-settings.enable" => Some(("applicationSettings.attributePropagationSettings.enable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "application-settings.attribute-propagation-settings.expression" => Some(("applicationSettings.attributePropagationSettings.expression", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "application-settings.attribute-propagation-settings.output-credentials" => Some(("applicationSettings.attributePropagationSettings.outputCredentials", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "application-settings.cookie-domain" => Some(("applicationSettings.cookieDomain", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "application-settings.csm-settings.rctoken-aud" => Some(("applicationSettings.csmSettings.rctokenAud", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["access-denied-page-settings", "access-denied-page-uri", "access-settings", "allow-http-options", "application-settings", 
"cookie-domain", "cors-settings", "csm-settings", "gcip-settings", "generate-troubleshooting-uri", "iam-permission", "iam-service-name", "id", "labels", "login-hint", "login-page-uri", "max-age", "method", "name", "oauth-settings", "policy-delegation-settings", "policy-name", "policy-type", "rctoken-aud", "reauth-settings", "region", "resource", "service", "tenant-ids", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access-denied-page-settings", "access-denied-page-uri", "access-settings", "allow-http-options", "allowed-domains-settings", "application-settings", "attribute-propagation-settings", "cookie-domain", "cors-settings", "csm-settings", "domains", "enable", "expression", "gcip-settings", "generate-troubleshooting-uri", "iam-permission", "iam-service-name", "id", "labels", "login-hint", "login-page-uri", "max-age", "method", "name", "oauth-settings", "output-credentials", "policy-delegation-settings", "policy-name", "policy-type", "rctoken-aud", "reauth-settings", "region", "remediation-token-generation-enabled", "resource", "service", "tenant-ids", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -419,7 +424,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -809,7 +814,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -994,6 +999,351 @@ where } } + async fn _projects_iap_tunnel_locations_dest_groups_create(&self, opt: &ArgMatches<'n>, dry_run: 
bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "cidrs" => Some(("cidrs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "fqdns" => Some(("fqdns", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["cidrs", "fqdns", "name"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TunnelDestGroup = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().iap_tunnel_locations_dest_groups_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "tunnel-dest-group-id" => { + call = call.tunnel_dest_group_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + 
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["tunnel-dest-group-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_iap_tunnel_locations_dest_groups_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().iap_tunnel_locations_dest_groups_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v 
} )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_iap_tunnel_locations_dest_groups_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().iap_tunnel_locations_dest_groups_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut 
f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_iap_tunnel_locations_dest_groups_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().iap_tunnel_locations_dest_groups_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_iap_tunnel_locations_dest_groups_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "cidrs" => Some(("cidrs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "fqdns" => Some(("fqdns", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["cidrs", "fqdns", "name"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TunnelDestGroup = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().iap_tunnel_locations_dest_groups_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => 
return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -1048,6 +1398,21 @@ where ("brands-list", Some(opt)) => { call_result = self._projects_brands_list(opt, dry_run, &mut err).await; }, + ("iap-tunnel-locations-dest-groups-create", Some(opt)) => { + call_result = self._projects_iap_tunnel_locations_dest_groups_create(opt, dry_run, &mut err).await; + }, + ("iap-tunnel-locations-dest-groups-delete", Some(opt)) => { + call_result = self._projects_iap_tunnel_locations_dest_groups_delete(opt, dry_run, &mut err).await; + }, + ("iap-tunnel-locations-dest-groups-get", Some(opt)) => { + call_result = self._projects_iap_tunnel_locations_dest_groups_get(opt, dry_run, &mut err).await; + }, + ("iap-tunnel-locations-dest-groups-list", Some(opt)) => { + call_result = self._projects_iap_tunnel_locations_dest_groups_list(opt, dry_run, &mut err).await; + }, + ("iap-tunnel-locations-dest-groups-patch", Some(opt)) => { + call_result = self._projects_iap_tunnel_locations_dest_groups_patch(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -1134,7 +1499,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1184,7 +1549,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1212,7 +1577,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1264,7 +1629,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'brands-create', 'brands-get', 'brands-identity-aware-proxy-clients-create', 'brands-identity-aware-proxy-clients-delete', 'brands-identity-aware-proxy-clients-get', 'brands-identity-aware-proxy-clients-list', 'brands-identity-aware-proxy-clients-reset-secret' and 'brands-list'", vec![ + ("projects", "methods: 'brands-create', 'brands-get', 'brands-identity-aware-proxy-clients-create', 'brands-identity-aware-proxy-clients-delete', 'brands-identity-aware-proxy-clients-get', 'brands-identity-aware-proxy-clients-list', 'brands-identity-aware-proxy-clients-reset-secret', 'brands-list', 'iap-tunnel-locations-dest-groups-create', 'iap-tunnel-locations-dest-groups-delete', 'iap-tunnel-locations-dest-groups-get', 'iap-tunnel-locations-dest-groups-list' and 'iap-tunnel-locations-dest-groups-patch'", vec![ ("brands-create", Some(r##"Constructs a new OAuth brand for the project if one does not exist. The created brand is "internal only", meaning that OAuth clients created under it only accept requests from users who belong to the same Google Workspace organization as the project. The brand is created in an un-reviewed status. NOTE: The "internal only" status can be manually changed in the Google Cloud Console. 
Requires that a brand does not already exist for the project, and that the specified support email is owned by the caller."##), "Details at http://byron.github.io/google-apis-rs/google_iap1_cli/projects_brands-create", @@ -1453,6 +1818,128 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("iap-tunnel-locations-dest-groups-create", + Some(r##"Creates a new TunnelDestGroup."##), + "Details at http://byron.github.io/google-apis-rs/google_iap1_cli/projects_iap-tunnel-locations-dest-groups-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Google Cloud Project ID and location. In the following format: `projects/{project_number/id}/iap_tunnel/locations/{location}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("iap-tunnel-locations-dest-groups-delete", + Some(r##"Deletes a TunnelDestGroup."##), + "Details at http://byron.github.io/google-apis-rs/google_iap1_cli/projects_iap-tunnel-locations-dest-groups-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the TunnelDestGroup to delete. 
In the following format: `projects/{project_number/id}/iap_tunnel/locations/{location}/destGroups/{dest_group}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("iap-tunnel-locations-dest-groups-get", + Some(r##"Retrieves an existing TunnelDestGroup."##), + "Details at http://byron.github.io/google-apis-rs/google_iap1_cli/projects_iap-tunnel-locations-dest-groups-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the TunnelDestGroup to be fetched. In the following format: `projects/{project_number/id}/iap_tunnel/locations/{location}/destGroups/{dest_group}`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("iap-tunnel-locations-dest-groups-list", + Some(r##"Lists the existing TunnelDestGroups. To group across all locations, use a `-` as the location ID. For example: `/v1/projects/123/iap_tunnel/locations/-/destGroups`"##), + "Details at http://byron.github.io/google-apis-rs/google_iap1_cli/projects_iap-tunnel-locations-dest-groups-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Google Cloud Project ID and location. In the following format: `projects/{project_number/id}/iap_tunnel/locations/{location}`. 
A `-` can be used for the location to group across all locations."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("iap-tunnel-locations-dest-groups-patch", + Some(r##"Updates a TunnelDestGroup."##), + "Details at http://byron.github.io/google-apis-rs/google_iap1_cli/projects_iap-tunnel-locations-dest-groups-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Immutable. Identifier for the TunnelDestGroup. Must be unique within the project and contain only lower case letters (a-z) and dashes (-)."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1465,7 +1952,7 @@ async fn main() { let mut app = App::new("iap1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230118") .about("Controls access to cloud applications running on Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_iap1_cli") .arg(Arg::with_name("url") diff --git a/gen/iap1/Cargo.toml b/gen/iap1/Cargo.toml index 81f4b2a129..e614de0558 100644 --- a/gen/iap1/Cargo.toml +++ b/gen/iap1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-iap1" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud IAP (protocol v1)" 
repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/iap1" homepage = "https://cloud.google.com/iap" -documentation = "https://docs.rs/google-iap1/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-iap1/5.0.2+20230118" license = "MIT" keywords = ["iap", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/iap1/README.md b/gen/iap1/README.md index 16eafcf153..3a601e44a6 100644 --- a/gen/iap1/README.md +++ b/gen/iap1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-iap1` library allows access to all features of the *Google Cloud IAP* service. -This documentation was generated from *Cloud IAP* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *iap:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud IAP* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *iap:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud IAP* *v1* API can be found at the [official documentation site](https://cloud.google.com/iap). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/CloudIAP) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/CloudIAP) ... 
* projects - * [*brands create*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectBrandCreateCall), [*brands get*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectBrandGetCall), [*brands identity aware proxy clients create*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientCreateCall), [*brands identity aware proxy clients delete*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientDeleteCall), [*brands identity aware proxy clients get*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientGetCall), [*brands identity aware proxy clients list*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientListCall), [*brands identity aware proxy clients reset secret*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientResetSecretCall), [*brands list*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectBrandListCall), [*iap_tunnel locations dest groups create*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupCreateCall), [*iap_tunnel locations dest groups delete*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupDeleteCall), [*iap_tunnel locations dest groups get*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupGetCall), [*iap_tunnel locations dest groups list*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupListCall) and [*iap_tunnel locations dest groups patch*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupPatchCall) + * [*brands 
create*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectBrandCreateCall), [*brands get*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectBrandGetCall), [*brands identity aware proxy clients create*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientCreateCall), [*brands identity aware proxy clients delete*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientDeleteCall), [*brands identity aware proxy clients get*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientGetCall), [*brands identity aware proxy clients list*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientListCall), [*brands identity aware proxy clients reset secret*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectBrandIdentityAwareProxyClientResetSecretCall), [*brands list*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectBrandListCall), [*iap_tunnel locations dest groups create*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupCreateCall), [*iap_tunnel locations dest groups delete*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupDeleteCall), [*iap_tunnel locations dest groups get*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupGetCall), [*iap_tunnel locations dest groups list*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupListCall) and [*iap_tunnel locations dest groups patch*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::ProjectIapTunnelLocationDestGroupPatchCall) Other activities are ... 
-* [get iam policy](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::MethodGetIamPolicyCall) -* [get iap settings](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::MethodGetIapSettingCall) -* [set iam policy](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::MethodSetIamPolicyCall) -* [test iam permissions](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::MethodTestIamPermissionCall) -* [update iap settings](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/api::MethodUpdateIapSettingCall) +* [get iam policy](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::MethodGetIamPolicyCall) +* [get iap settings](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::MethodGetIapSettingCall) +* [set iam policy](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::MethodSetIamPolicyCall) +* [test iam permissions](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::MethodTestIamPermissionCall) +* [update iap settings](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/api::MethodUpdateIapSettingCall) @@ -30,17 +30,17 @@ Other activities are ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/CloudIAP)** +* **[Hub](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/CloudIAP)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::CallBuilder) -* **[Resources](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::CallBuilder) +* **[Resources](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::Part)** + * **[Parts](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::Delegate) to the -[Method Builder](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::Delegate) to the +[Method Builder](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::RequestValue) and -[decodable](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::RequestValue) and +[decodable](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-iap1/5.0.2-beta-1+20230118/google_iap1/client::RequestValue) are moved +* [request values](https://docs.rs/google-iap1/5.0.2+20230118/google_iap1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/iap1/src/api.rs b/gen/iap1/src/api.rs index cc4e50055d..d2438ebb01 100644 --- a/gen/iap1/src/api.rs +++ b/gen/iap1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudIAP { CloudIAP { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://iap.googleapis.com/".to_string(), _root_url: "https://iap.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> CloudIAP { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/iap1/src/client.rs b/gen/iap1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/iap1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/iap1/src/lib.rs b/gen/iap1/src/lib.rs index c0d04d9e3d..f8c2eb3937 100644 --- a/gen/iap1/src/lib.rs +++ b/gen/iap1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud IAP* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *iap:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud IAP* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *iap:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud IAP* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/iap). diff --git a/gen/iap1_beta1-cli/Cargo.toml b/gen/iap1_beta1-cli/Cargo.toml index 1ad3da77fd..6ede0ff006 100644 --- a/gen/iap1_beta1-cli/Cargo.toml +++ b/gen/iap1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-iap1_beta1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud IAP (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/iap1_beta1-cli" @@ -20,13 +20,13 @@ name = "iap1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-iap1_beta1] path = "../iap1_beta1" -version = "4.0.1+20220225" +version = "5.0.2+20230118" + diff --git a/gen/iap1_beta1-cli/README.md b/gen/iap1_beta1-cli/README.md index 1a2868cfa5..2a5cefd93b 100644 --- a/gen/iap1_beta1-cli/README.md +++ b/gen/iap1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *Cloud IAP* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud IAP* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash iap1-beta1 [options] diff --git a/gen/iap1_beta1-cli/mkdocs.yml b/gen/iap1_beta1-cli/mkdocs.yml index 18aac48f99..da1e64ce5c 100644 --- a/gen/iap1_beta1-cli/mkdocs.yml +++ b/gen/iap1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud IAP v4.0.1+20220225 +site_name: Cloud IAP v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-iap1_beta1-cli site_description: A complete library to interact with Cloud IAP (protocol v1beta1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/iap1_beta1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['methods_get-iam-policy.md', 'Methods', 'Get Iam Policy'] -- ['methods_set-iam-policy.md', 'Methods', 'Set Iam Policy'] -- ['methods_test-iam-permissions.md', 'Methods', 'Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Methods': + - 'Get Iam Policy': 'methods_get-iam-policy.md' + - 'Set Iam Policy': 'methods_set-iam-policy.md' + - 'Test Iam Permissions': 'methods_test-iam-permissions.md' theme: readthedocs diff --git a/gen/iap1_beta1-cli/src/client.rs b/gen/iap1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/iap1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - 
Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/iap1_beta1-cli/src/main.rs b/gen/iap1_beta1-cli/src/main.rs index edcba45923..2ea25bb419 100644 --- a/gen/iap1_beta1-cli/src/main.rs +++ b/gen/iap1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_iap1_beta1::{api, Error, oauth2}; +use google_iap1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -409,7 +408,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -437,7 +436,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -465,7 +464,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -493,7 +492,7 @@ async fn main() { let mut app = App::new("iap1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230118") .about("Controls access to cloud applications running on Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_iap1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/iap1_beta1/Cargo.toml b/gen/iap1_beta1/Cargo.toml index 47149ab3e4..008105dd8e 100644 --- a/gen/iap1_beta1/Cargo.toml +++ b/gen/iap1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-iap1_beta1" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud IAP (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/iap1_beta1" homepage = "https://cloud.google.com/iap" -documentation = "https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-iap1_beta1/5.0.2+20230118" license = "MIT" keywords = ["iap", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/iap1_beta1/README.md b/gen/iap1_beta1/README.md index 15cd29bdc9..8868676052 100644 --- a/gen/iap1_beta1/README.md +++ b/gen/iap1_beta1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-iap1_beta1` library allows access to all features of the *Google Cloud IAP* service. -This documentation was generated from *Cloud IAP* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *iap:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud IAP* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *iap:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud IAP* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/iap). # Features -Use the following functionality with ease from the central [hub](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/CloudIAP) ... +Use the following functionality with ease from the central [hub](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/CloudIAP) ... -* [get iam policy](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/api::MethodGetIamPolicyCall) -* [set iam policy](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/api::MethodSetIamPolicyCall) -* [test iam permissions](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/api::MethodTestIamPermissionCall) +* [get iam policy](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/api::MethodGetIamPolicyCall) +* [set iam policy](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/api::MethodSetIamPolicyCall) +* [test iam permissions](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/api::MethodTestIamPermissionCall) @@ -24,17 +24,17 @@ Use the following functionality with ease from the central [hub](https://docs.rs The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/CloudIAP)** +* **[Hub](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/CloudIAP)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::MethodsBuilder) which in turn - allow access to 
individual [*Call Builders*](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-iap1_beta1/5.0.2-beta-1+20230118/google_iap1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-iap1_beta1/5.0.2+20230118/google_iap1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/iap1_beta1/src/api.rs b/gen/iap1_beta1/src/api.rs index 7292e514df..0182262b5e 100644 --- a/gen/iap1_beta1/src/api.rs +++ b/gen/iap1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudIAP { CloudIAP { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://iap.googleapis.com/".to_string(), _root_url: "https://iap.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> CloudIAP { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/iap1_beta1/src/client.rs b/gen/iap1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/iap1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/iap1_beta1/src/lib.rs b/gen/iap1_beta1/src/lib.rs index 3c21fecd4f..45c7bd4880 100644 --- a/gen/iap1_beta1/src/lib.rs +++ b/gen/iap1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud IAP* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *iap:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud IAP* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *iap:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud IAP* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/iap). diff --git a/gen/ideahub1_beta-cli/Cargo.toml b/gen/ideahub1_beta-cli/Cargo.toml index c8b69af2c7..4f123b4f13 100644 --- a/gen/ideahub1_beta-cli/Cargo.toml +++ b/gen/ideahub1_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-ideahub1_beta-cli" -version = "4.0.1+20220305" +version = "5.0.2+20220305" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ideahub (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/ideahub1_beta-cli" @@ -20,13 +20,13 @@ name = "ideahub1-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-ideahub1_beta] path = "../ideahub1_beta" -version = "4.0.1+20220305" +version = "5.0.2+20220305" + diff --git a/gen/ideahub1_beta-cli/README.md b/gen/ideahub1_beta-cli/README.md index b0b697721a..28dd926942 100644 --- a/gen/ideahub1_beta-cli/README.md +++ b/gen/ideahub1_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Ideahub* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Ideahub* API at revision *20220305*. The CLI is at version *5.0.2*. ```bash ideahub1-beta [options] diff --git a/gen/ideahub1_beta-cli/mkdocs.yml b/gen/ideahub1_beta-cli/mkdocs.yml index 3b070990d0..ed238558ae 100644 --- a/gen/ideahub1_beta-cli/mkdocs.yml +++ b/gen/ideahub1_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Ideahub v4.0.1+20220305 +site_name: Ideahub v5.0.2+20220305 site_url: http://byron.github.io/google-apis-rs/google-ideahub1_beta-cli site_description: A complete library to interact with Ideahub (protocol v1beta) @@ -7,13 +7,14 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/ideahub1_beta-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['platforms_properties-idea-activities-create.md', 'Platforms', 'Properties Idea Activities Create'] -- ['platforms_properties-idea-states-patch.md', 'Platforms', 'Properties Idea States Patch'] -- ['platforms_properties-ideas-list.md', 'Platforms', 'Properties Ideas List'] -- ['platforms_properties-locales-list.md', 'Platforms', 'Properties Locales List'] -- ['platforms_properties-topic-states-patch.md', 'Platforms', 'Properties Topic States Patch'] +nav: +- Home: 'index.md' +- 'Platforms': + - 'Properties Idea Activities Create': 'platforms_properties-idea-activities-create.md' + - 'Properties Idea States Patch': 'platforms_properties-idea-states-patch.md' + - 'Properties Ideas List': 'platforms_properties-ideas-list.md' + - 'Properties Locales List': 'platforms_properties-locales-list.md' + - 'Properties Topic States Patch': 'platforms_properties-topic-states-patch.md' theme: readthedocs diff --git a/gen/ideahub1_beta-cli/src/client.rs b/gen/ideahub1_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- 
a/gen/ideahub1_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => 
Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut 
fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/ideahub1_beta-cli/src/main.rs b/gen/ideahub1_beta-cli/src/main.rs index b10f49ce4d..7f082fd851 100644 --- a/gen/ideahub1_beta-cli/src/main.rs +++ b/gen/ideahub1_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_ideahub1_beta::{api, Error, oauth2}; +use google_ideahub1_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -179,7 +178,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -235,7 +234,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -297,7 +296,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -385,7 +384,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { 
- call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -667,7 +666,7 @@ async fn main() { let mut app = App::new("ideahub1-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20220305") .about("This is an invitation-only API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_ideahub1_beta_cli") .arg(Arg::with_name("folder") diff --git a/gen/ideahub1_beta/Cargo.toml b/gen/ideahub1_beta/Cargo.toml index ec55fa08bf..5dc6e97d2b 100644 --- a/gen/ideahub1_beta/Cargo.toml +++ b/gen/ideahub1_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-ideahub1_beta" -version = "5.0.2-beta-1+20220305" +version = "5.0.2+20220305" authors = ["Sebastian Thiel "] description = "A complete library to interact with Ideahub (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/ideahub1_beta" homepage = "https://console.cloud.google.com/apis/library/ideahub.googleapis.com" -documentation = "https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305" +documentation = "https://docs.rs/google-ideahub1_beta/5.0.2+20220305" license = "MIT" keywords = ["ideahub", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/ideahub1_beta/README.md b/gen/ideahub1_beta/README.md index ca708959ac..e1557d9a9a 100644 --- a/gen/ideahub1_beta/README.md +++ b/gen/ideahub1_beta/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-ideahub1_beta` library allows access to all features of the *Google Ideahub* service. -This documentation was generated from *Ideahub* crate version *5.0.2-beta-1+20220305*, where *20220305* is the exact revision of the *ideahub:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Ideahub* crate version *5.0.2+20220305*, where *20220305* is the exact revision of the *ideahub:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Ideahub* *v1_beta* API can be found at the [official documentation site](https://console.cloud.google.com/apis/library/ideahub.googleapis.com). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/Ideahub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/Ideahub) ... * platforms - * [*properties idea activities create*](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/api::PlatformPropertyIdeaActivityCreateCall), [*properties idea states patch*](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/api::PlatformPropertyIdeaStatePatchCall), [*properties ideas list*](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/api::PlatformPropertyIdeaListCall), [*properties locales list*](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/api::PlatformPropertyLocaleListCall) and [*properties topic states patch*](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/api::PlatformPropertyTopicStatePatchCall) + * [*properties idea activities create*](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/api::PlatformPropertyIdeaActivityCreateCall), [*properties idea states patch*](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/api::PlatformPropertyIdeaStatePatchCall), [*properties ideas list*](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/api::PlatformPropertyIdeaListCall), [*properties locales 
list*](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/api::PlatformPropertyLocaleListCall) and [*properties topic states patch*](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/api::PlatformPropertyTopicStatePatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/Ideahub)** +* **[Hub](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/Ideahub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::Part)** + * **[Parts](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::CallBuilder)** 
+* **[Activities](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::RequestValue) and -[decodable](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::RequestValue) and +[decodable](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-ideahub1_beta/5.0.2-beta-1+20220305/google_ideahub1_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-ideahub1_beta/5.0.2+20220305/google_ideahub1_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/ideahub1_beta/src/api.rs b/gen/ideahub1_beta/src/api.rs index 61e4697cd6..9ff74f864a 100644 --- a/gen/ideahub1_beta/src/api.rs +++ b/gen/ideahub1_beta/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> Ideahub { Ideahub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://ideahub.googleapis.com/".to_string(), _root_url: "https://ideahub.googleapis.com/".to_string(), } @@ -114,7 +114,7 @@ impl<'a, S> Ideahub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/ideahub1_beta/src/client.rs b/gen/ideahub1_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/ideahub1_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/ideahub1_beta/src/lib.rs b/gen/ideahub1_beta/src/lib.rs index 5adfa4f59a..a7ee6cdffe 100644 --- a/gen/ideahub1_beta/src/lib.rs +++ b/gen/ideahub1_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Ideahub* crate version *5.0.2-beta-1+20220305*, where *20220305* is the exact revision of the *ideahub:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Ideahub* crate version *5.0.2+20220305*, where *20220305* is the exact revision of the *ideahub:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Ideahub* *v1_beta* API can be found at the //! [official documentation site](https://console.cloud.google.com/apis/library/ideahub.googleapis.com). diff --git a/gen/identitytoolkit3-cli/Cargo.toml b/gen/identitytoolkit3-cli/Cargo.toml index 7ef2dc187c..2c07a3f8b4 100644 --- a/gen/identitytoolkit3-cli/Cargo.toml +++ b/gen/identitytoolkit3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-identitytoolkit3-cli" -version = "4.0.1+20180723" +version = "5.0.2+20180723" authors = ["Sebastian Thiel "] description = "A complete library to interact with Identity Toolkit (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/identitytoolkit3-cli" @@ -20,13 +20,13 @@ name = "identitytoolkit3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-identitytoolkit3] path = "../identitytoolkit3" -version = "4.0.1+20180723" +version = "5.0.2+20180723" + diff --git a/gen/identitytoolkit3-cli/README.md b/gen/identitytoolkit3-cli/README.md index a3ee46b7ea..c5ad147d6c 100644 --- a/gen/identitytoolkit3-cli/README.md +++ b/gen/identitytoolkit3-cli/README.md 
@@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Identity Toolkit* API at revision *20180723*. The CLI is at version *4.0.1*. +This documentation was generated from the *Identity Toolkit* API at revision *20180723*. The CLI is at version *5.0.2*. ```bash identitytoolkit3 [options] diff --git a/gen/identitytoolkit3-cli/mkdocs.yml b/gen/identitytoolkit3-cli/mkdocs.yml index 568c92b7c8..c52edd5e2d 100644 --- a/gen/identitytoolkit3-cli/mkdocs.yml +++ b/gen/identitytoolkit3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Identity Toolkit v4.0.1+20180723 +site_name: Identity Toolkit v5.0.2+20180723 site_url: http://byron.github.io/google-apis-rs/google-identitytoolkit3-cli site_description: A complete library to interact with Identity Toolkit (protocol v3) @@ -7,28 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/identitytoolkit3 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['relyingparty_create-auth-uri.md', 'Relyingparty', 'Create Auth Uri'] -- ['relyingparty_delete-account.md', 'Relyingparty', 'Delete Account'] -- ['relyingparty_download-account.md', 'Relyingparty', 'Download Account'] -- ['relyingparty_email-link-signin.md', 'Relyingparty', 'Email Link Signin'] -- ['relyingparty_get-account-info.md', 'Relyingparty', 'Get Account Info'] -- ['relyingparty_get-oob-confirmation-code.md', 'Relyingparty', 'Get Oob Confirmation Code'] -- ['relyingparty_get-project-config.md', 'Relyingparty', 'Get Project Config'] -- ['relyingparty_get-public-keys.md', 'Relyingparty', 'Get Public Keys'] -- ['relyingparty_get-recaptcha-param.md', 'Relyingparty', 'Get Recaptcha Param'] -- ['relyingparty_reset-password.md', 'Relyingparty', 'Reset Password'] -- ['relyingparty_send-verification-code.md', 'Relyingparty', 'Send Verification Code'] -- ['relyingparty_set-account-info.md', 'Relyingparty', 'Set Account Info'] -- 
['relyingparty_set-project-config.md', 'Relyingparty', 'Set Project Config'] -- ['relyingparty_sign-out-user.md', 'Relyingparty', 'Sign Out User'] -- ['relyingparty_signup-new-user.md', 'Relyingparty', 'Signup New User'] -- ['relyingparty_upload-account.md', 'Relyingparty', 'Upload Account'] -- ['relyingparty_verify-assertion.md', 'Relyingparty', 'Verify Assertion'] -- ['relyingparty_verify-custom-token.md', 'Relyingparty', 'Verify Custom Token'] -- ['relyingparty_verify-password.md', 'Relyingparty', 'Verify Password'] -- ['relyingparty_verify-phone-number.md', 'Relyingparty', 'Verify Phone Number'] +nav: +- Home: 'index.md' +- 'Relyingparty': + - 'Create Auth Uri': 'relyingparty_create-auth-uri.md' + - 'Delete Account': 'relyingparty_delete-account.md' + - 'Download Account': 'relyingparty_download-account.md' + - 'Email Link Signin': 'relyingparty_email-link-signin.md' + - 'Get Account Info': 'relyingparty_get-account-info.md' + - 'Get Oob Confirmation Code': 'relyingparty_get-oob-confirmation-code.md' + - 'Get Project Config': 'relyingparty_get-project-config.md' + - 'Get Public Keys': 'relyingparty_get-public-keys.md' + - 'Get Recaptcha Param': 'relyingparty_get-recaptcha-param.md' + - 'Reset Password': 'relyingparty_reset-password.md' + - 'Send Verification Code': 'relyingparty_send-verification-code.md' + - 'Set Account Info': 'relyingparty_set-account-info.md' + - 'Set Project Config': 'relyingparty_set-project-config.md' + - 'Sign Out User': 'relyingparty_sign-out-user.md' + - 'Signup New User': 'relyingparty_signup-new-user.md' + - 'Upload Account': 'relyingparty_upload-account.md' + - 'Verify Assertion': 'relyingparty_verify-assertion.md' + - 'Verify Custom Token': 'relyingparty_verify-custom-token.md' + - 'Verify Password': 'relyingparty_verify-password.md' + - 'Verify Phone Number': 'relyingparty_verify-phone-number.md' theme: readthedocs diff --git a/gen/identitytoolkit3-cli/src/client.rs b/gen/identitytoolkit3-cli/src/client.rs deleted file mode 
100644 index 0ece418e7d..0000000000 --- a/gen/identitytoolkit3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None 
=> None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return 
Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - 
Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut 
InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset 
in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - 
FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/identitytoolkit3-cli/src/main.rs b/gen/identitytoolkit3-cli/src/main.rs index e7d6229646..f7187bfb38 100644 --- a/gen/identitytoolkit3-cli/src/main.rs +++ b/gen/identitytoolkit3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_identitytoolkit3::{api, Error, oauth2}; +use google_identitytoolkit3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -2385,7 +2384,7 @@ async fn main() { let mut app = App::new("identitytoolkit3") .author("Sebastian Thiel ") - .version("4.0.1+20180723") + .version("5.0.2+20180723") .about("Help the third party sites to implement federated login.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_identitytoolkit3_cli") .arg(Arg::with_name("url") diff --git a/gen/identitytoolkit3/Cargo.toml b/gen/identitytoolkit3/Cargo.toml index 85e2e2fc7d..c1488f894b 100644 --- a/gen/identitytoolkit3/Cargo.toml +++ b/gen/identitytoolkit3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-identitytoolkit3" -version = "5.0.2-beta-1+20180723" +version = "5.0.2+20180723" authors = ["Sebastian Thiel "] description = "A complete library to interact with Identity Toolkit (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/identitytoolkit3" homepage = "https://developers.google.com/identity-toolkit/v3/" -documentation = "https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723" +documentation = 
"https://docs.rs/google-identitytoolkit3/5.0.2+20180723" license = "MIT" keywords = ["identitytoolkit", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/identitytoolkit3/README.md b/gen/identitytoolkit3/README.md index d0bde38958..4269c8ecd2 100644 --- a/gen/identitytoolkit3/README.md +++ b/gen/identitytoolkit3/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-identitytoolkit3` library allows access to all features of the *Google Identity Toolkit* service. -This documentation was generated from *Identity Toolkit* crate version *5.0.2-beta-1+20180723*, where *20180723* is the exact revision of the *identitytoolkit:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Identity Toolkit* crate version *5.0.2+20180723*, where *20180723* is the exact revision of the *identitytoolkit:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Identity Toolkit* *v3* API can be found at the [official documentation site](https://developers.google.com/identity-toolkit/v3/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/IdentityToolkit) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/IdentityToolkit) ... 
-* [relyingparty](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::Relyingparty) - * [*create auth uri*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyCreateAuthUriCall), [*delete account*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyDeleteAccountCall), [*download account*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyDownloadAccountCall), [*email link signin*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyEmailLinkSigninCall), [*get account info*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyGetAccountInfoCall), [*get oob confirmation code*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyGetOobConfirmationCodeCall), [*get project config*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyGetProjectConfigCall), [*get public keys*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyGetPublicKeyCall), [*get recaptcha param*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyGetRecaptchaParamCall), [*reset password*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyResetPasswordCall), [*send verification code*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartySendVerificationCodeCall), [*set account info*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartySetAccountInfoCall), [*set project 
config*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartySetProjectConfigCall), [*sign out user*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartySignOutUserCall), [*signup new user*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartySignupNewUserCall), [*upload account*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyUploadAccountCall), [*verify assertion*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyVerifyAssertionCall), [*verify custom token*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyVerifyCustomTokenCall), [*verify password*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyVerifyPasswordCall) and [*verify phone number*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/api::RelyingpartyVerifyPhoneNumberCall) +* [relyingparty](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::Relyingparty) + * [*create auth uri*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyCreateAuthUriCall), [*delete account*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyDeleteAccountCall), [*download account*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyDownloadAccountCall), [*email link signin*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyEmailLinkSigninCall), [*get account info*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyGetAccountInfoCall), [*get oob confirmation 
code*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyGetOobConfirmationCodeCall), [*get project config*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyGetProjectConfigCall), [*get public keys*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyGetPublicKeyCall), [*get recaptcha param*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyGetRecaptchaParamCall), [*reset password*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyResetPasswordCall), [*send verification code*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartySendVerificationCodeCall), [*set account info*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartySetAccountInfoCall), [*set project config*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartySetProjectConfigCall), [*sign out user*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartySignOutUserCall), [*signup new user*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartySignupNewUserCall), [*upload account*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyUploadAccountCall), [*verify assertion*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyVerifyAssertionCall), [*verify custom token*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyVerifyCustomTokenCall), [*verify password*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyVerifyPasswordCall) and [*verify phone 
number*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/api::RelyingpartyVerifyPhoneNumberCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/IdentityToolkit)** +* **[Hub](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/IdentityToolkit)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::CallBuilder) -* **[Resources](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::CallBuilder) +* **[Resources](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::Part)** + * **[Parts](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::Delegate) to the -[Method Builder](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::Delegate) to the +[Method Builder](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::RequestValue) and -[decodable](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::RequestValue) and +[decodable](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-identitytoolkit3/5.0.2-beta-1+20180723/google_identitytoolkit3/client::RequestValue) are moved +* [request values](https://docs.rs/google-identitytoolkit3/5.0.2+20180723/google_identitytoolkit3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/identitytoolkit3/src/api.rs b/gen/identitytoolkit3/src/api.rs index 9e1a73cd2c..2b579a5a0e 100644 --- a/gen/identitytoolkit3/src/api.rs +++ b/gen/identitytoolkit3/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> IdentityToolkit { IdentityToolkit { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/identitytoolkit/v3/relyingparty/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> IdentityToolkit { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/identitytoolkit3/src/client.rs b/gen/identitytoolkit3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/identitytoolkit3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/identitytoolkit3/src/lib.rs b/gen/identitytoolkit3/src/lib.rs index eff41456e8..9dc6c18bca 100644 --- a/gen/identitytoolkit3/src/lib.rs +++ b/gen/identitytoolkit3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Identity Toolkit* crate version *5.0.2-beta-1+20180723*, where *20180723* is the exact revision of the *identitytoolkit:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Identity Toolkit* crate version *5.0.2+20180723*, where *20180723* is the exact revision of the *identitytoolkit:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Identity Toolkit* *v3* API can be found at the //! [official documentation site](https://developers.google.com/identity-toolkit/v3/). diff --git a/gen/ids1-cli/Cargo.toml b/gen/ids1-cli/Cargo.toml index b470fc6f4e..2df0708eec 100644 --- a/gen/ids1-cli/Cargo.toml +++ b/gen/ids1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-ids1-cli" -version = "4.0.1+20220221" +version = "5.0.2+20221113" authors = ["Sebastian Thiel "] description = "A complete library to interact with IDS (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/ids1-cli" @@ -20,13 +20,13 @@ name = "ids1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-ids1] path = "../ids1" -version = "4.0.1+20220221" +version = "5.0.2+20221113" + diff --git a/gen/ids1-cli/README.md b/gen/ids1-cli/README.md index 0d9382ecb5..eb9665d040 100644 --- a/gen/ids1-cli/README.md +++ b/gen/ids1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *IDS* 
API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *IDS* API at revision *20221113*. The CLI is at version *5.0.2*. ```bash ids1 [options] @@ -35,6 +35,7 @@ ids1 [options] locations-endpoints-get [-p ]... [-o ] locations-endpoints-get-iam-policy [-p ]... [-o ] locations-endpoints-list [-p ]... [-o ] + locations-endpoints-patch (-r )... [-p ]... [-o ] locations-endpoints-set-iam-policy (-r )... [-p ]... [-o ] locations-endpoints-test-iam-permissions (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] diff --git a/gen/ids1-cli/mkdocs.yml b/gen/ids1-cli/mkdocs.yml index 157809eeee..e46eb2c635 100644 --- a/gen/ids1-cli/mkdocs.yml +++ b/gen/ids1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: IDS v4.0.1+20220221 +site_name: IDS v5.0.2+20221113 site_url: http://byron.github.io/google-apis-rs/google-ids1-cli site_description: A complete library to interact with IDS (protocol v1) @@ -7,21 +7,23 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/ids1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-endpoints-create.md', 'Projects', 'Locations Endpoints Create'] -- ['projects_locations-endpoints-delete.md', 'Projects', 'Locations Endpoints Delete'] -- ['projects_locations-endpoints-get.md', 'Projects', 'Locations Endpoints Get'] -- ['projects_locations-endpoints-get-iam-policy.md', 'Projects', 'Locations Endpoints Get Iam Policy'] -- ['projects_locations-endpoints-list.md', 'Projects', 'Locations Endpoints List'] -- ['projects_locations-endpoints-set-iam-policy.md', 'Projects', 'Locations Endpoints Set Iam Policy'] -- ['projects_locations-endpoints-test-iam-permissions.md', 'Projects', 'Locations Endpoints Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- 
['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Endpoints Create': 'projects_locations-endpoints-create.md' + - 'Locations Endpoints Delete': 'projects_locations-endpoints-delete.md' + - 'Locations Endpoints Get': 'projects_locations-endpoints-get.md' + - 'Locations Endpoints Get Iam Policy': 'projects_locations-endpoints-get-iam-policy.md' + - 'Locations Endpoints List': 'projects_locations-endpoints-list.md' + - 'Locations Endpoints Patch': 'projects_locations-endpoints-patch.md' + - 'Locations Endpoints Set Iam Policy': 'projects_locations-endpoints-set-iam-policy.md' + - 'Locations Endpoints Test Iam Permissions': 'projects_locations-endpoints-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/ids1-cli/src/client.rs b/gen/ids1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/ids1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use 
std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/ids1-cli/src/main.rs b/gen/ids1-cli/src/main.rs index d1e6b7a128..79eb4ff4bc 100644 --- a/gen/ids1-cli/src/main.rs +++ b/gen/ids1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_ids1::{api, Error, oauth2}; +use google_ids1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -83,10 +82,11 @@ where "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "severity" => Some(("severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "threat-exceptions" => Some(("threatExceptions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "traffic-logs" => Some(("trafficLogs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "endpoint-forwarding-rule", "endpoint-ip", "labels", "name", "network", "severity", "state", "traffic-logs", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "endpoint-forwarding-rule", "endpoint-ip", "labels", "name", "network", "severity", "state", "threat-exceptions", "traffic-logs", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -268,7 +268,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -327,7 +327,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -382,6 +382,109 @@ where } } + async fn _projects_locations_endpoints_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "endpoint-forwarding-rule" => Some(("endpointForwardingRule", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "endpoint-ip" => Some(("endpointIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "severity" => Some(("severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "threat-exceptions" => Some(("threatExceptions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "traffic-logs" => Some(("trafficLogs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "endpoint-forwarding-rule", "endpoint-ip", "labels", "name", "network", "severity", "state", "threat-exceptions", "traffic-logs", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Endpoint = json::value::from_value(object).unwrap(); + let mut call = 
self.hub.projects().locations_endpoints_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_endpoints_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -616,7 +719,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -866,7 +969,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -940,6 +1043,9 @@ where ("locations-endpoints-list", Some(opt)) => { call_result = self._projects_locations_endpoints_list(opt, dry_run, &mut err).await; }, + ("locations-endpoints-patch", Some(opt)) => { + call_result = self._projects_locations_endpoints_patch(opt, dry_run, &mut err).await; + }, ("locations-endpoints-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_endpoints_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -1043,7 +1149,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-endpoints-create', 'locations-endpoints-delete', 'locations-endpoints-get', 'locations-endpoints-get-iam-policy', 'locations-endpoints-list', 'locations-endpoints-set-iam-policy', 'locations-endpoints-test-iam-permissions', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-endpoints-create', 'locations-endpoints-delete', 'locations-endpoints-get', 'locations-endpoints-get-iam-policy', 'locations-endpoints-list', 'locations-endpoints-patch', 'locations-endpoints-set-iam-policy', 'locations-endpoints-test-iam-permissions', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 
'locations-operations-get' and 'locations-operations-list'", vec![ ("locations-endpoints-create", Some(r##"Creates a new Endpoint in a given project and location."##), "Details at http://byron.github.io/google-apis-rs/google_ids1_cli/projects_locations-endpoints-create", @@ -1122,7 +1228,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1154,6 +1260,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-endpoints-patch", + Some(r##"Updates the parameters of a single Endpoint."##), + "Details at http://byron.github.io/google-apis-rs/google_ids1_cli/projects_locations-endpoints-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. The name of the endpoint."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1166,7 +1300,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1194,7 +1328,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1360,7 +1494,7 @@ async fn main() { let mut app = App::new("ids1") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20221113") .about("Cloud IDS (Cloud Intrusion Detection System) detects malware, spyware, command-and-control attacks, and other network-based threats. Its security efficacy is industry leading, built with Palo Alto Networks technologies. When you use this product, your organization name and consumption levels will be shared with Palo Alto Networks.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_ids1_cli") .arg(Arg::with_name("url") diff --git a/gen/ids1/Cargo.toml b/gen/ids1/Cargo.toml index 9fcb0150e6..bd244df327 100644 --- a/gen/ids1/Cargo.toml +++ b/gen/ids1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-ids1" -version = "5.0.2-beta-1+20221113" +version = "5.0.2+20221113" authors = ["Sebastian Thiel "] description = "A complete library to interact with IDS (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/ids1" homepage = "https://cloud.google.com/" -documentation = "https://docs.rs/google-ids1/5.0.2-beta-1+20221113" +documentation = "https://docs.rs/google-ids1/5.0.2+20221113" license = "MIT" keywords = ["ids", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/ids1/README.md b/gen/ids1/README.md index 
7008ec27b9..135d398b3a 100644 --- a/gen/ids1/README.md +++ b/gen/ids1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-ids1` library allows access to all features of the *Google IDS* service. -This documentation was generated from *IDS* crate version *5.0.2-beta-1+20221113*, where *20221113* is the exact revision of the *ids:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *IDS* crate version *5.0.2+20221113*, where *20221113* is the exact revision of the *ids:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *IDS* *v1* API can be found at the [official documentation site](https://cloud.google.com/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/IDS) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/IDS) ... 
* projects - * [*locations endpoints create*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationEndpointCreateCall), [*locations endpoints delete*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationEndpointDeleteCall), [*locations endpoints get*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationEndpointGetCall), [*locations endpoints get iam policy*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationEndpointGetIamPolicyCall), [*locations endpoints list*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationEndpointListCall), [*locations endpoints patch*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationEndpointPatchCall), [*locations endpoints set iam policy*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationEndpointSetIamPolicyCall), [*locations endpoints test iam permissions*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationEndpointTestIamPermissionCall), [*locations get*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/api::ProjectLocationOperationListCall) + * [*locations endpoints 
create*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationEndpointCreateCall), [*locations endpoints delete*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationEndpointDeleteCall), [*locations endpoints get*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationEndpointGetCall), [*locations endpoints get iam policy*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationEndpointGetIamPolicyCall), [*locations endpoints list*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationEndpointListCall), [*locations endpoints patch*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationEndpointPatchCall), [*locations endpoints set iam policy*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationEndpointSetIamPolicyCall), [*locations endpoints test iam permissions*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationEndpointTestIamPermissionCall), [*locations get*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/IDS)** +* 
**[Hub](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/IDS)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::CallBuilder) -* **[Resources](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::CallBuilder) +* **[Resources](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::Part)** + * **[Parts](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::Delegate) to the -[Method Builder](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::Delegate) to the +[Method Builder](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::RequestValue) and -[decodable](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::RequestValue) and +[decodable](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-ids1/5.0.2-beta-1+20221113/google_ids1/client::RequestValue) are moved +* [request values](https://docs.rs/google-ids1/5.0.2+20221113/google_ids1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/ids1/src/api.rs b/gen/ids1/src/api.rs index 63b73cb303..a594e5dfde 100644 --- a/gen/ids1/src/api.rs +++ b/gen/ids1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> IDS { IDS { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://ids.googleapis.com/".to_string(), _root_url: "https://ids.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> IDS { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. 
/// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/ids1/src/client.rs b/gen/ids1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/ids1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/ids1/src/lib.rs b/gen/ids1/src/lib.rs index 01548402fb..e6342a076f 100644 --- a/gen/ids1/src/lib.rs +++ b/gen/ids1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *IDS* crate version *5.0.2-beta-1+20221113*, where *20221113* is the exact revision of the *ids:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *IDS* crate version *5.0.2+20221113*, where *20221113* is the exact revision of the *ids:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *IDS* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/). diff --git a/gen/indexing3-cli/Cargo.toml b/gen/indexing3-cli/Cargo.toml index 3f46a39f8d..cf93762550 100644 --- a/gen/indexing3-cli/Cargo.toml +++ b/gen/indexing3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-indexing3-cli" -version = "4.0.1+20220126" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Indexing (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/indexing3-cli" @@ -20,13 +20,13 @@ name = "indexing3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-indexing3] path = "../indexing3" -version = "4.0.1+20220126" +version = "5.0.2+20230117" + diff --git a/gen/indexing3-cli/README.md b/gen/indexing3-cli/README.md index e1ab4099ce..589fceda23 100644 --- a/gen/indexing3-cli/README.md +++ b/gen/indexing3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Indexing* API at revision 
*20220126*. The CLI is at version *4.0.1*. +This documentation was generated from the *Indexing* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash indexing3 [options] diff --git a/gen/indexing3-cli/mkdocs.yml b/gen/indexing3-cli/mkdocs.yml index d128adff3b..f294a01c57 100644 --- a/gen/indexing3-cli/mkdocs.yml +++ b/gen/indexing3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Indexing v4.0.1+20220126 +site_name: Indexing v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-indexing3-cli site_description: A complete library to interact with Indexing (protocol v3) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/indexing3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['url-notifications_get-metadata.md', 'Url Notifications', 'Get Metadata'] -- ['url-notifications_publish.md', 'Url Notifications', 'Publish'] +nav: +- Home: 'index.md' +- 'Url Notifications': + - 'Get Metadata': 'url-notifications_get-metadata.md' + - 'Publish': 'url-notifications_publish.md' theme: readthedocs diff --git a/gen/indexing3-cli/src/client.rs b/gen/indexing3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/indexing3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - 
-pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/indexing3-cli/src/main.rs b/gen/indexing3-cli/src/main.rs index 5a3deddb24..6c415cec59 100644 --- a/gen/indexing3-cli/src/main.rs +++ b/gen/indexing3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_indexing3::{api, Error, oauth2}; +use google_indexing3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -331,7 +330,7 @@ async fn main() { let mut app = App::new("indexing3") .author("Sebastian Thiel ") - .version("4.0.1+20220126") + .version("5.0.2+20230117") .about("Notifies Google when your web pages change.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_indexing3_cli") .arg(Arg::with_name("url") diff --git a/gen/indexing3/Cargo.toml b/gen/indexing3/Cargo.toml index 1b50c93477..dc7b46b2c9 100644 --- a/gen/indexing3/Cargo.toml +++ b/gen/indexing3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-indexing3" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Indexing (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/indexing3" homepage = "https://developers.google.com/search/apis/indexing-api/" -documentation = "https://docs.rs/google-indexing3/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-indexing3/5.0.2+20230117" license = "MIT" keywords = ["indexing", "google", "protocol", "web", "api"] 
autobins = false diff --git a/gen/indexing3/README.md b/gen/indexing3/README.md index 4718b48682..d53030388e 100644 --- a/gen/indexing3/README.md +++ b/gen/indexing3/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-indexing3` library allows access to all features of the *Google Indexing* service. -This documentation was generated from *Indexing* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *indexing:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Indexing* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *indexing:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Indexing* *v3* API can be found at the [official documentation site](https://developers.google.com/search/apis/indexing-api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/Indexing) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/Indexing) ... 
-* [url notifications](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/api::UrlNotification) - * [*get metadata*](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/api::UrlNotificationGetMetadataCall) and [*publish*](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/api::UrlNotificationPublishCall) +* [url notifications](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/api::UrlNotification) + * [*get metadata*](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/api::UrlNotificationGetMetadataCall) and [*publish*](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/api::UrlNotificationPublishCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/Indexing)** +* **[Hub](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/Indexing)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::CallBuilder) -* **[Resources](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::CallBuilder) +* **[Resources](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::Part)** + * **[Parts](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -120,17 +120,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -140,29 +140,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::Delegate) to the -[Method Builder](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::Delegate) to the +[Method Builder](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::RequestValue) and -[decodable](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::RequestValue) and +[decodable](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-indexing3/5.0.2-beta-1+20230117/google_indexing3/client::RequestValue) are moved +* [request values](https://docs.rs/google-indexing3/5.0.2+20230117/google_indexing3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/indexing3/src/api.rs b/gen/indexing3/src/api.rs index 19e90bf1e5..561171431a 100644 --- a/gen/indexing3/src/api.rs +++ b/gen/indexing3/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> Indexing { Indexing { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://indexing.googleapis.com/".to_string(), _root_url: "https://indexing.googleapis.com/".to_string(), } @@ -131,7 +131,7 @@ impl<'a, S> Indexing { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/indexing3/src/client.rs b/gen/indexing3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/indexing3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/indexing3/src/lib.rs b/gen/indexing3/src/lib.rs index 8f854d3f65..38e0913ba4 100644 --- a/gen/indexing3/src/lib.rs +++ b/gen/indexing3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Indexing* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *indexing:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Indexing* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *indexing:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Indexing* *v3* API can be found at the //! [official documentation site](https://developers.google.com/search/apis/indexing-api/). diff --git a/gen/jobs3-cli/Cargo.toml b/gen/jobs3-cli/Cargo.toml index e1ad43d18b..f3c97004d5 100644 --- a/gen/jobs3-cli/Cargo.toml +++ b/gen/jobs3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-jobs3-cli" -version = "4.0.1+20220211" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Talent Solution (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/jobs3-cli" @@ -20,13 +20,13 @@ name = "jobs3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-jobs3] path = "../jobs3" -version = "4.0.1+20220211" +version = "5.0.2+20230120" + diff --git a/gen/jobs3-cli/README.md b/gen/jobs3-cli/README.md index 49f11c06fa..4a3ce3f34f 100644 --- a/gen/jobs3-cli/README.md +++ b/gen/jobs3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud 
Talent Solution* API at revision *20220211*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Talent Solution* API at revision *20230120*. The CLI is at version *5.0.2*. ```bash jobs3 [options] diff --git a/gen/jobs3-cli/mkdocs.yml b/gen/jobs3-cli/mkdocs.yml index 41e2f82cac..bfdb877f40 100644 --- a/gen/jobs3-cli/mkdocs.yml +++ b/gen/jobs3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Talent Solution v4.0.1+20220211 +site_name: Cloud Talent Solution v5.0.2+20230120 site_url: http://byron.github.io/google-apis-rs/google-jobs3-cli site_description: A complete library to interact with Cloud Talent Solution (protocol v3) @@ -7,23 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/jobs3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_client-events-create.md', 'Projects', 'Client Events Create'] -- ['projects_companies-create.md', 'Projects', 'Companies Create'] -- ['projects_companies-delete.md', 'Projects', 'Companies Delete'] -- ['projects_companies-get.md', 'Projects', 'Companies Get'] -- ['projects_companies-list.md', 'Projects', 'Companies List'] -- ['projects_companies-patch.md', 'Projects', 'Companies Patch'] -- ['projects_complete.md', 'Projects', 'Complete'] -- ['projects_jobs-batch-delete.md', 'Projects', 'Jobs Batch Delete'] -- ['projects_jobs-create.md', 'Projects', 'Jobs Create'] -- ['projects_jobs-delete.md', 'Projects', 'Jobs Delete'] -- ['projects_jobs-get.md', 'Projects', 'Jobs Get'] -- ['projects_jobs-list.md', 'Projects', 'Jobs List'] -- ['projects_jobs-patch.md', 'Projects', 'Jobs Patch'] -- ['projects_jobs-search.md', 'Projects', 'Jobs Search'] -- ['projects_jobs-search-for-alert.md', 'Projects', 'Jobs Search For Alert'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Client Events Create': 'projects_client-events-create.md' + - 'Companies Create': 'projects_companies-create.md' + - 'Companies Delete': 'projects_companies-delete.md' + - 'Companies Get': 
'projects_companies-get.md' + - 'Companies List': 'projects_companies-list.md' + - 'Companies Patch': 'projects_companies-patch.md' + - 'Complete': 'projects_complete.md' + - 'Jobs Batch Delete': 'projects_jobs-batch-delete.md' + - 'Jobs Create': 'projects_jobs-create.md' + - 'Jobs Delete': 'projects_jobs-delete.md' + - 'Jobs Get': 'projects_jobs-get.md' + - 'Jobs List': 'projects_jobs-list.md' + - 'Jobs Patch': 'projects_jobs-patch.md' + - 'Jobs Search': 'projects_jobs-search.md' + - 'Jobs Search For Alert': 'projects_jobs-search-for-alert.md' theme: readthedocs diff --git a/gen/jobs3-cli/src/client.rs b/gen/jobs3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/jobs3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/jobs3-cli/src/main.rs b/gen/jobs3-cli/src/main.rs index 80517b14ee..5d4560e612 100644 --- a/gen/jobs3-cli/src/main.rs +++ b/gen/jobs3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_jobs3::{api, Error, oauth2}; +use google_jobs3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -364,13 +363,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "require-open-jobs" => { - call = call.require_open_jobs(arg_from_str(value.unwrap_or("false"), err, "require-open-jobs", "boolean")); + call = call.require_open_jobs( value.map(|v| arg_from_str(v, err, "require-open-jobs", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -547,7 +546,7 @@ where call = call.query(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-codes" => { call = call.add_language_codes(value.unwrap_or("")); @@ -931,7 +930,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "job-view" => { call = call.job_view(value.unwrap_or("")); @@ -1890,7 +1889,7 @@ async fn main() { let mut app = App::new("jobs3") .author("Sebastian Thiel ") - .version("4.0.1+20220211") + .version("5.0.2+20230120") .about("Cloud Talent Solution provides the capability to create, read, update, and delete job postings, as well as search jobs based on keywords and filters. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_jobs3_cli") .arg(Arg::with_name("url") diff --git a/gen/jobs3/Cargo.toml b/gen/jobs3/Cargo.toml index b3d240bcae..176d5229aa 100644 --- a/gen/jobs3/Cargo.toml +++ b/gen/jobs3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-jobs3" -version = "5.0.2-beta-1+20230120" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Talent Solution (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/jobs3" homepage = "https://cloud.google.com/talent-solution/job-search/docs/" -documentation = "https://docs.rs/google-jobs3/5.0.2-beta-1+20230120" +documentation = "https://docs.rs/google-jobs3/5.0.2+20230120" license = "MIT" keywords = ["jobs", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/jobs3/README.md b/gen/jobs3/README.md index b7efa49c43..c492cb3931 100644 --- a/gen/jobs3/README.md +++ b/gen/jobs3/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-jobs3` library allows access to all features of the *Google Cloud Talent Solution* service. -This documentation was generated from *Cloud Talent Solution* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *jobs:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Talent Solution* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *jobs:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Talent Solution* *v3* API can be found at the [official documentation site](https://cloud.google.com/talent-solution/job-search/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/CloudTalentSolution) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/CloudTalentSolution) ... * projects - * [*client events create*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectClientEventCreateCall), [*companies create*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectCompanyCreateCall), [*companies delete*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectCompanyDeleteCall), [*companies get*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectCompanyGetCall), [*companies list*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectCompanyListCall), [*companies patch*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectCompanyPatchCall), [*complete*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectCompleteCall), [*jobs batch delete*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectJobBatchDeleteCall), [*jobs create*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectJobCreateCall), [*jobs delete*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectJobDeleteCall), [*jobs get*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectJobGetCall), [*jobs 
list*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectJobListCall), [*jobs patch*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectJobPatchCall), [*jobs search*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectJobSearchCall) and [*jobs search for alert*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/api::ProjectJobSearchForAlertCall) + * [*client events create*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectClientEventCreateCall), [*companies create*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectCompanyCreateCall), [*companies delete*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectCompanyDeleteCall), [*companies get*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectCompanyGetCall), [*companies list*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectCompanyListCall), [*companies patch*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectCompanyPatchCall), [*complete*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectCompleteCall), [*jobs batch delete*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectJobBatchDeleteCall), [*jobs create*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectJobCreateCall), [*jobs delete*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectJobDeleteCall), [*jobs get*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectJobGetCall), [*jobs list*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectJobListCall), [*jobs patch*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectJobPatchCall), [*jobs search*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectJobSearchCall) and [*jobs search for 
alert*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/api::ProjectJobSearchForAlertCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/CloudTalentSolution)** +* **[Hub](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/CloudTalentSolution)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::CallBuilder) -* **[Resources](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::CallBuilder) +* **[Resources](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::Part)** + * **[Parts](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::Delegate) to the -[Method Builder](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::Delegate) to the +[Method Builder](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::RequestValue) and -[decodable](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::RequestValue) and +[decodable](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-jobs3/5.0.2-beta-1+20230120/google_jobs3/client::RequestValue) are moved +* [request values](https://docs.rs/google-jobs3/5.0.2+20230120/google_jobs3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/jobs3/src/api.rs b/gen/jobs3/src/api.rs index 5f2e67f528..f119022e81 100644 --- a/gen/jobs3/src/api.rs +++ b/gen/jobs3/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> CloudTalentSolution { CloudTalentSolution { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://jobs.googleapis.com/".to_string(), _root_url: "https://jobs.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudTalentSolution { } /// Set the user-agent header field to use in all requests to the server. 
- /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/jobs3/src/client.rs b/gen/jobs3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/jobs3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/jobs3/src/lib.rs b/gen/jobs3/src/lib.rs index cfc0915498..58a43e6847 100644 --- a/gen/jobs3/src/lib.rs +++ b/gen/jobs3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Talent Solution* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *jobs:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Talent Solution* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *jobs:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Talent Solution* *v3* API can be found at the //! [official documentation site](https://cloud.google.com/talent-solution/job-search/docs/). diff --git a/gen/jobs4-cli/Cargo.toml b/gen/jobs4-cli/Cargo.toml index e83e8a9a9f..269fe06ff3 100644 --- a/gen/jobs4-cli/Cargo.toml +++ b/gen/jobs4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-jobs4-cli" -version = "4.0.1+20220211" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Talent Solution (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/jobs4-cli" @@ -20,13 +20,13 @@ name = "jobs4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-jobs4] path = "../jobs4" -version = "4.0.1+20220211" +version = "5.0.2+20230120" + diff --git a/gen/jobs4-cli/README.md b/gen/jobs4-cli/README.md index df0f8e22b0..f029134b6d 100644 --- a/gen/jobs4-cli/README.md +++ b/gen/jobs4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation 
was generated from the *Cloud Talent Solution* API at revision *20220211*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Talent Solution* API at revision *20230120*. The CLI is at version *5.0.2*. ```bash jobs4 [options] diff --git a/gen/jobs4-cli/mkdocs.yml b/gen/jobs4-cli/mkdocs.yml index fffcbf79b3..a72d2333c3 100644 --- a/gen/jobs4-cli/mkdocs.yml +++ b/gen/jobs4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Talent Solution v4.0.1+20220211 +site_name: Cloud Talent Solution v5.0.2+20230120 site_url: http://byron.github.io/google-apis-rs/google-jobs4-cli site_description: A complete library to interact with Cloud Talent Solution (protocol v4) @@ -7,31 +7,32 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/jobs4-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_tenants-client-events-create.md', 'Projects', 'Tenants Client Events Create'] -- ['projects_tenants-companies-create.md', 'Projects', 'Tenants Companies Create'] -- ['projects_tenants-companies-delete.md', 'Projects', 'Tenants Companies Delete'] -- ['projects_tenants-companies-get.md', 'Projects', 'Tenants Companies Get'] -- ['projects_tenants-companies-list.md', 'Projects', 'Tenants Companies List'] -- ['projects_tenants-companies-patch.md', 'Projects', 'Tenants Companies Patch'] -- ['projects_tenants-complete-query.md', 'Projects', 'Tenants Complete Query'] -- ['projects_tenants-create.md', 'Projects', 'Tenants Create'] -- ['projects_tenants-delete.md', 'Projects', 'Tenants Delete'] -- ['projects_tenants-get.md', 'Projects', 'Tenants Get'] -- ['projects_tenants-jobs-batch-create.md', 'Projects', 'Tenants Jobs Batch Create'] -- ['projects_tenants-jobs-batch-delete.md', 'Projects', 'Tenants Jobs Batch Delete'] -- ['projects_tenants-jobs-batch-update.md', 'Projects', 'Tenants Jobs Batch Update'] -- ['projects_tenants-jobs-create.md', 'Projects', 'Tenants 
Jobs Create'] -- ['projects_tenants-jobs-delete.md', 'Projects', 'Tenants Jobs Delete'] -- ['projects_tenants-jobs-get.md', 'Projects', 'Tenants Jobs Get'] -- ['projects_tenants-jobs-list.md', 'Projects', 'Tenants Jobs List'] -- ['projects_tenants-jobs-patch.md', 'Projects', 'Tenants Jobs Patch'] -- ['projects_tenants-jobs-search.md', 'Projects', 'Tenants Jobs Search'] -- ['projects_tenants-jobs-search-for-alert.md', 'Projects', 'Tenants Jobs Search For Alert'] -- ['projects_tenants-list.md', 'Projects', 'Tenants List'] -- ['projects_tenants-patch.md', 'Projects', 'Tenants Patch'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Operations Get': 'projects_operations-get.md' + - 'Tenants Client Events Create': 'projects_tenants-client-events-create.md' + - 'Tenants Companies Create': 'projects_tenants-companies-create.md' + - 'Tenants Companies Delete': 'projects_tenants-companies-delete.md' + - 'Tenants Companies Get': 'projects_tenants-companies-get.md' + - 'Tenants Companies List': 'projects_tenants-companies-list.md' + - 'Tenants Companies Patch': 'projects_tenants-companies-patch.md' + - 'Tenants Complete Query': 'projects_tenants-complete-query.md' + - 'Tenants Create': 'projects_tenants-create.md' + - 'Tenants Delete': 'projects_tenants-delete.md' + - 'Tenants Get': 'projects_tenants-get.md' + - 'Tenants Jobs Batch Create': 'projects_tenants-jobs-batch-create.md' + - 'Tenants Jobs Batch Delete': 'projects_tenants-jobs-batch-delete.md' + - 'Tenants Jobs Batch Update': 'projects_tenants-jobs-batch-update.md' + - 'Tenants Jobs Create': 'projects_tenants-jobs-create.md' + - 'Tenants Jobs Delete': 'projects_tenants-jobs-delete.md' + - 'Tenants Jobs Get': 'projects_tenants-jobs-get.md' + - 'Tenants Jobs List': 'projects_tenants-jobs-list.md' + - 'Tenants Jobs Patch': 'projects_tenants-jobs-patch.md' + - 'Tenants Jobs Search': 'projects_tenants-jobs-search.md' + - 'Tenants Jobs Search For Alert': 'projects_tenants-jobs-search-for-alert.md' + - 'Tenants List': 
'projects_tenants-list.md' + - 'Tenants Patch': 'projects_tenants-patch.md' theme: readthedocs diff --git a/gen/jobs4-cli/src/client.rs b/gen/jobs4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/jobs4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/jobs4-cli/src/main.rs b/gen/jobs4-cli/src/main.rs index 70f5a9dcae..63fce8e9fe 100644 --- a/gen/jobs4-cli/src/main.rs +++ b/gen/jobs4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_jobs4::{api, Error, oauth2}; +use google_jobs4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -415,13 +414,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "require-open-jobs" => { - call = call.require_open_jobs(arg_from_str(value.unwrap_or("false"), err, "require-open-jobs", "boolean")); + call = call.require_open_jobs( value.map(|v| arg_from_str(v, err, "require-open-jobs", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -536,7 +535,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -601,7 +600,7 @@ where call = call.query(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, 
"page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-codes" => { call = call.add_language_codes(value.unwrap_or("")); @@ -1341,7 +1340,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "job-view" => { call = call.job_view(value.unwrap_or("")); @@ -1478,7 +1477,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1803,7 +1802,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1893,7 +1892,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2682,7 +2681,7 @@ async fn main() { let mut app = App::new("jobs4") .author("Sebastian Thiel ") - .version("4.0.1+20220211") + .version("5.0.2+20230120") .about("Cloud Talent Solution provides the capability to create, read, update, and delete job postings, as well as search jobs based on keywords and filters. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_jobs4_cli") .arg(Arg::with_name("url") diff --git a/gen/jobs4/Cargo.toml b/gen/jobs4/Cargo.toml index 23f39cadb8..a21b4ccc8d 100644 --- a/gen/jobs4/Cargo.toml +++ b/gen/jobs4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-jobs4" -version = "5.0.2-beta-1+20230120" +version = "5.0.2+20230120" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Talent Solution (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/jobs4" homepage = "https://cloud.google.com/talent-solution/job-search/docs/" -documentation = "https://docs.rs/google-jobs4/5.0.2-beta-1+20230120" +documentation = "https://docs.rs/google-jobs4/5.0.2+20230120" license = "MIT" keywords = ["jobs", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/jobs4/README.md b/gen/jobs4/README.md index 2d31488455..3af50eca1b 100644 --- a/gen/jobs4/README.md +++ b/gen/jobs4/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-jobs4` library allows access to all features of the *Google Cloud Talent Solution* service. -This documentation was generated from *Cloud Talent Solution* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *jobs:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Talent Solution* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *jobs:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Talent Solution* *v4* API can be found at the [official documentation site](https://cloud.google.com/talent-solution/job-search/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/CloudTalentSolution) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/CloudTalentSolution) ... * projects - * [*operations get*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectOperationGetCall), [*tenants client events create*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantClientEventCreateCall), [*tenants companies create*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantCompanyCreateCall), [*tenants companies delete*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantCompanyDeleteCall), [*tenants companies get*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantCompanyGetCall), [*tenants companies list*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantCompanyListCall), [*tenants companies patch*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantCompanyPatchCall), [*tenants complete query*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantCompleteQueryCall), [*tenants create*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantCreateCall), [*tenants delete*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantDeleteCall), [*tenants get*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantGetCall), [*tenants jobs batch create*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobBatchCreateCall), [*tenants jobs batch delete*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobBatchDeleteCall), [*tenants jobs batch update*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobBatchUpdateCall), [*tenants jobs 
create*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobCreateCall), [*tenants jobs delete*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobDeleteCall), [*tenants jobs get*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobGetCall), [*tenants jobs list*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobListCall), [*tenants jobs patch*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobPatchCall), [*tenants jobs search*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobSearchCall), [*tenants jobs search for alert*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantJobSearchForAlertCall), [*tenants list*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantListCall) and [*tenants patch*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/api::ProjectTenantPatchCall) + * [*operations get*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectOperationGetCall), [*tenants client events create*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantClientEventCreateCall), [*tenants companies create*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantCompanyCreateCall), [*tenants companies delete*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantCompanyDeleteCall), [*tenants companies get*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantCompanyGetCall), [*tenants companies list*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantCompanyListCall), [*tenants companies patch*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantCompanyPatchCall), [*tenants complete 
query*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantCompleteQueryCall), [*tenants create*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantCreateCall), [*tenants delete*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantDeleteCall), [*tenants get*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantGetCall), [*tenants jobs batch create*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobBatchCreateCall), [*tenants jobs batch delete*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobBatchDeleteCall), [*tenants jobs batch update*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobBatchUpdateCall), [*tenants jobs create*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobCreateCall), [*tenants jobs delete*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobDeleteCall), [*tenants jobs get*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobGetCall), [*tenants jobs list*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobListCall), [*tenants jobs patch*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobPatchCall), [*tenants jobs search*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobSearchCall), [*tenants jobs search for alert*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantJobSearchForAlertCall), [*tenants list*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantListCall) and [*tenants patch*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/api::ProjectTenantPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/CloudTalentSolution)** +* **[Hub](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/CloudTalentSolution)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::CallBuilder) -* **[Resources](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::CallBuilder) +* **[Resources](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::Part)** + * **[Parts](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::Delegate) to the -[Method Builder](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::Delegate) to the +[Method Builder](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::RequestValue) and -[decodable](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::RequestValue) and +[decodable](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-jobs4/5.0.2-beta-1+20230120/google_jobs4/client::RequestValue) are moved +* [request values](https://docs.rs/google-jobs4/5.0.2+20230120/google_jobs4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/jobs4/src/api.rs b/gen/jobs4/src/api.rs index 96d8b40c38..79e5c79aa6 100644 --- a/gen/jobs4/src/api.rs +++ b/gen/jobs4/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> CloudTalentSolution { CloudTalentSolution { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://jobs.googleapis.com/".to_string(), _root_url: "https://jobs.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudTalentSolution { } /// Set the user-agent header field to use in all requests to the server. 
- /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/jobs4/src/client.rs b/gen/jobs4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/jobs4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/jobs4/src/lib.rs b/gen/jobs4/src/lib.rs index 9d09384f65..a03a106141 100644 --- a/gen/jobs4/src/lib.rs +++ b/gen/jobs4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Talent Solution* crate version *5.0.2-beta-1+20230120*, where *20230120* is the exact revision of the *jobs:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Talent Solution* crate version *5.0.2+20230120*, where *20230120* is the exact revision of the *jobs:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Talent Solution* *v4* API can be found at the //! [official documentation site](https://cloud.google.com/talent-solution/job-search/docs/). diff --git a/gen/keep1-cli/Cargo.toml b/gen/keep1-cli/Cargo.toml index 98f477b9bd..74765c9743 100644 --- a/gen/keep1-cli/Cargo.toml +++ b/gen/keep1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-keep1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Keep (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/keep1-cli" @@ -20,13 +20,13 @@ name = "keep1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-keep1] path = "../keep1" -version = "4.0.1+20220301" +version = "5.0.2+20230117" + diff --git a/gen/keep1-cli/README.md b/gen/keep1-cli/README.md index b42d9972c8..2056f11589 100644 --- a/gen/keep1-cli/README.md +++ b/gen/keep1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated 
from the *Keep* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Keep* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash keep1 [options] diff --git a/gen/keep1-cli/mkdocs.yml b/gen/keep1-cli/mkdocs.yml index 91229820d1..15b3b7553d 100644 --- a/gen/keep1-cli/mkdocs.yml +++ b/gen/keep1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Keep v4.0.1+20220301 +site_name: Keep v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-keep1-cli site_description: A complete library to interact with Keep (protocol v1) @@ -7,15 +7,17 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/keep1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['media_download.md', 'Media', 'Download'] -- ['notes_create.md', 'Notes', 'Create'] -- ['notes_delete.md', 'Notes', 'Delete'] -- ['notes_get.md', 'Notes', 'Get'] -- ['notes_list.md', 'Notes', 'List'] -- ['notes_permissions-batch-create.md', 'Notes', 'Permissions Batch Create'] -- ['notes_permissions-batch-delete.md', 'Notes', 'Permissions Batch Delete'] +nav: +- Home: 'index.md' +- 'Media': + - 'Download': 'media_download.md' +- 'Notes': + - 'Create': 'notes_create.md' + - 'Delete': 'notes_delete.md' + - 'Get': 'notes_get.md' + - 'List': 'notes_list.md' + - 'Permissions Batch Create': 'notes_permissions-batch-create.md' + - 'Permissions Batch Delete': 'notes_permissions-batch-delete.md' theme: readthedocs diff --git a/gen/keep1-cli/src/client.rs b/gen/keep1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/keep1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use 
std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/keep1-cli/src/main.rs b/gen/keep1-cli/src/main.rs index f700f40997..7c3428c18a 100644 --- a/gen/keep1-cli/src/main.rs +++ b/gen/keep1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_keep1::{api, Error, oauth2}; +use google_keep1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -322,7 +321,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -828,8 +827,8 @@ async fn main() { let mut app = App::new("keep1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") - .about("This API is an enterprise-only API used to create and manage the Keep notes within your domain, including resolving issues identified by CASB software.") + .version("5.0.2+20230117") + .about("The Google Keep API is used in an enterprise environment to manage Google Keep content and resolve issues identified by cloud security software.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_keep1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/keep1/Cargo.toml b/gen/keep1/Cargo.toml index b9a2d38741..45b68ca019 100644 --- a/gen/keep1/Cargo.toml +++ b/gen/keep1/Cargo.toml @@ -4,12 +4,12 @@ 
[package] name = "google-keep1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Keep (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/keep1" homepage = "https://developers.google.com/keep/api" -documentation = "https://docs.rs/google-keep1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-keep1/5.0.2+20230117" license = "MIT" keywords = ["keep", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/keep1/README.md b/gen/keep1/README.md index b2c7333242..559cd7fdc6 100644 --- a/gen/keep1/README.md +++ b/gen/keep1/README.md @@ -5,23 +5,23 @@ DO NOT EDIT ! --> The `google-keep1` library allows access to all features of the *Google Keep* service. -This documentation was generated from *Keep* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *keep:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Keep* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *keep:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Keep* *v1* API can be found at the [official documentation site](https://developers.google.com/keep/api). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/Keep) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/Keep) ... 
* media - * [*download*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::MediaDownloadCall) -* [notes](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::Note) - * [*create*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::NoteCreateCall), [*delete*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::NoteDeleteCall), [*get*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::NoteGetCall), [*list*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::NoteListCall), [*permissions batch create*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::NotePermissionBatchCreateCall) and [*permissions batch delete*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::NotePermissionBatchDeleteCall) + * [*download*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::MediaDownloadCall) +* [notes](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::Note) + * [*create*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::NoteCreateCall), [*delete*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::NoteDeleteCall), [*get*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::NoteGetCall), [*list*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::NoteListCall), [*permissions batch create*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::NotePermissionBatchCreateCall) and [*permissions batch delete*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::NotePermissionBatchDeleteCall) Download supported by ... -* [*download media*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/api::MediaDownloadCall) +* [*download media*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/api::MediaDownloadCall) @@ -29,17 +29,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/Keep)** +* **[Hub](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/Keep)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::CallBuilder) -* **[Resources](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::CallBuilder) +* **[Resources](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::Part)** + * **[Parts](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::Delegate) to the -[Method Builder](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::Delegate) to the +[Method Builder](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::RequestValue) and -[decodable](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::RequestValue) and +[decodable](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-keep1/5.0.2-beta-1+20230117/google_keep1/client::RequestValue) are moved +* [request values](https://docs.rs/google-keep1/5.0.2+20230117/google_keep1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/keep1/src/api.rs b/gen/keep1/src/api.rs index c3c55ca7a1..e23456dec3 100644 --- a/gen/keep1/src/api.rs +++ b/gen/keep1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Keep { Keep { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://keep.googleapis.com/".to_string(), _root_url: "https://keep.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Keep { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/keep1/src/client.rs b/gen/keep1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/keep1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/keep1/src/lib.rs b/gen/keep1/src/lib.rs index 2e2db1055d..06eb81c270 100644 --- a/gen/keep1/src/lib.rs +++ b/gen/keep1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Keep* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *keep:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Keep* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *keep:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Keep* *v1* API can be found at the //! [official documentation site](https://developers.google.com/keep/api). diff --git a/gen/language1-cli/Cargo.toml b/gen/language1-cli/Cargo.toml index 2bc355a709..e2787b1e87 100644 --- a/gen/language1-cli/Cargo.toml +++ b/gen/language1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-language1-cli" -version = "4.0.1+20220218" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Natural Language (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/language1-cli" @@ -20,13 +20,13 @@ name = "language1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-language1] path = "../language1" -version = "4.0.1+20220218" +version = "5.0.2+20230121" + diff --git a/gen/language1-cli/README.md b/gen/language1-cli/README.md index c65d157286..0302e792ea 100644 --- a/gen/language1-cli/README.md +++ b/gen/language1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated 
from the *Cloud Natural Language* API at revision *20220218*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Natural Language* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash language1 [options] diff --git a/gen/language1-cli/mkdocs.yml b/gen/language1-cli/mkdocs.yml index 91c598efb6..245facd754 100644 --- a/gen/language1-cli/mkdocs.yml +++ b/gen/language1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Natural Language v4.0.1+20220218 +site_name: Cloud Natural Language v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-language1-cli site_description: A complete library to interact with Cloud Natural Language (protocol v1) @@ -7,14 +7,15 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/language1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['documents_analyze-entities.md', 'Documents', 'Analyze Entities'] -- ['documents_analyze-entity-sentiment.md', 'Documents', 'Analyze Entity Sentiment'] -- ['documents_analyze-sentiment.md', 'Documents', 'Analyze Sentiment'] -- ['documents_analyze-syntax.md', 'Documents', 'Analyze Syntax'] -- ['documents_annotate-text.md', 'Documents', 'Annotate Text'] -- ['documents_classify-text.md', 'Documents', 'Classify Text'] +nav: +- Home: 'index.md' +- 'Documents': + - 'Analyze Entities': 'documents_analyze-entities.md' + - 'Analyze Entity Sentiment': 'documents_analyze-entity-sentiment.md' + - 'Analyze Sentiment': 'documents_analyze-sentiment.md' + - 'Analyze Syntax': 'documents_analyze-syntax.md' + - 'Annotate Text': 'documents_annotate-text.md' + - 'Classify Text': 'documents_classify-text.md' theme: readthedocs diff --git a/gen/language1-cli/src/client.rs b/gen/language1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/language1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use 
crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/language1-cli/src/main.rs b/gen/language1-cli/src/main.rs index 27f894f7fa..099908fd10 100644 --- a/gen/language1-cli/src/main.rs +++ b/gen/language1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_language1::{api, Error, oauth2}; +use google_language1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -435,13 +434,14 @@ where "document.language" => Some(("document.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document.type" => Some(("document.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "encoding-type" => Some(("encodingType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "features.classification-model-options.v2-model.content-categories-version" => Some(("features.classificationModelOptions.v2Model.contentCategoriesVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "features.classify-text" => Some(("features.classifyText", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "features.extract-document-sentiment" => Some(("features.extractDocumentSentiment", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "features.extract-entities" => Some(("features.extractEntities", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "features.extract-entity-sentiment" => Some(("features.extractEntitySentiment", JsonTypeInfo { 
jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "features.extract-syntax" => Some(("features.extractSyntax", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["classify-text", "content", "document", "encoding-type", "extract-document-sentiment", "extract-entities", "extract-entity-sentiment", "extract-syntax", "features", "gcs-content-uri", "language", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["classification-model-options", "classify-text", "content", "content-categories-version", "document", "encoding-type", "extract-document-sentiment", "extract-entities", "extract-entity-sentiment", "extract-syntax", "features", "gcs-content-uri", "language", "type", "v2-model"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -524,12 +524,13 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "classification-model-options.v2-model.content-categories-version" => Some(("classificationModelOptions.v2Model.contentCategoriesVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document.content" => Some(("document.content", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document.gcs-content-uri" => Some(("document.gcsContentUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document.language" => Some(("document.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document.type" => Some(("document.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["content", "document", "gcs-content-uri", "language", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["classification-model-options", "content", "content-categories-version", "document", "gcs-content-uri", "language", "type", "v2-model"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -832,7 +833,7 @@ async fn main() { let mut app = App::new("language1") .author("Sebastian Thiel ") - .version("4.0.1+20220218") + .version("5.0.2+20230121") .about("Provides natural language understanding technologies, such as sentiment analysis, entity recognition, entity sentiment analysis, and other text annotations, to developers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_language1_cli") .arg(Arg::with_name("url") diff --git a/gen/language1/Cargo.toml b/gen/language1/Cargo.toml index 32df54e8e7..32fb1fe684 100644 --- a/gen/language1/Cargo.toml +++ b/gen/language1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-language1" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud 
Natural Language (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/language1" homepage = "https://cloud.google.com/natural-language/" -documentation = "https://docs.rs/google-language1/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-language1/5.0.2+20230121" license = "MIT" keywords = ["language", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/language1/README.md b/gen/language1/README.md index 5e5faa0136..730c0104e8 100644 --- a/gen/language1/README.md +++ b/gen/language1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-language1` library allows access to all features of the *Google Cloud Natural Language* service. -This documentation was generated from *Cloud Natural Language* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *language:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Natural Language* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *language:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Natural Language* *v1* API can be found at the [official documentation site](https://cloud.google.com/natural-language/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/CloudNaturalLanguage) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-language1/5.0.2+20230121/google_language1/CloudNaturalLanguage) ... 
-* [documents](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/api::Document) - * [*analyze entities*](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/api::DocumentAnalyzeEntityCall), [*analyze entity sentiment*](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/api::DocumentAnalyzeEntitySentimentCall), [*analyze sentiment*](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/api::DocumentAnalyzeSentimentCall), [*analyze syntax*](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/api::DocumentAnalyzeSyntaxCall), [*annotate text*](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/api::DocumentAnnotateTextCall) and [*classify text*](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/api::DocumentClassifyTextCall) +* [documents](https://docs.rs/google-language1/5.0.2+20230121/google_language1/api::Document) + * [*analyze entities*](https://docs.rs/google-language1/5.0.2+20230121/google_language1/api::DocumentAnalyzeEntityCall), [*analyze entity sentiment*](https://docs.rs/google-language1/5.0.2+20230121/google_language1/api::DocumentAnalyzeEntitySentimentCall), [*analyze sentiment*](https://docs.rs/google-language1/5.0.2+20230121/google_language1/api::DocumentAnalyzeSentimentCall), [*analyze syntax*](https://docs.rs/google-language1/5.0.2+20230121/google_language1/api::DocumentAnalyzeSyntaxCall), [*annotate text*](https://docs.rs/google-language1/5.0.2+20230121/google_language1/api::DocumentAnnotateTextCall) and [*classify text*](https://docs.rs/google-language1/5.0.2+20230121/google_language1/api::DocumentClassifyTextCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/CloudNaturalLanguage)** +* 
**[Hub](https://docs.rs/google-language1/5.0.2+20230121/google_language1/CloudNaturalLanguage)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::CallBuilder) -* **[Resources](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::CallBuilder) +* **[Resources](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::Part)** + * **[Parts](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::Delegate) to the -[Method Builder](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::Delegate) to the +[Method Builder](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::RequestValue) and -[decodable](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::RequestValue) and +[decodable](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-language1/5.0.2-beta-1+20230121/google_language1/client::RequestValue) are moved +* [request values](https://docs.rs/google-language1/5.0.2+20230121/google_language1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/language1/src/api.rs b/gen/language1/src/api.rs index 4b253d005f..4ccec64797 100644 --- a/gen/language1/src/api.rs +++ b/gen/language1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> CloudNaturalLanguage { CloudNaturalLanguage { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://language.googleapis.com/".to_string(), _root_url: "https://language.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudNaturalLanguage { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/language1/src/client.rs b/gen/language1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/language1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/language1/src/lib.rs b/gen/language1/src/lib.rs index 35aaaecec8..7e695c5631 100644 --- a/gen/language1/src/lib.rs +++ b/gen/language1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Natural Language* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *language:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Natural Language* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *language:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Natural Language* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/natural-language/). diff --git a/gen/language1_beta1-cli/Cargo.toml b/gen/language1_beta1-cli/Cargo.toml index cb46e0403c..f42c3d8773 100644 --- a/gen/language1_beta1-cli/Cargo.toml +++ b/gen/language1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-language1_beta1-cli" -version = "4.0.1+20220218" +version = "5.0.2+20220218" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Natural Language (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/language1_beta1-cli" @@ -20,13 +20,13 @@ name = "language1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-language1_beta1] path = "../language1_beta1" -version = "4.0.1+20220218" +version = "5.0.2+20220218" + diff --git a/gen/language1_beta1-cli/README.md b/gen/language1_beta1-cli/README.md index 1310892865..d18d99b0fb 100644 --- a/gen/language1_beta1-cli/README.md +++ 
b/gen/language1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Natural Language* API at revision *20220218*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Natural Language* API at revision *20220218*. The CLI is at version *5.0.2*. ```bash language1-beta1 [options] diff --git a/gen/language1_beta1-cli/mkdocs.yml b/gen/language1_beta1-cli/mkdocs.yml index 7189d29dcc..53ab69f92b 100644 --- a/gen/language1_beta1-cli/mkdocs.yml +++ b/gen/language1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Natural Language v4.0.1+20220218 +site_name: Cloud Natural Language v5.0.2+20220218 site_url: http://byron.github.io/google-apis-rs/google-language1_beta1-cli site_description: A complete library to interact with Cloud Natural Language (protocol v1beta1) @@ -7,12 +7,13 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/language1_beta1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['documents_analyze-entities.md', 'Documents', 'Analyze Entities'] -- ['documents_analyze-sentiment.md', 'Documents', 'Analyze Sentiment'] -- ['documents_analyze-syntax.md', 'Documents', 'Analyze Syntax'] -- ['documents_annotate-text.md', 'Documents', 'Annotate Text'] +nav: +- Home: 'index.md' +- 'Documents': + - 'Analyze Entities': 'documents_analyze-entities.md' + - 'Analyze Sentiment': 'documents_analyze-sentiment.md' + - 'Analyze Syntax': 'documents_analyze-syntax.md' + - 'Annotate Text': 'documents_annotate-text.md' theme: readthedocs diff --git a/gen/language1_beta1-cli/src/client.rs b/gen/language1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/language1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, 
ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/language1_beta1-cli/src/main.rs b/gen/language1_beta1-cli/src/main.rs index 0cbbb9e6bc..434a6bb8bc 100644 --- a/gen/language1_beta1-cli/src/main.rs +++ b/gen/language1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_language1_beta1::{api, Error, oauth2}; +use google_language1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -603,7 +602,7 @@ async fn main() { let mut app = App::new("language1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220218") + .version("5.0.2+20220218") .about("Provides natural language understanding technologies, such as sentiment analysis, entity recognition, entity sentiment analysis, and other text annotations, to developers.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_language1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/language1_beta1/Cargo.toml b/gen/language1_beta1/Cargo.toml index 259ea9d677..a244ec3ad5 100644 --- a/gen/language1_beta1/Cargo.toml +++ b/gen/language1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-language1_beta1" -version = "5.0.2-beta-1+20220218" +version = "5.0.2+20220218" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Natural Language (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/language1_beta1" homepage = "https://cloud.google.com/natural-language/" 
-documentation = "https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218" +documentation = "https://docs.rs/google-language1_beta1/5.0.2+20220218" license = "MIT" keywords = ["language", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/language1_beta1/README.md b/gen/language1_beta1/README.md index c8dd8e526e..c9e95c2a7c 100644 --- a/gen/language1_beta1/README.md +++ b/gen/language1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-language1_beta1` library allows access to all features of the *Google Cloud Natural Language* service. -This documentation was generated from *Cloud Natural Language* crate version *5.0.2-beta-1+20220218*, where *20220218* is the exact revision of the *language:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Natural Language* crate version *5.0.2+20220218*, where *20220218* is the exact revision of the *language:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Natural Language* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/natural-language/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/CloudNaturalLanguage) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/CloudNaturalLanguage) ... 
-* [documents](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/api::Document) - * [*analyze entities*](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/api::DocumentAnalyzeEntityCall), [*analyze sentiment*](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/api::DocumentAnalyzeSentimentCall), [*analyze syntax*](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/api::DocumentAnalyzeSyntaxCall) and [*annotate text*](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/api::DocumentAnnotateTextCall) +* [documents](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/api::Document) + * [*analyze entities*](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/api::DocumentAnalyzeEntityCall), [*analyze sentiment*](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/api::DocumentAnalyzeSentimentCall), [*analyze syntax*](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/api::DocumentAnalyzeSyntaxCall) and [*annotate text*](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/api::DocumentAnnotateTextCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/CloudNaturalLanguage)** +* **[Hub](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/CloudNaturalLanguage)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-language1_beta1/5.0.2-beta-1+20220218/google_language1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-language1_beta1/5.0.2+20220218/google_language1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/language1_beta1/src/api.rs b/gen/language1_beta1/src/api.rs index ba29fb407b..f2498119e3 100644 --- a/gen/language1_beta1/src/api.rs +++ b/gen/language1_beta1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> CloudNaturalLanguage { CloudNaturalLanguage { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://language.googleapis.com/".to_string(), _root_url: "https://language.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> CloudNaturalLanguage { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/language1_beta1/src/client.rs b/gen/language1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/language1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/language1_beta1/src/lib.rs b/gen/language1_beta1/src/lib.rs index 029ca78fe5..3ea03881ce 100644 --- a/gen/language1_beta1/src/lib.rs +++ b/gen/language1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Natural Language* crate version *5.0.2-beta-1+20220218*, where *20220218* is the exact revision of the *language:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Natural Language* crate version *5.0.2+20220218*, where *20220218* is the exact revision of the *language:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Natural Language* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/natural-language/). diff --git a/gen/libraryagent1-cli/Cargo.toml b/gen/libraryagent1-cli/Cargo.toml index 6ae9a7d96f..24c650a6b1 100644 --- a/gen/libraryagent1-cli/Cargo.toml +++ b/gen/libraryagent1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-libraryagent1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Libraryagent (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/libraryagent1-cli" @@ -20,13 +20,13 @@ name = "libraryagent1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-libraryagent1] path = "../libraryagent1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/libraryagent1-cli/README.md b/gen/libraryagent1-cli/README.md index 0e86ca16b5..95032315c4 100644 --- a/gen/libraryagent1-cli/README.md +++ b/gen/libraryagent1-cli/README.md @@ -25,7 +25,7 @@ 
Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Libraryagent* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Libraryagent* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash libraryagent1 [options] diff --git a/gen/libraryagent1-cli/mkdocs.yml b/gen/libraryagent1-cli/mkdocs.yml index 575d716de7..4c48389486 100644 --- a/gen/libraryagent1-cli/mkdocs.yml +++ b/gen/libraryagent1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Libraryagent v4.0.1+20220305 +site_name: Libraryagent v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-libraryagent1-cli site_description: A complete library to interact with Libraryagent (protocol v1) @@ -7,14 +7,15 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/libraryagent1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['shelves_books-borrow.md', 'Shelves', 'Books Borrow'] -- ['shelves_books-get.md', 'Shelves', 'Books Get'] -- ['shelves_books-list.md', 'Shelves', 'Books List'] -- ['shelves_books-return.md', 'Shelves', 'Books Return'] -- ['shelves_get.md', 'Shelves', 'Get'] -- ['shelves_list.md', 'Shelves', 'List'] +nav: +- Home: 'index.md' +- 'Shelves': + - 'Books Borrow': 'shelves_books-borrow.md' + - 'Books Get': 'shelves_books-get.md' + - 'Books List': 'shelves_books-list.md' + - 'Books Return': 'shelves_books-return.md' + - 'Get': 'shelves_get.md' + - 'List': 'shelves_list.md' theme: readthedocs diff --git a/gen/libraryagent1-cli/src/client.rs b/gen/libraryagent1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/libraryagent1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use 
serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/libraryagent1-cli/src/main.rs b/gen/libraryagent1-cli/src/main.rs index 42b76ffcf5..a4bd455873 100644 --- a/gen/libraryagent1-cli/src/main.rs +++ b/gen/libraryagent1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_libraryagent1::{api, Error, oauth2}; +use google_libraryagent1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -165,7 +164,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -328,7 +327,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -614,7 +613,7 @@ async fn main() { let mut app = App::new("libraryagent1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("A simple Google Example Library API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_libraryagent1_cli") .arg(Arg::with_name("url") diff --git a/gen/libraryagent1/Cargo.toml b/gen/libraryagent1/Cargo.toml index 5607fd9748..653df5f504 
100644 --- a/gen/libraryagent1/Cargo.toml +++ b/gen/libraryagent1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-libraryagent1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Libraryagent (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/libraryagent1" homepage = "https://cloud.google.com/docs/quota" -documentation = "https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-libraryagent1/5.0.2+20230123" license = "MIT" keywords = ["libraryagent", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/libraryagent1/README.md b/gen/libraryagent1/README.md index 5ebe22f68d..1d0207cb17 100644 --- a/gen/libraryagent1/README.md +++ b/gen/libraryagent1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-libraryagent1` library allows access to all features of the *Google Libraryagent* service. -This documentation was generated from *Libraryagent* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *libraryagent:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Libraryagent* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *libraryagent:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Libraryagent* *v1* API can be found at the [official documentation site](https://cloud.google.com/docs/quota). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/Libraryagent) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/Libraryagent) ... 
* shelves - * [*books borrow*](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/api::ShelfBookBorrowCall), [*books get*](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/api::ShelfBookGetCall), [*books list*](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/api::ShelfBookListCall), [*books return*](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/api::ShelfBookReturnCall), [*get*](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/api::ShelfGetCall) and [*list*](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/api::ShelfListCall) + * [*books borrow*](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/api::ShelfBookBorrowCall), [*books get*](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/api::ShelfBookGetCall), [*books list*](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/api::ShelfBookListCall), [*books return*](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/api::ShelfBookReturnCall), [*get*](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/api::ShelfGetCall) and [*list*](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/api::ShelfListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/Libraryagent)** +* **[Hub](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/Libraryagent)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::MethodsBuilder) which in turn - allow access to 
individual [*Call Builders*](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::CallBuilder) -* **[Resources](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::CallBuilder) +* **[Resources](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::Part)** + * **[Parts](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -120,17 +120,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -140,29 +140,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::Delegate) to the -[Method Builder](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::Delegate) to the +[Method Builder](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::RequestValue) and -[decodable](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::RequestValue) and +[decodable](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-libraryagent1/5.0.2-beta-1+20230123/google_libraryagent1/client::RequestValue) are moved +* [request values](https://docs.rs/google-libraryagent1/5.0.2+20230123/google_libraryagent1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/libraryagent1/src/api.rs b/gen/libraryagent1/src/api.rs index f652c122fa..8043232fab 100644 --- a/gen/libraryagent1/src/api.rs +++ b/gen/libraryagent1/src/api.rs @@ -119,7 +119,7 @@ impl<'a, S> Libraryagent { Libraryagent { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://libraryagent.googleapis.com/".to_string(), _root_url: "https://libraryagent.googleapis.com/".to_string(), } @@ -130,7 +130,7 @@ impl<'a, S> Libraryagent { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/libraryagent1/src/client.rs b/gen/libraryagent1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/libraryagent1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/libraryagent1/src/lib.rs b/gen/libraryagent1/src/lib.rs index 175dc50ea2..c8f07fc09b 100644 --- a/gen/libraryagent1/src/lib.rs +++ b/gen/libraryagent1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Libraryagent* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *libraryagent:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Libraryagent* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *libraryagent:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Libraryagent* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/docs/quota). diff --git a/gen/licensing1-cli/Cargo.toml b/gen/licensing1-cli/Cargo.toml index 9ccc6b14a0..098f531b14 100644 --- a/gen/licensing1-cli/Cargo.toml +++ b/gen/licensing1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-licensing1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with licensing (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/licensing1-cli" @@ -20,13 +20,13 @@ name = "licensing1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-licensing1] path = "../licensing1" -version = "4.0.1+20220305" +version = "5.0.2+20230121" + diff --git a/gen/licensing1-cli/README.md b/gen/licensing1-cli/README.md index c94458ed38..12465e7680 100644 --- a/gen/licensing1-cli/README.md +++ b/gen/licensing1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage 
-This documentation was generated from the *licensing* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *licensing* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash licensing1 [options] diff --git a/gen/licensing1-cli/mkdocs.yml b/gen/licensing1-cli/mkdocs.yml index b0d4fc346c..dce4607a9b 100644 --- a/gen/licensing1-cli/mkdocs.yml +++ b/gen/licensing1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: licensing v4.0.1+20220305 +site_name: licensing v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-licensing1-cli site_description: A complete library to interact with licensing (protocol v1) @@ -7,15 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/licensing1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['license-assignments_delete.md', 'License Assignments', 'Delete'] -- ['license-assignments_get.md', 'License Assignments', 'Get'] -- ['license-assignments_insert.md', 'License Assignments', 'Insert'] -- ['license-assignments_list-for-product.md', 'License Assignments', 'List For Product'] -- ['license-assignments_list-for-product-and-sku.md', 'License Assignments', 'List For Product And Sku'] -- ['license-assignments_patch.md', 'License Assignments', 'Patch'] -- ['license-assignments_update.md', 'License Assignments', 'Update'] +nav: +- Home: 'index.md' +- 'License Assignments': + - 'Delete': 'license-assignments_delete.md' + - 'Get': 'license-assignments_get.md' + - 'Insert': 'license-assignments_insert.md' + - 'List For Product': 'license-assignments_list-for-product.md' + - 'List For Product And Sku': 'license-assignments_list-for-product-and-sku.md' + - 'Patch': 'license-assignments_patch.md' + - 'Update': 'license-assignments_update.md' theme: readthedocs diff --git a/gen/licensing1-cli/src/client.rs b/gen/licensing1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/licensing1-cli/src/client.rs 
+++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType 
{ - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - 
num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, 
"int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - 
err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - 
DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref 
s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/licensing1-cli/src/main.rs b/gen/licensing1-cli/src/main.rs index 0052a02bfd..ab2302a02e 100644 --- a/gen/licensing1-cli/src/main.rs +++ b/gen/licensing1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_licensing1::{api, Error, oauth2}; +use google_licensing1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -250,7 +249,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -309,7 +308,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -764,7 +763,7 @@ async fn main() { (Some(r##"customer-id"##), None, - Some(r##"Customer's `customerId`. A previous version of this API accepted the primary domain name as a value for this field. If the customer is suspended, the server returns an error."##), + Some(r##"The customer's unique ID as defined in the Admin console, such as `C00000000`. 
If the customer is suspended, the server returns an error."##), Some(true), Some(false)), @@ -798,7 +797,7 @@ async fn main() { (Some(r##"customer-id"##), None, - Some(r##"Customer's `customerId`. A previous version of this API accepted the primary domain name as a value for this field. If the customer is suspended, the server returns an error."##), + Some(r##"The customer's unique ID as defined in the Admin console, such as `C00000000`. If the customer is suspended, the server returns an error."##), Some(true), Some(false)), @@ -900,8 +899,8 @@ async fn main() { let mut app = App::new("licensing1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") - .about("The Google Enterprise License Manager API's allows you to license apps for all the users of a domain managed by you.") + .version("5.0.2+20230121") + .about("The Google Enterprise License Manager API lets you manage Google Workspace and related licenses for all users of a customer that you manage.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_licensing1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/licensing1/Cargo.toml b/gen/licensing1/Cargo.toml index 7a986d3d22..8aceba7147 100644 --- a/gen/licensing1/Cargo.toml +++ b/gen/licensing1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-licensing1" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with licensing (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/licensing1" homepage = "https://developers.google.com/admin-sdk/licensing/" -documentation = "https://docs.rs/google-licensing1/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-licensing1/5.0.2+20230121" license = "MIT" keywords = ["licensing", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/licensing1/README.md b/gen/licensing1/README.md index 
e8885f4eed..ac1ce1405e 100644 --- a/gen/licensing1/README.md +++ b/gen/licensing1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-licensing1` library allows access to all features of the *Google licensing* service. -This documentation was generated from *licensing* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *licensing:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *licensing* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *licensing:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *licensing* *v1* API can be found at the [official documentation site](https://developers.google.com/admin-sdk/licensing/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/Licensing) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/Licensing) ... 
-* [license assignments](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/api::LicenseAssignment) - * [*delete*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/api::LicenseAssignmentDeleteCall), [*get*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/api::LicenseAssignmentGetCall), [*insert*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/api::LicenseAssignmentInsertCall), [*list for product*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/api::LicenseAssignmentListForProductCall), [*list for product and sku*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/api::LicenseAssignmentListForProductAndSkuCall), [*patch*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/api::LicenseAssignmentPatchCall) and [*update*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/api::LicenseAssignmentUpdateCall) +* [license assignments](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/api::LicenseAssignment) + * [*delete*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/api::LicenseAssignmentDeleteCall), [*get*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/api::LicenseAssignmentGetCall), [*insert*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/api::LicenseAssignmentInsertCall), [*list for product*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/api::LicenseAssignmentListForProductCall), [*list for product and sku*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/api::LicenseAssignmentListForProductAndSkuCall), [*patch*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/api::LicenseAssignmentPatchCall) and [*update*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/api::LicenseAssignmentUpdateCall) @@ -23,17 +23,17 @@ Handle the 
following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/Licensing)** +* **[Hub](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/Licensing)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::CallBuilder) -* **[Resources](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::CallBuilder) +* **[Resources](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::Part)** + * **[Parts](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::Delegate) to the -[Method Builder](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::Delegate) to the +[Method Builder](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::RequestValue) and -[decodable](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::RequestValue) and +[decodable](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-licensing1/5.0.2-beta-1+20230121/google_licensing1/client::RequestValue) are moved +* [request values](https://docs.rs/google-licensing1/5.0.2+20230121/google_licensing1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/licensing1/src/api.rs b/gen/licensing1/src/api.rs index dd7bbc472a..c2203d5a67 100644 --- a/gen/licensing1/src/api.rs +++ b/gen/licensing1/src/api.rs @@ -121,7 +121,7 @@ impl<'a, S> Licensing { Licensing { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://licensing.googleapis.com/".to_string(), _root_url: "https://licensing.googleapis.com/".to_string(), } @@ -132,7 +132,7 @@ impl<'a, S> Licensing { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/licensing1/src/client.rs b/gen/licensing1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/licensing1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/licensing1/src/lib.rs b/gen/licensing1/src/lib.rs index 8cd9835828..81f1d28246 100644 --- a/gen/licensing1/src/lib.rs +++ b/gen/licensing1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *licensing* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *licensing:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *licensing* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *licensing:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *licensing* *v1* API can be found at the //! [official documentation site](https://developers.google.com/admin-sdk/licensing/). diff --git a/gen/lifesciences2_beta-cli/Cargo.toml b/gen/lifesciences2_beta-cli/Cargo.toml index b592733ee4..1adc1cd7c3 100644 --- a/gen/lifesciences2_beta-cli/Cargo.toml +++ b/gen/lifesciences2_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-lifesciences2_beta-cli" -version = "4.0.1+20220211" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Life Sciences (protocol v2beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/lifesciences2_beta-cli" @@ -20,13 +20,13 @@ name = "lifesciences2-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-lifesciences2_beta] path = "../lifesciences2_beta" -version = "4.0.1+20220211" +version = "5.0.2+20230106" + diff --git a/gen/lifesciences2_beta-cli/README.md b/gen/lifesciences2_beta-cli/README.md index 13cd0c2c04..242beadf98 100644 --- a/gen/lifesciences2_beta-cli/README.md +++ 
b/gen/lifesciences2_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Life Sciences* API at revision *20220211*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Life Sciences* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash lifesciences2-beta [options] diff --git a/gen/lifesciences2_beta-cli/mkdocs.yml b/gen/lifesciences2_beta-cli/mkdocs.yml index cbdacc1400..3dd85eb025 100644 --- a/gen/lifesciences2_beta-cli/mkdocs.yml +++ b/gen/lifesciences2_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Life Sciences v4.0.1+20220211 +site_name: Cloud Life Sciences v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-lifesciences2_beta-cli site_description: A complete library to interact with Cloud Life Sciences (protocol v2beta) @@ -7,14 +7,15 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/lifesciences2_be docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-pipelines-run.md', 'Projects', 'Locations Pipelines Run'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Pipelines Run': 'projects_locations-pipelines-run.md' 
theme: readthedocs diff --git a/gen/lifesciences2_beta-cli/src/client.rs b/gen/lifesciences2_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/lifesciences2_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/lifesciences2_beta-cli/src/main.rs b/gen/lifesciences2_beta-cli/src/main.rs index 1202ccbc4f..d376f4ee22 100644 --- a/gen/lifesciences2_beta-cli/src/main.rs +++ b/gen/lifesciences2_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_lifesciences2_beta::{api, Error, oauth2}; +use google_lifesciences2_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -311,7 +310,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -725,7 +724,7 @@ async fn main() { let mut app = App::new("lifesciences2-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220211") + .version("5.0.2+20230106") .about("Cloud Life Sciences is a suite of services and tools for managing, processing, and transforming life sciences data.") .after_help("All documentation details can be found at 
http://byron.github.io/google-apis-rs/google_lifesciences2_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/lifesciences2_beta/Cargo.toml b/gen/lifesciences2_beta/Cargo.toml index a12b8e0a7f..ede7126a75 100644 --- a/gen/lifesciences2_beta/Cargo.toml +++ b/gen/lifesciences2_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-lifesciences2_beta" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Life Sciences (protocol v2beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/lifesciences2_beta" homepage = "https://cloud.google.com/life-sciences" -documentation = "https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-lifesciences2_beta/5.0.2+20230106" license = "MIT" keywords = ["lifesciences", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/lifesciences2_beta/README.md b/gen/lifesciences2_beta/README.md index 53d24ca4f7..41893fb3cb 100644 --- a/gen/lifesciences2_beta/README.md +++ b/gen/lifesciences2_beta/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-lifesciences2_beta` library allows access to all features of the *Google Cloud Life Sciences* service. -This documentation was generated from *Cloud Life Sciences* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *lifesciences:v2beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Life Sciences* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *lifesciences:v2beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Life Sciences* *v2_beta* API can be found at the [official documentation site](https://cloud.google.com/life-sciences). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/CloudLifeSciences) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/CloudLifeSciences) ... * projects - * [*locations get*](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/api::ProjectLocationOperationListCall) and [*locations pipelines run*](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/api::ProjectLocationPipelineRunCall) + * [*locations get*](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/api::ProjectLocationOperationGetCall), [*locations operations 
list*](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/api::ProjectLocationOperationListCall) and [*locations pipelines run*](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/api::ProjectLocationPipelineRunCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/CloudLifeSciences)** +* **[Hub](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/CloudLifeSciences)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::Part)** + * **[Parts](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::Part)** * a collection of properties * never directly used in 
*Activities* -* **[Activities](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::CallBuilder)** +* **[Activities](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::RequestValue) and -[decodable](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::RequestValue) and +[decodable](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-lifesciences2_beta/5.0.2-beta-1+20230106/google_lifesciences2_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-lifesciences2_beta/5.0.2+20230106/google_lifesciences2_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/lifesciences2_beta/src/api.rs b/gen/lifesciences2_beta/src/api.rs index f41cf288ce..ddc1fc9f5d 100644 --- a/gen/lifesciences2_beta/src/api.rs +++ b/gen/lifesciences2_beta/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudLifeSciences { CloudLifeSciences { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://lifesciences.googleapis.com/".to_string(), _root_url: "https://lifesciences.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> CloudLifeSciences { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/lifesciences2_beta/src/client.rs b/gen/lifesciences2_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/lifesciences2_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/lifesciences2_beta/src/lib.rs b/gen/lifesciences2_beta/src/lib.rs index 6a58723eec..a4f76cc4eb 100644 --- a/gen/lifesciences2_beta/src/lib.rs +++ b/gen/lifesciences2_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Life Sciences* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *lifesciences:v2beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Life Sciences* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *lifesciences:v2beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Life Sciences* *v2_beta* API can be found at the //! [official documentation site](https://cloud.google.com/life-sciences). diff --git a/gen/localservices1-cli/Cargo.toml b/gen/localservices1-cli/Cargo.toml index 4f622dedec..7f9606a831 100644 --- a/gen/localservices1-cli/Cargo.toml +++ b/gen/localservices1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-localservices1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Localservices (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/localservices1-cli" @@ -20,13 +20,13 @@ name = "localservices1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-localservices1] path = "../localservices1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/localservices1-cli/README.md b/gen/localservices1-cli/README.md index 88d33d5b90..95a83932de 100644 --- a/gen/localservices1-cli/README.md +++ b/gen/localservices1-cli/README.md @@ -25,7 +25,7 
@@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Localservices* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Localservices* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash localservices1 [options] diff --git a/gen/localservices1-cli/mkdocs.yml b/gen/localservices1-cli/mkdocs.yml index eb965a8858..541d534d6a 100644 --- a/gen/localservices1-cli/mkdocs.yml +++ b/gen/localservices1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Localservices v4.0.1+20220305 +site_name: Localservices v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-localservices1-cli site_description: A complete library to interact with Localservices (protocol v1) @@ -7,10 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/localservices1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['account-reports_search.md', 'Account Reports', 'Search'] -- ['detailed-lead-reports_search.md', 'Detailed Lead Reports', 'Search'] +nav: +- Home: 'index.md' +- 'Account Reports': + - 'Search': 'account-reports_search.md' +- 'Detailed Lead Reports': + - 'Search': 'detailed-lead-reports_search.md' theme: readthedocs diff --git a/gen/localservices1-cli/src/client.rs b/gen/localservices1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/localservices1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - 
-const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/localservices1-cli/src/main.rs b/gen/localservices1-cli/src/main.rs index f05301d296..a7239eae50 100644 --- a/gen/localservices1-cli/src/main.rs +++ b/gen/localservices1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_localservices1::{api, Error, oauth2}; +use google_localservices1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,13 +57,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-date-year" => { - call = call.start_date_year(arg_from_str(value.unwrap_or("-0"), err, "start-date-year", "integer")); + call = call.start_date_year( value.map(|v| arg_from_str(v, err, "start-date-year", "int32")).unwrap_or(-0)); }, "start-date-month" => { - call = call.start_date_month(arg_from_str(value.unwrap_or("-0"), err, "start-date-month", "integer")); + call = call.start_date_month( value.map(|v| arg_from_str(v, err, "start-date-month", "int32")).unwrap_or(-0)); }, "start-date-day" => { - call = call.start_date_day(arg_from_str(value.unwrap_or("-0"), err, "start-date-day", "integer")); + call = call.start_date_day( value.map(|v| arg_from_str(v, err, "start-date-day", "int32")).unwrap_or(-0)); }, "query" => { call = call.query(value.unwrap_or("")); @@ -73,16 +72,16 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| 
arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "end-date-year" => { - call = call.end_date_year(arg_from_str(value.unwrap_or("-0"), err, "end-date-year", "integer")); + call = call.end_date_year( value.map(|v| arg_from_str(v, err, "end-date-year", "int32")).unwrap_or(-0)); }, "end-date-month" => { - call = call.end_date_month(arg_from_str(value.unwrap_or("-0"), err, "end-date-month", "integer")); + call = call.end_date_month( value.map(|v| arg_from_str(v, err, "end-date-month", "int32")).unwrap_or(-0)); }, "end-date-day" => { - call = call.end_date_day(arg_from_str(value.unwrap_or("-0"), err, "end-date-day", "integer")); + call = call.end_date_day( value.map(|v| arg_from_str(v, err, "end-date-day", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -138,13 +137,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-date-year" => { - call = call.start_date_year(arg_from_str(value.unwrap_or("-0"), err, "start-date-year", "integer")); + call = call.start_date_year( value.map(|v| arg_from_str(v, err, "start-date-year", "int32")).unwrap_or(-0)); }, "start-date-month" => { - call = call.start_date_month(arg_from_str(value.unwrap_or("-0"), err, "start-date-month", "integer")); + call = call.start_date_month( value.map(|v| arg_from_str(v, err, "start-date-month", "int32")).unwrap_or(-0)); }, "start-date-day" => { - call = call.start_date_day(arg_from_str(value.unwrap_or("-0"), err, "start-date-day", "integer")); + call = call.start_date_day( value.map(|v| arg_from_str(v, err, "start-date-day", "int32")).unwrap_or(-0)); }, "query" => { call = call.query(value.unwrap_or("")); @@ -153,16 +152,16 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "end-date-year" => { - call = 
call.end_date_year(arg_from_str(value.unwrap_or("-0"), err, "end-date-year", "integer")); + call = call.end_date_year( value.map(|v| arg_from_str(v, err, "end-date-year", "int32")).unwrap_or(-0)); }, "end-date-month" => { - call = call.end_date_month(arg_from_str(value.unwrap_or("-0"), err, "end-date-month", "integer")); + call = call.end_date_month( value.map(|v| arg_from_str(v, err, "end-date-month", "int32")).unwrap_or(-0)); }, "end-date-day" => { - call = call.end_date_day(arg_from_str(value.unwrap_or("-0"), err, "end-date-day", "integer")); + call = call.end_date_day( value.map(|v| arg_from_str(v, err, "end-date-day", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -353,7 +352,7 @@ async fn main() { let mut app = App::new("localservices1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_localservices1_cli") .arg(Arg::with_name("url") diff --git a/gen/localservices1/Cargo.toml b/gen/localservices1/Cargo.toml index 20adbf4186..e6742d2b8a 100644 --- a/gen/localservices1/Cargo.toml +++ b/gen/localservices1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-localservices1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Localservices (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/localservices1" homepage = "https://ads.google.com/local-services-ads/" -documentation = "https://docs.rs/google-localservices1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-localservices1/5.0.2+20230123" license = "MIT" keywords = ["localservices", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/localservices1/README.md b/gen/localservices1/README.md index de01511d1f..913046236a 100644 --- a/gen/localservices1/README.md +++ b/gen/localservices1/README.md 
@@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-localservices1` library allows access to all features of the *Google Localservices* service. -This documentation was generated from *Localservices* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *localservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Localservices* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *localservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Localservices* *v1* API can be found at the [official documentation site](https://ads.google.com/local-services-ads/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/Localservices) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/Localservices) ... 
* account reports - * [*search*](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/api::AccountReportSearchCall) + * [*search*](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/api::AccountReportSearchCall) * detailed lead reports - * [*search*](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/api::DetailedLeadReportSearchCall) + * [*search*](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/api::DetailedLeadReportSearchCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/Localservices)** +* **[Hub](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/Localservices)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::CallBuilder) -* **[Resources](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::CallBuilder) +* **[Resources](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::Part)** + * **[Parts](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::Delegate) to the -[Method Builder](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::Delegate) to the +[Method Builder](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::RequestValue) and -[decodable](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::RequestValue) and +[decodable](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-localservices1/5.0.2-beta-1+20230123/google_localservices1/client::RequestValue) are moved +* [request values](https://docs.rs/google-localservices1/5.0.2+20230123/google_localservices1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/localservices1/src/api.rs b/gen/localservices1/src/api.rs index 402d326b51..67374e5bac 100644 --- a/gen/localservices1/src/api.rs +++ b/gen/localservices1/src/api.rs @@ -128,7 +128,7 @@ impl<'a, S> Localservices { Localservices { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://localservices.googleapis.com/".to_string(), _root_url: "https://localservices.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> Localservices { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/localservices1/src/client.rs b/gen/localservices1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/localservices1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/localservices1/src/lib.rs b/gen/localservices1/src/lib.rs index 65f05c7d2c..bd05076fac 100644 --- a/gen/localservices1/src/lib.rs +++ b/gen/localservices1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Localservices* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *localservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Localservices* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *localservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Localservices* *v1* API can be found at the //! [official documentation site](https://ads.google.com/local-services-ads/). diff --git a/gen/logging2-cli/Cargo.toml b/gen/logging2-cli/Cargo.toml index 691b0a9371..cd3f4d922b 100644 --- a/gen/logging2-cli/Cargo.toml +++ b/gen/logging2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-logging2-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Logging (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/logging2-cli" @@ -20,13 +20,13 @@ name = "logging2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-logging2] path = "../logging2" -version = "4.0.1+20220225" +version = "5.0.2+20230104" + diff --git a/gen/logging2-cli/README.md b/gen/logging2-cli/README.md index 4b4b5344e5..34e53d68fe 100644 --- a/gen/logging2-cli/README.md +++ b/gen/logging2-cli/README.md @@ -25,13 +25,11 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Logging* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Logging* API at revision *20230104*. The CLI is at version *5.0.2*. ```bash logging2 [options] billing-accounts - buckets-get [-p ]... [-o ] - buckets-views-get [-p ]... [-o ] exclusions-create (-r )... [-p ]... [-o ] exclusions-delete [-p ]... [-o ] exclusions-get [-p ]... [-o ] @@ -41,20 +39,23 @@ logging2 [options] get-settings [-p ]... [-o ] locations-buckets-create (-r )... [-p ]... [-o ] locations-buckets-delete [-p ]... [-o ] + locations-buckets-get [-p ]... [-o ] locations-buckets-list [-p ]... [-o ] locations-buckets-patch (-r )... [-p ]... [-o ] locations-buckets-undelete (-r )... [-p ]... [-o ] locations-buckets-views-create (-r )... [-p ]... [-o ] locations-buckets-views-delete [-p ]... [-o ] + locations-buckets-views-get [-p ]... [-o ] locations-buckets-views-list [-p ]... [-o ] + locations-buckets-views-logs-list [-p ]... [-o ] locations-buckets-views-patch (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-list [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... [-o ] + locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] logs-delete [-p ]... [-o ] logs-list [-p ]... [-o ] - operations-get [-p ]... [-o ] sinks-create (-r )... [-p ]... [-o ] sinks-delete [-p ]... [-o ] sinks-get [-p ]... [-o ] @@ -90,6 +91,7 @@ logging2 [options] locations-buckets-views-delete [-p ]... [-o ] locations-buckets-views-get [-p ]... [-o ] locations-buckets-views-list [-p ]... [-o ] + locations-buckets-views-logs-list [-p ]... [-o ] locations-buckets-views-patch (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-list [-p ]... [-o ] @@ -150,6 +152,7 @@ logging2 [options] locations-buckets-views-delete [-p ]... [-o ] locations-buckets-views-get [-p ]... [-o ] locations-buckets-views-list [-p ]... [-o ] + locations-buckets-views-logs-list [-p ]... 
[-o ] locations-buckets-views-patch (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-list [-p ]... [-o ] @@ -184,6 +187,7 @@ logging2 [options] locations-buckets-views-delete [-p ]... [-o ] locations-buckets-views-get [-p ]... [-o ] locations-buckets-views-list [-p ]... [-o ] + locations-buckets-views-logs-list [-p ]... [-o ] locations-buckets-views-patch (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-list [-p ]... [-o ] diff --git a/gen/logging2-cli/mkdocs.yml b/gen/logging2-cli/mkdocs.yml index 291b7a9e38..0a03e4bfb8 100644 --- a/gen/logging2-cli/mkdocs.yml +++ b/gen/logging2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Logging v4.0.1+20220225 +site_name: Logging v5.0.2+20230104 site_url: http://byron.github.io/google-apis-rs/google-logging2-cli site_description: A complete library to interact with Logging (protocol v2) @@ -7,177 +7,192 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/logging2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['billing-accounts_buckets-get.md', 'Billing Accounts', 'Buckets Get'] -- ['billing-accounts_buckets-views-get.md', 'Billing Accounts', 'Buckets Views Get'] -- ['billing-accounts_exclusions-create.md', 'Billing Accounts', 'Exclusions Create'] -- ['billing-accounts_exclusions-delete.md', 'Billing Accounts', 'Exclusions Delete'] -- ['billing-accounts_exclusions-get.md', 'Billing Accounts', 'Exclusions Get'] -- ['billing-accounts_exclusions-list.md', 'Billing Accounts', 'Exclusions List'] -- ['billing-accounts_exclusions-patch.md', 'Billing Accounts', 'Exclusions Patch'] -- ['billing-accounts_get-cmek-settings.md', 'Billing Accounts', 'Get Cmek Settings'] -- ['billing-accounts_get-settings.md', 'Billing Accounts', 'Get Settings'] -- ['billing-accounts_locations-buckets-create.md', 'Billing Accounts', 'Locations Buckets Create'] -- ['billing-accounts_locations-buckets-delete.md', 'Billing Accounts', 'Locations Buckets Delete'] -- 
['billing-accounts_locations-buckets-list.md', 'Billing Accounts', 'Locations Buckets List'] -- ['billing-accounts_locations-buckets-patch.md', 'Billing Accounts', 'Locations Buckets Patch'] -- ['billing-accounts_locations-buckets-undelete.md', 'Billing Accounts', 'Locations Buckets Undelete'] -- ['billing-accounts_locations-buckets-views-create.md', 'Billing Accounts', 'Locations Buckets Views Create'] -- ['billing-accounts_locations-buckets-views-delete.md', 'Billing Accounts', 'Locations Buckets Views Delete'] -- ['billing-accounts_locations-buckets-views-list.md', 'Billing Accounts', 'Locations Buckets Views List'] -- ['billing-accounts_locations-buckets-views-patch.md', 'Billing Accounts', 'Locations Buckets Views Patch'] -- ['billing-accounts_locations-get.md', 'Billing Accounts', 'Locations Get'] -- ['billing-accounts_locations-list.md', 'Billing Accounts', 'Locations List'] -- ['billing-accounts_locations-operations-cancel.md', 'Billing Accounts', 'Locations Operations Cancel'] -- ['billing-accounts_locations-operations-list.md', 'Billing Accounts', 'Locations Operations List'] -- ['billing-accounts_logs-delete.md', 'Billing Accounts', 'Logs Delete'] -- ['billing-accounts_logs-list.md', 'Billing Accounts', 'Logs List'] -- ['billing-accounts_operations-get.md', 'Billing Accounts', 'Operations Get'] -- ['billing-accounts_sinks-create.md', 'Billing Accounts', 'Sinks Create'] -- ['billing-accounts_sinks-delete.md', 'Billing Accounts', 'Sinks Delete'] -- ['billing-accounts_sinks-get.md', 'Billing Accounts', 'Sinks Get'] -- ['billing-accounts_sinks-list.md', 'Billing Accounts', 'Sinks List'] -- ['billing-accounts_sinks-patch.md', 'Billing Accounts', 'Sinks Patch'] -- ['billing-accounts_sinks-update.md', 'Billing Accounts', 'Sinks Update'] -- ['entries_copy.md', 'Entries', 'Copy'] -- ['entries_list.md', 'Entries', 'List'] -- ['entries_tail.md', 'Entries', 'Tail'] -- ['entries_write.md', 'Entries', 'Write'] -- ['exclusions_create.md', 'Exclusions', 'Create'] -- 
['exclusions_delete.md', 'Exclusions', 'Delete'] -- ['exclusions_get.md', 'Exclusions', 'Get'] -- ['exclusions_list.md', 'Exclusions', 'List'] -- ['exclusions_patch.md', 'Exclusions', 'Patch'] -- ['folders_exclusions-create.md', 'Folders', 'Exclusions Create'] -- ['folders_exclusions-delete.md', 'Folders', 'Exclusions Delete'] -- ['folders_exclusions-get.md', 'Folders', 'Exclusions Get'] -- ['folders_exclusions-list.md', 'Folders', 'Exclusions List'] -- ['folders_exclusions-patch.md', 'Folders', 'Exclusions Patch'] -- ['folders_get-cmek-settings.md', 'Folders', 'Get Cmek Settings'] -- ['folders_get-settings.md', 'Folders', 'Get Settings'] -- ['folders_locations-buckets-create.md', 'Folders', 'Locations Buckets Create'] -- ['folders_locations-buckets-delete.md', 'Folders', 'Locations Buckets Delete'] -- ['folders_locations-buckets-get.md', 'Folders', 'Locations Buckets Get'] -- ['folders_locations-buckets-list.md', 'Folders', 'Locations Buckets List'] -- ['folders_locations-buckets-patch.md', 'Folders', 'Locations Buckets Patch'] -- ['folders_locations-buckets-undelete.md', 'Folders', 'Locations Buckets Undelete'] -- ['folders_locations-buckets-views-create.md', 'Folders', 'Locations Buckets Views Create'] -- ['folders_locations-buckets-views-delete.md', 'Folders', 'Locations Buckets Views Delete'] -- ['folders_locations-buckets-views-get.md', 'Folders', 'Locations Buckets Views Get'] -- ['folders_locations-buckets-views-list.md', 'Folders', 'Locations Buckets Views List'] -- ['folders_locations-buckets-views-patch.md', 'Folders', 'Locations Buckets Views Patch'] -- ['folders_locations-get.md', 'Folders', 'Locations Get'] -- ['folders_locations-list.md', 'Folders', 'Locations List'] -- ['folders_locations-operations-cancel.md', 'Folders', 'Locations Operations Cancel'] -- ['folders_locations-operations-get.md', 'Folders', 'Locations Operations Get'] -- ['folders_locations-operations-list.md', 'Folders', 'Locations Operations List'] -- ['folders_logs-delete.md', 
'Folders', 'Logs Delete'] -- ['folders_logs-list.md', 'Folders', 'Logs List'] -- ['folders_sinks-create.md', 'Folders', 'Sinks Create'] -- ['folders_sinks-delete.md', 'Folders', 'Sinks Delete'] -- ['folders_sinks-get.md', 'Folders', 'Sinks Get'] -- ['folders_sinks-list.md', 'Folders', 'Sinks List'] -- ['folders_sinks-patch.md', 'Folders', 'Sinks Patch'] -- ['folders_sinks-update.md', 'Folders', 'Sinks Update'] -- ['folders_update-settings.md', 'Folders', 'Update Settings'] -- ['locations_buckets-create.md', 'Locations', 'Buckets Create'] -- ['locations_buckets-delete.md', 'Locations', 'Buckets Delete'] -- ['locations_buckets-get.md', 'Locations', 'Buckets Get'] -- ['locations_buckets-list.md', 'Locations', 'Buckets List'] -- ['locations_buckets-patch.md', 'Locations', 'Buckets Patch'] -- ['locations_buckets-undelete.md', 'Locations', 'Buckets Undelete'] -- ['locations_buckets-views-create.md', 'Locations', 'Buckets Views Create'] -- ['locations_buckets-views-delete.md', 'Locations', 'Buckets Views Delete'] -- ['locations_buckets-views-get.md', 'Locations', 'Buckets Views Get'] -- ['locations_buckets-views-list.md', 'Locations', 'Buckets Views List'] -- ['locations_buckets-views-patch.md', 'Locations', 'Buckets Views Patch'] -- ['locations_get.md', 'Locations', 'Get'] -- ['locations_list.md', 'Locations', 'List'] -- ['locations_operations-cancel.md', 'Locations', 'Operations Cancel'] -- ['locations_operations-get.md', 'Locations', 'Operations Get'] -- ['locations_operations-list.md', 'Locations', 'Operations List'] -- ['logs_delete.md', 'Logs', 'Delete'] -- ['logs_list.md', 'Logs', 'List'] -- ['methods_get-cmek-settings.md', 'Methods', 'Get Cmek Settings'] -- ['methods_get-settings.md', 'Methods', 'Get Settings'] -- ['methods_update-cmek-settings.md', 'Methods', 'Update Cmek Settings'] -- ['methods_update-settings.md', 'Methods', 'Update Settings'] -- ['monitored-resource-descriptors_list.md', 'Monitored Resource Descriptors', 'List'] -- 
['organizations_exclusions-create.md', 'Organizations', 'Exclusions Create'] -- ['organizations_exclusions-delete.md', 'Organizations', 'Exclusions Delete'] -- ['organizations_exclusions-get.md', 'Organizations', 'Exclusions Get'] -- ['organizations_exclusions-list.md', 'Organizations', 'Exclusions List'] -- ['organizations_exclusions-patch.md', 'Organizations', 'Exclusions Patch'] -- ['organizations_get-cmek-settings.md', 'Organizations', 'Get Cmek Settings'] -- ['organizations_get-settings.md', 'Organizations', 'Get Settings'] -- ['organizations_locations-buckets-create.md', 'Organizations', 'Locations Buckets Create'] -- ['organizations_locations-buckets-delete.md', 'Organizations', 'Locations Buckets Delete'] -- ['organizations_locations-buckets-get.md', 'Organizations', 'Locations Buckets Get'] -- ['organizations_locations-buckets-list.md', 'Organizations', 'Locations Buckets List'] -- ['organizations_locations-buckets-patch.md', 'Organizations', 'Locations Buckets Patch'] -- ['organizations_locations-buckets-undelete.md', 'Organizations', 'Locations Buckets Undelete'] -- ['organizations_locations-buckets-views-create.md', 'Organizations', 'Locations Buckets Views Create'] -- ['organizations_locations-buckets-views-delete.md', 'Organizations', 'Locations Buckets Views Delete'] -- ['organizations_locations-buckets-views-get.md', 'Organizations', 'Locations Buckets Views Get'] -- ['organizations_locations-buckets-views-list.md', 'Organizations', 'Locations Buckets Views List'] -- ['organizations_locations-buckets-views-patch.md', 'Organizations', 'Locations Buckets Views Patch'] -- ['organizations_locations-get.md', 'Organizations', 'Locations Get'] -- ['organizations_locations-list.md', 'Organizations', 'Locations List'] -- ['organizations_locations-operations-cancel.md', 'Organizations', 'Locations Operations Cancel'] -- ['organizations_locations-operations-get.md', 'Organizations', 'Locations Operations Get'] -- ['organizations_locations-operations-list.md', 
'Organizations', 'Locations Operations List'] -- ['organizations_logs-delete.md', 'Organizations', 'Logs Delete'] -- ['organizations_logs-list.md', 'Organizations', 'Logs List'] -- ['organizations_sinks-create.md', 'Organizations', 'Sinks Create'] -- ['organizations_sinks-delete.md', 'Organizations', 'Sinks Delete'] -- ['organizations_sinks-get.md', 'Organizations', 'Sinks Get'] -- ['organizations_sinks-list.md', 'Organizations', 'Sinks List'] -- ['organizations_sinks-patch.md', 'Organizations', 'Sinks Patch'] -- ['organizations_sinks-update.md', 'Organizations', 'Sinks Update'] -- ['organizations_update-cmek-settings.md', 'Organizations', 'Update Cmek Settings'] -- ['organizations_update-settings.md', 'Organizations', 'Update Settings'] -- ['projects_exclusions-create.md', 'Projects', 'Exclusions Create'] -- ['projects_exclusions-delete.md', 'Projects', 'Exclusions Delete'] -- ['projects_exclusions-get.md', 'Projects', 'Exclusions Get'] -- ['projects_exclusions-list.md', 'Projects', 'Exclusions List'] -- ['projects_exclusions-patch.md', 'Projects', 'Exclusions Patch'] -- ['projects_get-cmek-settings.md', 'Projects', 'Get Cmek Settings'] -- ['projects_get-settings.md', 'Projects', 'Get Settings'] -- ['projects_locations-buckets-create.md', 'Projects', 'Locations Buckets Create'] -- ['projects_locations-buckets-delete.md', 'Projects', 'Locations Buckets Delete'] -- ['projects_locations-buckets-get.md', 'Projects', 'Locations Buckets Get'] -- ['projects_locations-buckets-list.md', 'Projects', 'Locations Buckets List'] -- ['projects_locations-buckets-patch.md', 'Projects', 'Locations Buckets Patch'] -- ['projects_locations-buckets-undelete.md', 'Projects', 'Locations Buckets Undelete'] -- ['projects_locations-buckets-views-create.md', 'Projects', 'Locations Buckets Views Create'] -- ['projects_locations-buckets-views-delete.md', 'Projects', 'Locations Buckets Views Delete'] -- ['projects_locations-buckets-views-get.md', 'Projects', 'Locations Buckets Views Get'] -- 
['projects_locations-buckets-views-list.md', 'Projects', 'Locations Buckets Views List'] -- ['projects_locations-buckets-views-patch.md', 'Projects', 'Locations Buckets Views Patch'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_logs-delete.md', 'Projects', 'Logs Delete'] -- ['projects_logs-list.md', 'Projects', 'Logs List'] -- ['projects_metrics-create.md', 'Projects', 'Metrics Create'] -- ['projects_metrics-delete.md', 'Projects', 'Metrics Delete'] -- ['projects_metrics-get.md', 'Projects', 'Metrics Get'] -- ['projects_metrics-list.md', 'Projects', 'Metrics List'] -- ['projects_metrics-update.md', 'Projects', 'Metrics Update'] -- ['projects_sinks-create.md', 'Projects', 'Sinks Create'] -- ['projects_sinks-delete.md', 'Projects', 'Sinks Delete'] -- ['projects_sinks-get.md', 'Projects', 'Sinks Get'] -- ['projects_sinks-list.md', 'Projects', 'Sinks List'] -- ['projects_sinks-patch.md', 'Projects', 'Sinks Patch'] -- ['projects_sinks-update.md', 'Projects', 'Sinks Update'] -- ['sinks_create.md', 'Sinks', 'Create'] -- ['sinks_delete.md', 'Sinks', 'Delete'] -- ['sinks_get.md', 'Sinks', 'Get'] -- ['sinks_list.md', 'Sinks', 'List'] -- ['sinks_update.md', 'Sinks', 'Update'] +nav: +- Home: 'index.md' +- 'Billing Accounts': + - 'Exclusions Create': 'billing-accounts_exclusions-create.md' + - 'Exclusions Delete': 'billing-accounts_exclusions-delete.md' + - 'Exclusions Get': 'billing-accounts_exclusions-get.md' + - 'Exclusions List': 'billing-accounts_exclusions-list.md' + - 'Exclusions Patch': 'billing-accounts_exclusions-patch.md' + - 'Get Cmek Settings': 'billing-accounts_get-cmek-settings.md' + - 'Get Settings': 
'billing-accounts_get-settings.md' + - 'Locations Buckets Create': 'billing-accounts_locations-buckets-create.md' + - 'Locations Buckets Delete': 'billing-accounts_locations-buckets-delete.md' + - 'Locations Buckets Get': 'billing-accounts_locations-buckets-get.md' + - 'Locations Buckets List': 'billing-accounts_locations-buckets-list.md' + - 'Locations Buckets Patch': 'billing-accounts_locations-buckets-patch.md' + - 'Locations Buckets Undelete': 'billing-accounts_locations-buckets-undelete.md' + - 'Locations Buckets Views Create': 'billing-accounts_locations-buckets-views-create.md' + - 'Locations Buckets Views Delete': 'billing-accounts_locations-buckets-views-delete.md' + - 'Locations Buckets Views Get': 'billing-accounts_locations-buckets-views-get.md' + - 'Locations Buckets Views List': 'billing-accounts_locations-buckets-views-list.md' + - 'Locations Buckets Views Logs List': 'billing-accounts_locations-buckets-views-logs-list.md' + - 'Locations Buckets Views Patch': 'billing-accounts_locations-buckets-views-patch.md' + - 'Locations Get': 'billing-accounts_locations-get.md' + - 'Locations List': 'billing-accounts_locations-list.md' + - 'Locations Operations Cancel': 'billing-accounts_locations-operations-cancel.md' + - 'Locations Operations Get': 'billing-accounts_locations-operations-get.md' + - 'Locations Operations List': 'billing-accounts_locations-operations-list.md' + - 'Logs Delete': 'billing-accounts_logs-delete.md' + - 'Logs List': 'billing-accounts_logs-list.md' + - 'Sinks Create': 'billing-accounts_sinks-create.md' + - 'Sinks Delete': 'billing-accounts_sinks-delete.md' + - 'Sinks Get': 'billing-accounts_sinks-get.md' + - 'Sinks List': 'billing-accounts_sinks-list.md' + - 'Sinks Patch': 'billing-accounts_sinks-patch.md' + - 'Sinks Update': 'billing-accounts_sinks-update.md' +- 'Entries': + - 'Copy': 'entries_copy.md' + - 'List': 'entries_list.md' + - 'Tail': 'entries_tail.md' + - 'Write': 'entries_write.md' +- 'Exclusions': + - 'Create': 
'exclusions_create.md' + - 'Delete': 'exclusions_delete.md' + - 'Get': 'exclusions_get.md' + - 'List': 'exclusions_list.md' + - 'Patch': 'exclusions_patch.md' +- 'Folders': + - 'Exclusions Create': 'folders_exclusions-create.md' + - 'Exclusions Delete': 'folders_exclusions-delete.md' + - 'Exclusions Get': 'folders_exclusions-get.md' + - 'Exclusions List': 'folders_exclusions-list.md' + - 'Exclusions Patch': 'folders_exclusions-patch.md' + - 'Get Cmek Settings': 'folders_get-cmek-settings.md' + - 'Get Settings': 'folders_get-settings.md' + - 'Locations Buckets Create': 'folders_locations-buckets-create.md' + - 'Locations Buckets Delete': 'folders_locations-buckets-delete.md' + - 'Locations Buckets Get': 'folders_locations-buckets-get.md' + - 'Locations Buckets List': 'folders_locations-buckets-list.md' + - 'Locations Buckets Patch': 'folders_locations-buckets-patch.md' + - 'Locations Buckets Undelete': 'folders_locations-buckets-undelete.md' + - 'Locations Buckets Views Create': 'folders_locations-buckets-views-create.md' + - 'Locations Buckets Views Delete': 'folders_locations-buckets-views-delete.md' + - 'Locations Buckets Views Get': 'folders_locations-buckets-views-get.md' + - 'Locations Buckets Views List': 'folders_locations-buckets-views-list.md' + - 'Locations Buckets Views Logs List': 'folders_locations-buckets-views-logs-list.md' + - 'Locations Buckets Views Patch': 'folders_locations-buckets-views-patch.md' + - 'Locations Get': 'folders_locations-get.md' + - 'Locations List': 'folders_locations-list.md' + - 'Locations Operations Cancel': 'folders_locations-operations-cancel.md' + - 'Locations Operations Get': 'folders_locations-operations-get.md' + - 'Locations Operations List': 'folders_locations-operations-list.md' + - 'Logs Delete': 'folders_logs-delete.md' + - 'Logs List': 'folders_logs-list.md' + - 'Sinks Create': 'folders_sinks-create.md' + - 'Sinks Delete': 'folders_sinks-delete.md' + - 'Sinks Get': 'folders_sinks-get.md' + - 'Sinks List': 
'folders_sinks-list.md' + - 'Sinks Patch': 'folders_sinks-patch.md' + - 'Sinks Update': 'folders_sinks-update.md' + - 'Update Settings': 'folders_update-settings.md' +- 'Locations': + - 'Buckets Create': 'locations_buckets-create.md' + - 'Buckets Delete': 'locations_buckets-delete.md' + - 'Buckets Get': 'locations_buckets-get.md' + - 'Buckets List': 'locations_buckets-list.md' + - 'Buckets Patch': 'locations_buckets-patch.md' + - 'Buckets Undelete': 'locations_buckets-undelete.md' + - 'Buckets Views Create': 'locations_buckets-views-create.md' + - 'Buckets Views Delete': 'locations_buckets-views-delete.md' + - 'Buckets Views Get': 'locations_buckets-views-get.md' + - 'Buckets Views List': 'locations_buckets-views-list.md' + - 'Buckets Views Patch': 'locations_buckets-views-patch.md' + - 'Get': 'locations_get.md' + - 'List': 'locations_list.md' + - 'Operations Cancel': 'locations_operations-cancel.md' + - 'Operations Get': 'locations_operations-get.md' + - 'Operations List': 'locations_operations-list.md' +- 'Logs': + - 'Delete': 'logs_delete.md' + - 'List': 'logs_list.md' +- 'Methods': + - 'Get Cmek Settings': 'methods_get-cmek-settings.md' + - 'Get Settings': 'methods_get-settings.md' + - 'Update Cmek Settings': 'methods_update-cmek-settings.md' + - 'Update Settings': 'methods_update-settings.md' +- 'Monitored Resource Descriptors': + - 'List': 'monitored-resource-descriptors_list.md' +- 'Organizations': + - 'Exclusions Create': 'organizations_exclusions-create.md' + - 'Exclusions Delete': 'organizations_exclusions-delete.md' + - 'Exclusions Get': 'organizations_exclusions-get.md' + - 'Exclusions List': 'organizations_exclusions-list.md' + - 'Exclusions Patch': 'organizations_exclusions-patch.md' + - 'Get Cmek Settings': 'organizations_get-cmek-settings.md' + - 'Get Settings': 'organizations_get-settings.md' + - 'Locations Buckets Create': 'organizations_locations-buckets-create.md' + - 'Locations Buckets Delete': 'organizations_locations-buckets-delete.md' + - 
'Locations Buckets Get': 'organizations_locations-buckets-get.md' + - 'Locations Buckets List': 'organizations_locations-buckets-list.md' + - 'Locations Buckets Patch': 'organizations_locations-buckets-patch.md' + - 'Locations Buckets Undelete': 'organizations_locations-buckets-undelete.md' + - 'Locations Buckets Views Create': 'organizations_locations-buckets-views-create.md' + - 'Locations Buckets Views Delete': 'organizations_locations-buckets-views-delete.md' + - 'Locations Buckets Views Get': 'organizations_locations-buckets-views-get.md' + - 'Locations Buckets Views List': 'organizations_locations-buckets-views-list.md' + - 'Locations Buckets Views Logs List': 'organizations_locations-buckets-views-logs-list.md' + - 'Locations Buckets Views Patch': 'organizations_locations-buckets-views-patch.md' + - 'Locations Get': 'organizations_locations-get.md' + - 'Locations List': 'organizations_locations-list.md' + - 'Locations Operations Cancel': 'organizations_locations-operations-cancel.md' + - 'Locations Operations Get': 'organizations_locations-operations-get.md' + - 'Locations Operations List': 'organizations_locations-operations-list.md' + - 'Logs Delete': 'organizations_logs-delete.md' + - 'Logs List': 'organizations_logs-list.md' + - 'Sinks Create': 'organizations_sinks-create.md' + - 'Sinks Delete': 'organizations_sinks-delete.md' + - 'Sinks Get': 'organizations_sinks-get.md' + - 'Sinks List': 'organizations_sinks-list.md' + - 'Sinks Patch': 'organizations_sinks-patch.md' + - 'Sinks Update': 'organizations_sinks-update.md' + - 'Update Cmek Settings': 'organizations_update-cmek-settings.md' + - 'Update Settings': 'organizations_update-settings.md' +- 'Projects': + - 'Exclusions Create': 'projects_exclusions-create.md' + - 'Exclusions Delete': 'projects_exclusions-delete.md' + - 'Exclusions Get': 'projects_exclusions-get.md' + - 'Exclusions List': 'projects_exclusions-list.md' + - 'Exclusions Patch': 'projects_exclusions-patch.md' + - 'Get Cmek Settings': 
'projects_get-cmek-settings.md' + - 'Get Settings': 'projects_get-settings.md' + - 'Locations Buckets Create': 'projects_locations-buckets-create.md' + - 'Locations Buckets Delete': 'projects_locations-buckets-delete.md' + - 'Locations Buckets Get': 'projects_locations-buckets-get.md' + - 'Locations Buckets List': 'projects_locations-buckets-list.md' + - 'Locations Buckets Patch': 'projects_locations-buckets-patch.md' + - 'Locations Buckets Undelete': 'projects_locations-buckets-undelete.md' + - 'Locations Buckets Views Create': 'projects_locations-buckets-views-create.md' + - 'Locations Buckets Views Delete': 'projects_locations-buckets-views-delete.md' + - 'Locations Buckets Views Get': 'projects_locations-buckets-views-get.md' + - 'Locations Buckets Views List': 'projects_locations-buckets-views-list.md' + - 'Locations Buckets Views Logs List': 'projects_locations-buckets-views-logs-list.md' + - 'Locations Buckets Views Patch': 'projects_locations-buckets-views-patch.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Logs Delete': 'projects_logs-delete.md' + - 'Logs List': 'projects_logs-list.md' + - 'Metrics Create': 'projects_metrics-create.md' + - 'Metrics Delete': 'projects_metrics-delete.md' + - 'Metrics Get': 'projects_metrics-get.md' + - 'Metrics List': 'projects_metrics-list.md' + - 'Metrics Update': 'projects_metrics-update.md' + - 'Sinks Create': 'projects_sinks-create.md' + - 'Sinks Delete': 'projects_sinks-delete.md' + - 'Sinks Get': 'projects_sinks-get.md' + - 'Sinks List': 'projects_sinks-list.md' + - 'Sinks Patch': 'projects_sinks-patch.md' + - 'Sinks Update': 'projects_sinks-update.md' +- 'Sinks': + - 'Create': 'sinks_create.md' + - 'Delete': 'sinks_delete.md' + - 'Get': 
'sinks_get.md' + - 'List': 'sinks_list.md' + - 'Update': 'sinks_update.md' theme: readthedocs diff --git a/gen/logging2-cli/src/client.rs b/gen/logging2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/logging2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/logging2-cli/src/main.rs b/gen/logging2-cli/src/main.rs index 81783462f4..d149b4af53 100644 --- a/gen/logging2-cli/src/main.rs +++ b/gen/logging2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_logging2::{api, Error, oauth2}; +use google_logging2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,110 +50,6 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { - async fn _billing_accounts_buckets_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.billing_accounts().buckets_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = 
match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - - async fn _billing_accounts_buckets_views_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.billing_accounts().buckets_views_get(opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, 
output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _billing_accounts_exclusions_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -359,7 +254,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -453,7 +348,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -630,6 +525,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -641,7 +537,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -757,6 +653,58 @@ where } } + async fn _billing_accounts_locations_buckets_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.billing_accounts().locations_buckets_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = 
false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_buckets_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.billing_accounts().locations_buckets_list(opt.value_of("parent").unwrap_or("")); @@ -767,7 +715,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -840,6 +788,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -851,7 +800,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -866,7 +815,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1144,6 +1093,58 @@ where } } + async fn _billing_accounts_locations_buckets_views_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.billing_accounts().locations_buckets_views_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_buckets_views_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.billing_accounts().locations_buckets_views_list(opt.value_of("parent").unwrap_or("")); @@ -1154,7 +1155,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", 
"integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1203,6 +1204,68 @@ where } } + async fn _billing_accounts_locations_buckets_views_logs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.billing_accounts().locations_buckets_views_logs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "resource-names" => { + call = call.add_resource_names(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "resource-names"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_buckets_views_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1247,7 +1310,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1358,7 +1421,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1494,6 +1557,58 @@ where } } + async fn _billing_accounts_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.billing_accounts().locations_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope 
in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.billing_accounts().locations_operations_list(opt.value_of("name").unwrap_or("")); @@ -1504,7 +1619,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1621,7 +1736,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1670,58 +1785,6 @@ where } } - async fn _billing_accounts_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - let mut call = self.hub.billing_accounts().operations_get(opt.value_of("name").unwrap_or("")); - for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _billing_accounts_sinks_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1773,7 +1836,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1936,7 +1999,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2036,10 +2099,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2139,10 +2202,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2749,7 +2812,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2843,7 +2906,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = 
call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3096,7 +3159,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3190,7 +3253,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3367,6 +3430,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3378,7 +3442,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", 
"retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3556,7 +3620,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3629,6 +3693,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3640,7 +3705,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", 
"service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3655,7 +3720,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3995,7 +4060,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4044,6 +4109,68 @@ where } } + async fn _folders_locations_buckets_views_logs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.folders().locations_buckets_views_logs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "resource-names" => { + call = call.add_resource_names(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "resource-names"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _folders_locations_buckets_views_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -4088,7 +4215,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4199,7 +4326,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4397,7 +4524,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4514,7 +4641,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4614,7 +4741,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4777,7 +4904,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4877,10 +5004,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4980,10 +5107,10 @@ where let (key, 
value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5076,7 +5203,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5149,6 +5276,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5160,7 +5288,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5338,7 +5466,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5411,6 +5539,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5422,7 +5551,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5437,7 +5566,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5777,7 +5906,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5870,7 +5999,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5981,7 +6110,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6179,7 +6308,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6296,7 +6425,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6473,10 +6602,11 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kms-key-version-name" => Some(("kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-account-id" => Some(("serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["kms-key-name", "name", "service-account-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["kms-key-name", "kms-key-version-name", "name", "service-account-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6491,7 +6621,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6584,7 +6714,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6643,7 +6773,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6896,7 +7026,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( 
value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6990,7 +7120,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -7167,6 +7297,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -7178,7 +7309,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -7356,7 +7487,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7429,6 +7560,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -7440,7 +7572,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -7455,7 +7587,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -7795,7 +7927,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -7844,6 +7976,68 @@ where } } + async fn _organizations_locations_buckets_views_logs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().locations_buckets_views_logs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "resource-names" => { + call = call.add_resource_names(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "resource-names"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_locations_buckets_views_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -7888,7 +8082,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -7999,7 +8193,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -8197,7 +8391,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ 
-8314,7 +8508,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8414,7 +8608,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -8577,7 +8771,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -8677,10 +8871,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -8780,10 +8974,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = 
call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -8856,10 +9050,11 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "kms-key-name" => Some(("kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kms-key-version-name" => Some(("kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-account-id" => Some(("serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["kms-key-name", "name", "service-account-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["kms-key-name", "kms-key-version-name", "name", "service-account-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -8874,7 +9069,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8967,7 +9162,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -9220,7 +9415,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9314,7 +9509,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -9491,6 +9686,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -9502,7 +9698,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", 
"retention-days", "service-account-id", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -9680,7 +9876,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9753,6 +9949,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "cmek-settings.kms-key-name" => Some(("cmekSettings.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cmek-settings.kms-key-version-name" => Some(("cmekSettings.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.name" => Some(("cmekSettings.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cmek-settings.service-account-id" => Some(("cmekSettings.serviceAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -9764,7 +9961,7 @@ where "retention-days" => Some(("retentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", "service-account-id", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["cmek-settings", "create-time", "description", "kms-key-name", "kms-key-version-name", "lifecycle-state", "locked", "name", "restricted-fields", "retention-days", 
"service-account-id", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -9779,7 +9976,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -10119,7 +10316,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10168,6 +10365,68 @@ where } } + async fn _projects_locations_buckets_views_logs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_buckets_views_logs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "resource-names" => { + call = call.add_resource_names(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "resource-names"].iter().map(|v|*v)); + v } )); 
+ } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_buckets_views_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -10212,7 +10471,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -10323,7 +10582,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -10521,7 +10780,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -10638,7 +10897,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10710,6 +10969,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "bucket-name" => Some(("bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bucket-options.explicit-buckets.bounds" => Some(("bucketOptions.explicitBuckets.bounds", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), "bucket-options.exponential-buckets.growth-factor" => Some(("bucketOptions.exponentialBuckets.growthFactor", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "bucket-options.exponential-buckets.num-finite-buckets" => Some(("bucketOptions.exponentialBuckets.numFiniteBuckets", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -10739,7 +10999,7 @@ where "value-extractor" => Some(("valueExtractor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bounds", "bucket-options", "create-time", "description", "disabled", "display-name", "explicit-buckets", "exponential-buckets", "filter", "growth-factor", "ingest-delay", "label-extractors", "launch-stage", "linear-buckets", "metadata", "metric-descriptor", "metric-kind", "monitored-resource-types", "name", "num-finite-buckets", "offset", "sample-period", "scale", "type", "unit", "update-time", "value-extractor", "value-type", "version", "width"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bounds", "bucket-name", "bucket-options", "create-time", "description", 
"disabled", "display-name", "explicit-buckets", "exponential-buckets", "filter", "growth-factor", "ingest-delay", "label-extractors", "launch-stage", "linear-buckets", "metadata", "metric-descriptor", "metric-kind", "monitored-resource-types", "name", "num-finite-buckets", "offset", "sample-period", "scale", "type", "unit", "update-time", "value-extractor", "value-type", "version", "width"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -10913,7 +11173,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -10985,6 +11245,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "bucket-name" => Some(("bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "bucket-options.explicit-buckets.bounds" => Some(("bucketOptions.explicitBuckets.bounds", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Vec })), "bucket-options.exponential-buckets.growth-factor" => Some(("bucketOptions.exponentialBuckets.growthFactor", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "bucket-options.exponential-buckets.num-finite-buckets" => Some(("bucketOptions.exponentialBuckets.numFiniteBuckets", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -11014,7 +11275,7 @@ where "value-extractor" => Some(("valueExtractor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bounds", "bucket-options", "create-time", "description", "disabled", "display-name", 
"explicit-buckets", "exponential-buckets", "filter", "growth-factor", "ingest-delay", "label-extractors", "launch-stage", "linear-buckets", "metadata", "metric-descriptor", "metric-kind", "monitored-resource-types", "name", "num-finite-buckets", "offset", "sample-period", "scale", "type", "unit", "update-time", "value-extractor", "value-type", "version", "width"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bounds", "bucket-name", "bucket-options", "create-time", "description", "disabled", "display-name", "explicit-buckets", "exponential-buckets", "filter", "growth-factor", "ingest-delay", "label-extractors", "launch-stage", "linear-buckets", "metadata", "metric-descriptor", "metric-kind", "monitored-resource-types", "name", "num-finite-buckets", "offset", "sample-period", "scale", "type", "unit", "update-time", "value-extractor", "value-type", "version", "width"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -11125,7 +11386,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -11288,7 +11549,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11388,10 +11649,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -11491,10 +11752,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -11594,7 +11855,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -11757,7 +12018,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11857,10 +12118,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -11916,12 +12177,6 @@ where match self.opt.subcommand() { ("billing-accounts", Some(opt)) => { match opt.subcommand() { - ("buckets-get", Some(opt)) => { - call_result = self._billing_accounts_buckets_get(opt, dry_run, &mut err).await; - }, - ("buckets-views-get", Some(opt)) => { - call_result = self._billing_accounts_buckets_views_get(opt, dry_run, &mut err).await; - }, ("exclusions-create", Some(opt)) => { call_result = self._billing_accounts_exclusions_create(opt, dry_run, &mut err).await; }, @@ -11949,6 +12204,9 @@ where ("locations-buckets-delete", Some(opt)) => { call_result = self._billing_accounts_locations_buckets_delete(opt, dry_run, &mut err).await; }, + ("locations-buckets-get", Some(opt)) => { + call_result = self._billing_accounts_locations_buckets_get(opt, dry_run, &mut err).await; + }, ("locations-buckets-list", Some(opt)) => { call_result = self._billing_accounts_locations_buckets_list(opt, dry_run, &mut err).await; }, @@ -11964,9 +12222,15 @@ where ("locations-buckets-views-delete", Some(opt)) => { call_result = self._billing_accounts_locations_buckets_views_delete(opt, dry_run, &mut err).await; }, + ("locations-buckets-views-get", Some(opt)) => { + call_result = self._billing_accounts_locations_buckets_views_get(opt, dry_run, &mut err).await; + }, ("locations-buckets-views-list", Some(opt)) => { call_result = self._billing_accounts_locations_buckets_views_list(opt, dry_run, &mut err).await; }, + ("locations-buckets-views-logs-list", Some(opt)) => { + call_result = self._billing_accounts_locations_buckets_views_logs_list(opt, dry_run, &mut err).await; + }, 
("locations-buckets-views-patch", Some(opt)) => { call_result = self._billing_accounts_locations_buckets_views_patch(opt, dry_run, &mut err).await; }, @@ -11979,6 +12243,9 @@ where ("locations-operations-cancel", Some(opt)) => { call_result = self._billing_accounts_locations_operations_cancel(opt, dry_run, &mut err).await; }, + ("locations-operations-get", Some(opt)) => { + call_result = self._billing_accounts_locations_operations_get(opt, dry_run, &mut err).await; + }, ("locations-operations-list", Some(opt)) => { call_result = self._billing_accounts_locations_operations_list(opt, dry_run, &mut err).await; }, @@ -11988,9 +12255,6 @@ where ("logs-list", Some(opt)) => { call_result = self._billing_accounts_logs_list(opt, dry_run, &mut err).await; }, - ("operations-get", Some(opt)) => { - call_result = self._billing_accounts_operations_get(opt, dry_run, &mut err).await; - }, ("sinks-create", Some(opt)) => { call_result = self._billing_accounts_sinks_create(opt, dry_run, &mut err).await; }, @@ -12111,6 +12375,9 @@ where ("locations-buckets-views-list", Some(opt)) => { call_result = self._folders_locations_buckets_views_list(opt, dry_run, &mut err).await; }, + ("locations-buckets-views-logs-list", Some(opt)) => { + call_result = self._folders_locations_buckets_views_logs_list(opt, dry_run, &mut err).await; + }, ("locations-buckets-views-patch", Some(opt)) => { call_result = self._folders_locations_buckets_views_patch(opt, dry_run, &mut err).await; }, @@ -12316,6 +12583,9 @@ where ("locations-buckets-views-list", Some(opt)) => { call_result = self._organizations_locations_buckets_views_list(opt, dry_run, &mut err).await; }, + ("locations-buckets-views-logs-list", Some(opt)) => { + call_result = self._organizations_locations_buckets_views_logs_list(opt, dry_run, &mut err).await; + }, ("locations-buckets-views-patch", Some(opt)) => { call_result = self._organizations_locations_buckets_views_patch(opt, dry_run, &mut err).await; }, @@ -12423,6 +12693,9 @@ where 
("locations-buckets-views-list", Some(opt)) => { call_result = self._projects_locations_buckets_views_list(opt, dry_run, &mut err).await; }, + ("locations-buckets-views-logs-list", Some(opt)) => { + call_result = self._projects_locations_buckets_views_logs_list(opt, dry_run, &mut err).await; + }, ("locations-buckets-views-patch", Some(opt)) => { call_result = self._projects_locations_buckets_views_patch(opt, dry_run, &mut err).await; }, @@ -12582,51 +12855,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("billing-accounts", "methods: 'buckets-get', 'buckets-views-get', 'exclusions-create', 'exclusions-delete', 'exclusions-get', 'exclusions-list', 'exclusions-patch', 'get-cmek-settings', 'get-settings', 'locations-buckets-create', 'locations-buckets-delete', 'locations-buckets-list', 'locations-buckets-patch', 'locations-buckets-undelete', 'locations-buckets-views-create', 'locations-buckets-views-delete', 'locations-buckets-views-list', 'locations-buckets-views-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-list', 'logs-delete', 'logs-list', 'operations-get', 'sinks-create', 'sinks-delete', 'sinks-get', 'sinks-list', 'sinks-patch' and 'sinks-update'", vec![ - ("buckets-get", - Some(r##"Gets a log bucket."##), - "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/billing-accounts_buckets-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. 
The resource name of the bucket: "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket""##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("buckets-views-get", - Some(r##"Gets a view on a log bucket.."##), - "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/billing-accounts_buckets-views-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"Required. The resource name of the policy: "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket/views/my-view""##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), + ("billing-accounts", "methods: 'exclusions-create', 'exclusions-delete', 'exclusions-get', 'exclusions-list', 'exclusions-patch', 'get-cmek-settings', 'get-settings', 'locations-buckets-create', 'locations-buckets-delete', 'locations-buckets-get', 'locations-buckets-list', 'locations-buckets-patch', 'locations-buckets-undelete', 'locations-buckets-views-create', 'locations-buckets-views-delete', 'locations-buckets-views-get', 'locations-buckets-views-list', 
'locations-buckets-views-logs-list', 'locations-buckets-views-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'logs-delete', 'logs-list', 'sinks-create', 'sinks-delete', 'sinks-get', 'sinks-list', 'sinks-patch' and 'sinks-update'", vec![ ("exclusions-create", Some(r##"Creates a new exclusion in the _Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource."##), "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/billing-accounts_exclusions-create", @@ -12837,6 +13066,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-buckets-get", + Some(r##"Gets a log bucket."##), + "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/billing-accounts_locations-buckets-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the bucket: "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket""##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -12965,6 +13216,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-buckets-views-get", + Some(r##"Gets a view on a log bucket.."##), + "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/billing-accounts_locations-buckets-views-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the policy: "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" For example:"projects/my-project/locations/global/buckets/my-bucket/views/my-view""##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -12987,6 +13260,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-buckets-views-logs-list", + Some(r##"Lists the logs in projects, organizations, folders, or billing accounts. 
Only logs that have entries are listed."##), + "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/billing-accounts_locations-buckets-views-logs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name that owns the logs: projects/[PROJECT_ID] organizations/[ORGANIZATION_ID] billingAccounts/[BILLING_ACCOUNT_ID] folders/[FOLDER_ID]"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -13087,6 +13382,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-get", + Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##), + "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/billing-accounts_locations-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -13153,28 +13470,6 @@ async fn main() { Some(false), Some(true)), - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), - ("operations-get", - Some(r##"Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service."##), - "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/billing-accounts_operations-get", - vec![ - (Some(r##"name"##), - None, - Some(r##"The name of the operation resource."##), - Some(true), - Some(false)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -13549,7 +13844,7 @@ async fn main() { ]), ]), - ("folders", "methods: 'exclusions-create', 'exclusions-delete', 'exclusions-get', 'exclusions-list', 'exclusions-patch', 'get-cmek-settings', 'get-settings', 'locations-buckets-create', 'locations-buckets-delete', 'locations-buckets-get', 'locations-buckets-list', 'locations-buckets-patch', 'locations-buckets-undelete', 'locations-buckets-views-create', 'locations-buckets-views-delete', 'locations-buckets-views-get', 'locations-buckets-views-list', 'locations-buckets-views-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'logs-delete', 'logs-list', 'sinks-create', 'sinks-delete', 'sinks-get', 'sinks-list', 'sinks-patch', 'sinks-update' and 'update-settings'", vec![ + ("folders", "methods: 'exclusions-create', 'exclusions-delete', 'exclusions-get', 'exclusions-list', 'exclusions-patch', 'get-cmek-settings', 'get-settings', 'locations-buckets-create', 'locations-buckets-delete', 'locations-buckets-get', 'locations-buckets-list', 'locations-buckets-patch', 'locations-buckets-undelete', 'locations-buckets-views-create', 'locations-buckets-views-delete', 'locations-buckets-views-get', 'locations-buckets-views-list', 'locations-buckets-views-logs-list', 'locations-buckets-views-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 
'locations-operations-get', 'locations-operations-list', 'logs-delete', 'logs-list', 'sinks-create', 'sinks-delete', 'sinks-get', 'sinks-list', 'sinks-patch', 'sinks-update' and 'update-settings'", vec![ ("exclusions-create", Some(r##"Creates a new exclusion in the _Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource."##), "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/folders_exclusions-create", @@ -13954,6 +14249,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-buckets-views-logs-list", + Some(r##"Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed."##), + "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/folders_locations-buckets-views-logs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The resource name that owns the logs: projects/[PROJECT_ID] organizations/[ORGANIZATION_ID] billingAccounts/[BILLING_ACCOUNT_ID] folders/[FOLDER_ID]"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -14888,7 +15205,7 @@ async fn main() { ]), ]), - ("organizations", "methods: 'exclusions-create', 'exclusions-delete', 'exclusions-get', 'exclusions-list', 'exclusions-patch', 'get-cmek-settings', 'get-settings', 'locations-buckets-create', 'locations-buckets-delete', 'locations-buckets-get', 'locations-buckets-list', 'locations-buckets-patch', 'locations-buckets-undelete', 'locations-buckets-views-create', 'locations-buckets-views-delete', 'locations-buckets-views-get', 'locations-buckets-views-list', 'locations-buckets-views-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'logs-delete', 'logs-list', 'sinks-create', 'sinks-delete', 'sinks-get', 'sinks-list', 'sinks-patch', 'sinks-update', 'update-cmek-settings' and 'update-settings'", vec![ + ("organizations", "methods: 'exclusions-create', 'exclusions-delete', 'exclusions-get', 'exclusions-list', 'exclusions-patch', 'get-cmek-settings', 'get-settings', 'locations-buckets-create', 'locations-buckets-delete', 'locations-buckets-get', 'locations-buckets-list', 'locations-buckets-patch', 'locations-buckets-undelete', 'locations-buckets-views-create', 'locations-buckets-views-delete', 'locations-buckets-views-get', 'locations-buckets-views-list', 'locations-buckets-views-logs-list', 'locations-buckets-views-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'logs-delete', 'logs-list', 'sinks-create', 'sinks-delete', 
'sinks-get', 'sinks-list', 'sinks-patch', 'sinks-update', 'update-cmek-settings' and 'update-settings'", vec![ ("exclusions-create", Some(r##"Creates a new exclusion in the _Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource."##), "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/organizations_exclusions-create", @@ -15293,6 +15610,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-buckets-views-logs-list", + Some(r##"Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed."##), + "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/organizations_locations-buckets-views-logs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The resource name that owns the logs: projects/[PROJECT_ID] organizations/[ORGANIZATION_ID] billingAccounts/[BILLING_ACCOUNT_ID] folders/[FOLDER_ID]"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -15695,7 +16034,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'exclusions-create', 'exclusions-delete', 'exclusions-get', 'exclusions-list', 'exclusions-patch', 'get-cmek-settings', 'get-settings', 'locations-buckets-create', 'locations-buckets-delete', 'locations-buckets-get', 'locations-buckets-list', 'locations-buckets-patch', 'locations-buckets-undelete', 'locations-buckets-views-create', 'locations-buckets-views-delete', 'locations-buckets-views-get', 'locations-buckets-views-list', 'locations-buckets-views-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'logs-delete', 'logs-list', 'metrics-create', 'metrics-delete', 'metrics-get', 'metrics-list', 'metrics-update', 'sinks-create', 'sinks-delete', 'sinks-get', 'sinks-list', 'sinks-patch' and 'sinks-update'", vec![ + ("projects", "methods: 'exclusions-create', 'exclusions-delete', 'exclusions-get', 'exclusions-list', 'exclusions-patch', 'get-cmek-settings', 'get-settings', 'locations-buckets-create', 'locations-buckets-delete', 'locations-buckets-get', 'locations-buckets-list', 'locations-buckets-patch', 'locations-buckets-undelete', 'locations-buckets-views-create', 'locations-buckets-views-delete', 'locations-buckets-views-get', 'locations-buckets-views-list', 'locations-buckets-views-logs-list', 'locations-buckets-views-patch', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-get', 'locations-operations-list', 'logs-delete', 'logs-list', 
'metrics-create', 'metrics-delete', 'metrics-get', 'metrics-list', 'metrics-update', 'sinks-create', 'sinks-delete', 'sinks-get', 'sinks-list', 'sinks-patch' and 'sinks-update'", vec![ ("exclusions-create", Some(r##"Creates a new exclusion in the _Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource."##), "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/projects_exclusions-create", @@ -16100,6 +16439,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-buckets-views-logs-list", + Some(r##"Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed."##), + "Details at http://byron.github.io/google-apis-rs/google_logging2_cli/projects_locations-buckets-views-logs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The resource name that owns the logs: projects/[PROJECT_ID] organizations/[ORGANIZATION_ID] billingAccounts/[BILLING_ACCOUNT_ID] folders/[FOLDER_ID]"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -16697,7 +17058,7 @@ async fn main() { let mut app = App::new("logging2") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230104") .about("Writes log entries and manages your Cloud Logging configuration.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_logging2_cli") .arg(Arg::with_name("url") diff --git a/gen/logging2/Cargo.toml b/gen/logging2/Cargo.toml index 8e2510d8fb..3c5e650369 100644 --- a/gen/logging2/Cargo.toml +++ b/gen/logging2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-logging2" -version = "5.0.2-beta-1+20230104" +version = "5.0.2+20230104" authors = ["Sebastian Thiel "] description = "A complete library to interact with Logging (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/logging2" homepage = "https://cloud.google.com/logging/docs/" -documentation = "https://docs.rs/google-logging2/5.0.2-beta-1+20230104" +documentation = "https://docs.rs/google-logging2/5.0.2+20230104" license = "MIT" keywords = ["logging", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/logging2/README.md b/gen/logging2/README.md index 5db48db520..b5b59b6579 100644 --- a/gen/logging2/README.md +++ b/gen/logging2/README.md @@ -5,41 +5,41 @@ DO NOT EDIT ! --> The `google-logging2` library allows access to all features of the *Google Logging* service. 
-This documentation was generated from *Logging* crate version *5.0.2-beta-1+20230104*, where *20230104* is the exact revision of the *logging:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Logging* crate version *5.0.2+20230104*, where *20230104* is the exact revision of the *logging:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Logging* *v2* API can be found at the [official documentation site](https://cloud.google.com/logging/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/Logging) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/Logging) ... * billing accounts - * [*exclusions create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountExclusionCreateCall), [*exclusions delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountExclusionDeleteCall), [*exclusions get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountExclusionGetCall), [*exclusions list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountExclusionListCall), [*exclusions patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountExclusionPatchCall), [*get cmek settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountGetCmekSettingCall), [*get settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountGetSettingCall), [*locations buckets create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketCreateCall), [*locations buckets 
delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketDeleteCall), [*locations buckets get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketGetCall), [*locations buckets list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketListCall), [*locations buckets patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketPatchCall), [*locations buckets undelete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketUndeleteCall), [*locations buckets views create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketViewCreateCall), [*locations buckets views delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketViewDeleteCall), [*locations buckets views get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketViewGetCall), [*locations buckets views list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketViewListCall), [*locations buckets views logs list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketViewLogListCall), [*locations buckets views patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationBucketViewPatchCall), [*locations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationGetCall), [*locations list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationListCall), [*locations operations 
cancel*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLocationOperationListCall), [*logs delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLogDeleteCall), [*logs list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountLogListCall), [*sinks create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountSinkGetCall), [*sinks list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountSinkListCall), [*sinks patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountSinkPatchCall) and [*sinks update*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::BillingAccountSinkUpdateCall) + * [*exclusions create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountExclusionCreateCall), [*exclusions delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountExclusionDeleteCall), [*exclusions get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountExclusionGetCall), [*exclusions list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountExclusionListCall), [*exclusions 
patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountExclusionPatchCall), [*get cmek settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountGetCmekSettingCall), [*get settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountGetSettingCall), [*locations buckets create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketCreateCall), [*locations buckets delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketDeleteCall), [*locations buckets get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketGetCall), [*locations buckets list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketListCall), [*locations buckets patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketPatchCall), [*locations buckets undelete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketUndeleteCall), [*locations buckets views create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketViewCreateCall), [*locations buckets views delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketViewDeleteCall), [*locations buckets views get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketViewGetCall), [*locations buckets views list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketViewListCall), [*locations buckets views logs list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketViewLogListCall), [*locations buckets views 
patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationBucketViewPatchCall), [*locations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationGetCall), [*locations list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationListCall), [*locations operations cancel*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLocationOperationListCall), [*logs delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLogDeleteCall), [*logs list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountLogListCall), [*sinks create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountSinkGetCall), [*sinks list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountSinkListCall), [*sinks patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountSinkPatchCall) and [*sinks update*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::BillingAccountSinkUpdateCall) * entries - * [*copy*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::EntryCopyCall), [*list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::EntryListCall), 
[*tail*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::EntryTailCall) and [*write*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::EntryWriteCall) + * [*copy*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::EntryCopyCall), [*list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::EntryListCall), [*tail*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::EntryTailCall) and [*write*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::EntryWriteCall) * exclusions - * [*create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ExclusionCreateCall), [*delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ExclusionDeleteCall), [*get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ExclusionGetCall), [*list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ExclusionListCall) and [*patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ExclusionPatchCall) + * [*create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ExclusionCreateCall), [*delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ExclusionDeleteCall), [*get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ExclusionGetCall), [*list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ExclusionListCall) and [*patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ExclusionPatchCall) * folders - * [*exclusions create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderExclusionCreateCall), [*exclusions delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderExclusionDeleteCall), [*exclusions 
get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderExclusionGetCall), [*exclusions list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderExclusionListCall), [*exclusions patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderExclusionPatchCall), [*get cmek settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderGetCmekSettingCall), [*get settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderGetSettingCall), [*locations buckets create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketCreateCall), [*locations buckets delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketDeleteCall), [*locations buckets get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketGetCall), [*locations buckets list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketListCall), [*locations buckets patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketPatchCall), [*locations buckets undelete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketUndeleteCall), [*locations buckets views create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketViewCreateCall), [*locations buckets views delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketViewDeleteCall), [*locations buckets views get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketViewGetCall), [*locations buckets views list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketViewListCall), [*locations 
buckets views logs list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketViewLogListCall), [*locations buckets views patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationBucketViewPatchCall), [*locations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationGetCall), [*locations list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationListCall), [*locations operations cancel*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLocationOperationListCall), [*logs delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLogDeleteCall), [*logs list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderLogListCall), [*sinks create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderSinkGetCall), [*sinks list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderSinkListCall), [*sinks patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderSinkPatchCall), [*sinks update*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderSinkUpdateCall) and [*update settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::FolderUpdateSettingCall) -* 
[locations](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::Location) - * [*buckets create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketCreateCall), [*buckets delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketDeleteCall), [*buckets get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketGetCall), [*buckets list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketListCall), [*buckets patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketPatchCall), [*buckets undelete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketUndeleteCall), [*buckets views create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketViewCreateCall), [*buckets views delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketViewDeleteCall), [*buckets views get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketViewGetCall), [*buckets views list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketViewListCall), [*buckets views patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationBucketViewPatchCall), [*get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationGetCall), [*list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationListCall), [*operations cancel*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationOperationCancelCall), [*operations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationOperationGetCall) and [*operations 
list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LocationOperationListCall) + * [*exclusions create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderExclusionCreateCall), [*exclusions delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderExclusionDeleteCall), [*exclusions get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderExclusionGetCall), [*exclusions list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderExclusionListCall), [*exclusions patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderExclusionPatchCall), [*get cmek settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderGetCmekSettingCall), [*get settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderGetSettingCall), [*locations buckets create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketCreateCall), [*locations buckets delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketDeleteCall), [*locations buckets get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketGetCall), [*locations buckets list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketListCall), [*locations buckets patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketPatchCall), [*locations buckets undelete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketUndeleteCall), [*locations buckets views create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketViewCreateCall), [*locations buckets views delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketViewDeleteCall), [*locations buckets views 
get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketViewGetCall), [*locations buckets views list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketViewListCall), [*locations buckets views logs list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketViewLogListCall), [*locations buckets views patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationBucketViewPatchCall), [*locations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationGetCall), [*locations list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationListCall), [*locations operations cancel*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLocationOperationListCall), [*logs delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLogDeleteCall), [*logs list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderLogListCall), [*sinks create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderSinkGetCall), [*sinks list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderSinkListCall), [*sinks patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderSinkPatchCall), [*sinks update*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderSinkUpdateCall) and 
[*update settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::FolderUpdateSettingCall) +* [locations](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::Location) + * [*buckets create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketCreateCall), [*buckets delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketDeleteCall), [*buckets get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketGetCall), [*buckets list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketListCall), [*buckets patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketPatchCall), [*buckets undelete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketUndeleteCall), [*buckets views create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketViewCreateCall), [*buckets views delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketViewDeleteCall), [*buckets views get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketViewGetCall), [*buckets views list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketViewListCall), [*buckets views patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationBucketViewPatchCall), [*get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationGetCall), [*list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationListCall), [*operations cancel*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationOperationCancelCall), [*operations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationOperationGetCall) and [*operations 
list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LocationOperationListCall) * logs - * [*delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LogDeleteCall) and [*list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::LogListCall) -* [monitored resource descriptors](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::MonitoredResourceDescriptor) - * [*list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::MonitoredResourceDescriptorListCall) + * [*delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LogDeleteCall) and [*list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::LogListCall) +* [monitored resource descriptors](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::MonitoredResourceDescriptor) + * [*list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::MonitoredResourceDescriptorListCall) * organizations - * [*exclusions create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationExclusionCreateCall), [*exclusions delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationExclusionDeleteCall), [*exclusions get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationExclusionGetCall), [*exclusions list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationExclusionListCall), [*exclusions patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationExclusionPatchCall), [*get cmek settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationGetCmekSettingCall), [*get settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationGetSettingCall), [*locations buckets 
create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketCreateCall), [*locations buckets delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketDeleteCall), [*locations buckets get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketGetCall), [*locations buckets list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketListCall), [*locations buckets patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketPatchCall), [*locations buckets undelete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketUndeleteCall), [*locations buckets views create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketViewCreateCall), [*locations buckets views delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketViewDeleteCall), [*locations buckets views get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketViewGetCall), [*locations buckets views list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketViewListCall), [*locations buckets views logs list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketViewLogListCall), [*locations buckets views patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationBucketViewPatchCall), [*locations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationGetCall), [*locations list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationListCall), [*locations 
operations cancel*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLocationOperationListCall), [*logs delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLogDeleteCall), [*logs list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationLogListCall), [*sinks create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationSinkGetCall), [*sinks list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationSinkListCall), [*sinks patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationSinkPatchCall), [*sinks update*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationSinkUpdateCall), [*update cmek settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationUpdateCmekSettingCall) and [*update settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::OrganizationUpdateSettingCall) + * [*exclusions create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationExclusionCreateCall), [*exclusions delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationExclusionDeleteCall), [*exclusions 
get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationExclusionGetCall), [*exclusions list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationExclusionListCall), [*exclusions patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationExclusionPatchCall), [*get cmek settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationGetCmekSettingCall), [*get settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationGetSettingCall), [*locations buckets create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketCreateCall), [*locations buckets delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketDeleteCall), [*locations buckets get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketGetCall), [*locations buckets list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketListCall), [*locations buckets patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketPatchCall), [*locations buckets undelete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketUndeleteCall), [*locations buckets views create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketViewCreateCall), [*locations buckets views delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketViewDeleteCall), [*locations buckets views get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketViewGetCall), [*locations buckets views list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketViewListCall), [*locations buckets views logs 
list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketViewLogListCall), [*locations buckets views patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationBucketViewPatchCall), [*locations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationGetCall), [*locations list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationListCall), [*locations operations cancel*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLocationOperationListCall), [*logs delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLogDeleteCall), [*logs list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationLogListCall), [*sinks create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationSinkGetCall), [*sinks list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationSinkListCall), [*sinks patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationSinkPatchCall), [*sinks update*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationSinkUpdateCall), [*update cmek settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationUpdateCmekSettingCall) and [*update 
settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::OrganizationUpdateSettingCall) * projects - * [*exclusions create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectExclusionCreateCall), [*exclusions delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectExclusionDeleteCall), [*exclusions get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectExclusionGetCall), [*exclusions list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectExclusionListCall), [*exclusions patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectExclusionPatchCall), [*get cmek settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectGetCmekSettingCall), [*get settings*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectGetSettingCall), [*locations buckets create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketCreateCall), [*locations buckets delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketDeleteCall), [*locations buckets get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketGetCall), [*locations buckets list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketListCall), [*locations buckets patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketPatchCall), [*locations buckets undelete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketUndeleteCall), [*locations buckets views create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketViewCreateCall), [*locations buckets views 
delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketViewDeleteCall), [*locations buckets views get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketViewGetCall), [*locations buckets views list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketViewListCall), [*locations buckets views logs list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketViewLogListCall), [*locations buckets views patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationBucketViewPatchCall), [*locations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLocationOperationListCall), [*logs delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLogDeleteCall), [*logs list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectLogListCall), [*metrics create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectMetricCreateCall), [*metrics delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectMetricDeleteCall), [*metrics get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectMetricGetCall), [*metrics 
list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectMetricListCall), [*metrics update*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectMetricUpdateCall), [*sinks create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectSinkGetCall), [*sinks list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectSinkListCall), [*sinks patch*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectSinkPatchCall) and [*sinks update*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::ProjectSinkUpdateCall) + * [*exclusions create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectExclusionCreateCall), [*exclusions delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectExclusionDeleteCall), [*exclusions get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectExclusionGetCall), [*exclusions list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectExclusionListCall), [*exclusions patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectExclusionPatchCall), [*get cmek settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectGetCmekSettingCall), [*get settings*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectGetSettingCall), [*locations buckets create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketCreateCall), [*locations buckets delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketDeleteCall), [*locations 
buckets get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketGetCall), [*locations buckets list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketListCall), [*locations buckets patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketPatchCall), [*locations buckets undelete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketUndeleteCall), [*locations buckets views create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketViewCreateCall), [*locations buckets views delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketViewDeleteCall), [*locations buckets views get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketViewGetCall), [*locations buckets views list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketViewListCall), [*locations buckets views logs list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketViewLogListCall), [*locations buckets views patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationBucketViewPatchCall), [*locations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLocationOperationListCall), [*logs 
delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLogDeleteCall), [*logs list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectLogListCall), [*metrics create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectMetricCreateCall), [*metrics delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectMetricDeleteCall), [*metrics get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectMetricGetCall), [*metrics list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectMetricListCall), [*metrics update*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectMetricUpdateCall), [*sinks create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectSinkGetCall), [*sinks list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectSinkListCall), [*sinks patch*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectSinkPatchCall) and [*sinks update*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::ProjectSinkUpdateCall) * sinks - * [*create*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::SinkCreateCall), [*delete*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::SinkDeleteCall), [*get*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::SinkGetCall), [*list*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::SinkListCall) and [*update*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::SinkUpdateCall) + * 
[*create*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::SinkCreateCall), [*delete*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::SinkDeleteCall), [*get*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::SinkGetCall), [*list*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::SinkListCall) and [*update*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::SinkUpdateCall) Other activities are ... -* [get cmek settings](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::MethodGetCmekSettingCall) -* [get settings](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::MethodGetSettingCall) -* [update cmek settings](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::MethodUpdateCmekSettingCall) -* [update settings](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/api::MethodUpdateSettingCall) +* [get cmek settings](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::MethodGetCmekSettingCall) +* [get settings](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::MethodGetSettingCall) +* [update cmek settings](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::MethodUpdateCmekSettingCall) +* [update settings](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/api::MethodUpdateSettingCall) @@ -47,17 +47,17 @@ Other activities are ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/Logging)** +* **[Hub](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/Logging)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::CallBuilder) -* **[Resources](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::CallBuilder) +* **[Resources](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::Part)** + * **[Parts](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -183,17 +183,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -203,29 +203,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::Delegate) to the -[Method Builder](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::Delegate) to the +[Method Builder](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::RequestValue) and -[decodable](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::RequestValue) and +[decodable](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-logging2/5.0.2-beta-1+20230104/google_logging2/client::RequestValue) are moved +* [request values](https://docs.rs/google-logging2/5.0.2+20230104/google_logging2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/logging2/src/api.rs b/gen/logging2/src/api.rs index f9007e5438..23a7fa0709 100644 --- a/gen/logging2/src/api.rs +++ b/gen/logging2/src/api.rs @@ -141,7 +141,7 @@ impl<'a, S> Logging { Logging { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://logging.googleapis.com/".to_string(), _root_url: "https://logging.googleapis.com/".to_string(), } @@ -182,7 +182,7 @@ impl<'a, S> Logging { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/logging2/src/client.rs b/gen/logging2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/logging2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/logging2/src/lib.rs b/gen/logging2/src/lib.rs index 750f3a9c89..90f838294b 100644 --- a/gen/logging2/src/lib.rs +++ b/gen/logging2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Logging* crate version *5.0.2-beta-1+20230104*, where *20230104* is the exact revision of the *logging:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Logging* crate version *5.0.2+20230104*, where *20230104* is the exact revision of the *logging:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Logging* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/logging/docs/). diff --git a/gen/logging2_beta1-cli/Cargo.toml b/gen/logging2_beta1-cli/Cargo.toml index e8bd6a01e6..9236022ea9 100644 --- a/gen/logging2_beta1-cli/Cargo.toml +++ b/gen/logging2_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-logging2_beta1-cli" -version = "4.0.1+20190325" +version = "5.0.2+20190325" authors = ["Sebastian Thiel "] description = "A complete library to interact with Logging (protocol v2beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/logging2_beta1-cli" @@ -20,13 +20,13 @@ name = "logging2-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-logging2_beta1] path = "../logging2_beta1" -version = "4.0.1+20190325" +version = "5.0.2+20190325" + diff --git a/gen/logging2_beta1-cli/README.md b/gen/logging2_beta1-cli/README.md index 8adb5c8682..e0de4f9e95 100644 --- a/gen/logging2_beta1-cli/README.md +++ b/gen/logging2_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Logging* API at revision *20190325*. The CLI is at version *4.0.1*. +This documentation was generated from the *Logging* API at revision *20190325*. The CLI is at version *5.0.2*. ```bash logging2-beta1 [options] diff --git a/gen/logging2_beta1-cli/mkdocs.yml b/gen/logging2_beta1-cli/mkdocs.yml index a382fe2799..f2281575f1 100644 --- a/gen/logging2_beta1-cli/mkdocs.yml +++ b/gen/logging2_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Logging v4.0.1+20190325 +site_name: Logging v5.0.2+20190325 site_url: http://byron.github.io/google-apis-rs/google-logging2_beta1-cli site_description: A complete library to interact with Logging (protocol v2beta1) @@ -7,21 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/logging2_beta1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['entries_list.md', 'Entries', 'List'] -- ['entries_write.md', 'Entries', 'Write'] -- ['monitored-resource-descriptors_list.md', 'Monitored Resource Descriptors', 'List'] -- ['projects_metrics-create.md', 'Projects', 'Metrics Create'] -- ['projects_metrics-delete.md', 'Projects', 'Metrics Delete'] -- ['projects_metrics-get.md', 'Projects', 'Metrics Get'] -- ['projects_metrics-list.md', 'Projects', 'Metrics List'] -- ['projects_metrics-update.md', 'Projects', 'Metrics Update'] -- ['projects_sinks-create.md', 'Projects', 'Sinks Create'] -- ['projects_sinks-delete.md', 'Projects', 'Sinks Delete'] -- ['projects_sinks-get.md', 'Projects', 'Sinks Get'] -- ['projects_sinks-list.md', 'Projects', 'Sinks List'] -- ['projects_sinks-update.md', 'Projects', 'Sinks Update'] +nav: +- Home: 'index.md' +- 'Entries': + - 'List': 'entries_list.md' + - 'Write': 'entries_write.md' +- 'Monitored Resource Descriptors': + - 'List': 'monitored-resource-descriptors_list.md' +- 'Projects': + - 'Metrics Create': 'projects_metrics-create.md' + - 'Metrics Delete': 
'projects_metrics-delete.md' + - 'Metrics Get': 'projects_metrics-get.md' + - 'Metrics List': 'projects_metrics-list.md' + - 'Metrics Update': 'projects_metrics-update.md' + - 'Sinks Create': 'projects_sinks-create.md' + - 'Sinks Delete': 'projects_sinks-delete.md' + - 'Sinks Get': 'projects_sinks-get.md' + - 'Sinks List': 'projects_sinks-list.md' + - 'Sinks Update': 'projects_sinks-update.md' theme: readthedocs diff --git a/gen/logging2_beta1-cli/src/client.rs b/gen/logging2_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/logging2_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/logging2_beta1-cli/src/main.rs b/gen/logging2_beta1-cli/src/main.rs index 3599fa43c9..972554cb98 100644 --- a/gen/logging2_beta1-cli/src/main.rs +++ b/gen/logging2_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_logging2_beta1::{api, Error, oauth2}; +use google_logging2_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -241,7 +240,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -513,7 +512,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -718,7 +717,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -881,7 +880,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -977,10 +976,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "unique-writer-identity" => { - call = call.unique_writer_identity(arg_from_str(value.unwrap_or("false"), err, "unique-writer-identity", "boolean")); + call = call.unique_writer_identity( value.map(|v| arg_from_str(v, err, "unique-writer-identity", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1522,7 +1521,7 @@ async fn main() { let mut app = App::new("logging2-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20190325") + .version("5.0.2+20190325") .about("Writes log entries and manages your Logging configuration.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_logging2_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/logging2_beta1/Cargo.toml b/gen/logging2_beta1/Cargo.toml index c9808bbd48..cb25e0ddfb 100644 --- a/gen/logging2_beta1/Cargo.toml +++ b/gen/logging2_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-logging2_beta1" -version = "5.0.2-beta-1+20190325" +version = "5.0.2+20190325" authors = ["Sebastian Thiel "] description = "A complete library to interact with Logging (protocol v2beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/logging2_beta1" homepage = "https://cloud.google.com/logging/docs/" -documentation = "https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325" +documentation = "https://docs.rs/google-logging2_beta1/5.0.2+20190325" license = "MIT" keywords 
= ["logging", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/logging2_beta1/README.md b/gen/logging2_beta1/README.md index d095c3fca5..2a0ea2a0bd 100644 --- a/gen/logging2_beta1/README.md +++ b/gen/logging2_beta1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-logging2_beta1` library allows access to all features of the *Google Logging* service. -This documentation was generated from *Logging* crate version *5.0.2-beta-1+20190325*, where *20190325* is the exact revision of the *logging:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Logging* crate version *5.0.2+20190325*, where *20190325* is the exact revision of the *logging:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Logging* *v2_beta1* API can be found at the [official documentation site](https://cloud.google.com/logging/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/Logging) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/Logging) ... 
* entries - * [*list*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::EntryListCall) and [*write*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::EntryWriteCall) -* [monitored resource descriptors](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::MonitoredResourceDescriptor) - * [*list*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::MonitoredResourceDescriptorListCall) + * [*list*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::EntryListCall) and [*write*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::EntryWriteCall) +* [monitored resource descriptors](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::MonitoredResourceDescriptor) + * [*list*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::MonitoredResourceDescriptorListCall) * projects - * [*metrics create*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectMetricCreateCall), [*metrics delete*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectMetricDeleteCall), [*metrics get*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectMetricGetCall), [*metrics list*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectMetricListCall), [*metrics update*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectMetricUpdateCall), [*sinks create*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectSinkDeleteCall), [*sinks 
get*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectSinkGetCall), [*sinks list*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectSinkListCall) and [*sinks update*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/api::ProjectSinkUpdateCall) + * [*metrics create*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectMetricCreateCall), [*metrics delete*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectMetricDeleteCall), [*metrics get*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectMetricGetCall), [*metrics list*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectMetricListCall), [*metrics update*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectMetricUpdateCall), [*sinks create*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectSinkCreateCall), [*sinks delete*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectSinkDeleteCall), [*sinks get*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectSinkGetCall), [*sinks list*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectSinkListCall) and [*sinks update*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/api::ProjectSinkUpdateCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/Logging)** +* **[Hub](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/Logging)** * a central object to maintain state and allow accessing 
all *Activities* - * creates [*Method Builders*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::Part)** + * **[Parts](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-logging2_beta1/5.0.2-beta-1+20190325/google_logging2_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-logging2_beta1/5.0.2+20190325/google_logging2_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/logging2_beta1/src/api.rs b/gen/logging2_beta1/src/api.rs index 226cdcdff0..0f42583639 100644 --- a/gen/logging2_beta1/src/api.rs +++ b/gen/logging2_beta1/src/api.rs @@ -141,7 +141,7 @@ impl<'a, S> Logging { Logging { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://logging.googleapis.com/".to_string(), _root_url: "https://logging.googleapis.com/".to_string(), } @@ -158,7 +158,7 @@ impl<'a, S> Logging { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/logging2_beta1/src/client.rs b/gen/logging2_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/logging2_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/logging2_beta1/src/lib.rs b/gen/logging2_beta1/src/lib.rs index 7622e66b12..8b27468022 100644 --- a/gen/logging2_beta1/src/lib.rs +++ b/gen/logging2_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Logging* crate version *5.0.2-beta-1+20190325*, where *20190325* is the exact revision of the *logging:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Logging* crate version *5.0.2+20190325*, where *20190325* is the exact revision of the *logging:v2beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Logging* *v2_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/logging/docs/). diff --git a/gen/managedidentities1-cli/Cargo.toml b/gen/managedidentities1-cli/Cargo.toml index 95814781dc..417c18659f 100644 --- a/gen/managedidentities1-cli/Cargo.toml +++ b/gen/managedidentities1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-managedidentities1-cli" -version = "4.0.1+20220216" +version = "5.0.2+20221227" authors = ["Sebastian Thiel "] description = "A complete library to interact with Managed Service for Microsoft Active Directory Consumer API (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/managedidentities1-cli" @@ -20,13 +20,13 @@ name = "managedidentities1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-managedidentities1] path = "../managedidentities1" -version = "4.0.1+20220216" +version = "5.0.2+20221227" + diff --git a/gen/managedidentities1-cli/README.md b/gen/managedidentities1-cli/README.md index cec6433498..85d0bca17b 100644 --- a/gen/managedidentities1-cli/README.md 
+++ b/gen/managedidentities1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Managed Service for Microsoft Active Directory Consumer API* API at revision *20220216*. The CLI is at version *4.0.1*. +This documentation was generated from the *Managed Service for Microsoft Active Directory Consumer API* API at revision *20221227*. The CLI is at version *5.0.2*. ```bash managedidentities1 [options] @@ -43,6 +43,7 @@ managedidentities1 [options] locations-global-domains-create (-r )... [-p ]... [-o ] locations-global-domains-delete [-p ]... [-o ] locations-global-domains-detach-trust (-r )... [-p ]... [-o ] + locations-global-domains-extend-schema (-r )... [-p ]... [-o ] locations-global-domains-get [-p ]... [-o ] locations-global-domains-get-iam-policy [-p ]... [-o ] locations-global-domains-get-ldapssettings [-p ]... [-o ] diff --git a/gen/managedidentities1-cli/mkdocs.yml b/gen/managedidentities1-cli/mkdocs.yml index 2778f2def4..97e1a59ee1 100644 --- a/gen/managedidentities1-cli/mkdocs.yml +++ b/gen/managedidentities1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Managed Service for Microsoft Active Directory Consumer API v4.0.1+20220216 +site_name: Managed Service for Microsoft Active Directory Consumer API v5.0.2+20221227 site_url: http://byron.github.io/google-apis-rs/google-managedidentities1-cli site_description: A complete library to interact with Managed Service for Microsoft Active Directory Consumer API (protocol v1) @@ -7,48 +7,50 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/managedidentitie docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-global-domains-attach-trust.md', 'Projects', 'Locations Global Domains Attach Trust'] -- ['projects_locations-global-domains-backups-create.md', 'Projects', 'Locations Global Domains 
Backups Create'] -- ['projects_locations-global-domains-backups-delete.md', 'Projects', 'Locations Global Domains Backups Delete'] -- ['projects_locations-global-domains-backups-get.md', 'Projects', 'Locations Global Domains Backups Get'] -- ['projects_locations-global-domains-backups-get-iam-policy.md', 'Projects', 'Locations Global Domains Backups Get Iam Policy'] -- ['projects_locations-global-domains-backups-list.md', 'Projects', 'Locations Global Domains Backups List'] -- ['projects_locations-global-domains-backups-patch.md', 'Projects', 'Locations Global Domains Backups Patch'] -- ['projects_locations-global-domains-backups-set-iam-policy.md', 'Projects', 'Locations Global Domains Backups Set Iam Policy'] -- ['projects_locations-global-domains-backups-test-iam-permissions.md', 'Projects', 'Locations Global Domains Backups Test Iam Permissions'] -- ['projects_locations-global-domains-create.md', 'Projects', 'Locations Global Domains Create'] -- ['projects_locations-global-domains-delete.md', 'Projects', 'Locations Global Domains Delete'] -- ['projects_locations-global-domains-detach-trust.md', 'Projects', 'Locations Global Domains Detach Trust'] -- ['projects_locations-global-domains-get.md', 'Projects', 'Locations Global Domains Get'] -- ['projects_locations-global-domains-get-iam-policy.md', 'Projects', 'Locations Global Domains Get Iam Policy'] -- ['projects_locations-global-domains-get-ldapssettings.md', 'Projects', 'Locations Global Domains Get Ldapssettings'] -- ['projects_locations-global-domains-list.md', 'Projects', 'Locations Global Domains List'] -- ['projects_locations-global-domains-patch.md', 'Projects', 'Locations Global Domains Patch'] -- ['projects_locations-global-domains-reconfigure-trust.md', 'Projects', 'Locations Global Domains Reconfigure Trust'] -- ['projects_locations-global-domains-reset-admin-password.md', 'Projects', 'Locations Global Domains Reset Admin Password'] -- ['projects_locations-global-domains-restore.md', 'Projects', 
'Locations Global Domains Restore'] -- ['projects_locations-global-domains-set-iam-policy.md', 'Projects', 'Locations Global Domains Set Iam Policy'] -- ['projects_locations-global-domains-sql-integrations-get.md', 'Projects', 'Locations Global Domains Sql Integrations Get'] -- ['projects_locations-global-domains-sql-integrations-list.md', 'Projects', 'Locations Global Domains Sql Integrations List'] -- ['projects_locations-global-domains-test-iam-permissions.md', 'Projects', 'Locations Global Domains Test Iam Permissions'] -- ['projects_locations-global-domains-update-ldapssettings.md', 'Projects', 'Locations Global Domains Update Ldapssettings'] -- ['projects_locations-global-domains-validate-trust.md', 'Projects', 'Locations Global Domains Validate Trust'] -- ['projects_locations-global-operations-cancel.md', 'Projects', 'Locations Global Operations Cancel'] -- ['projects_locations-global-operations-delete.md', 'Projects', 'Locations Global Operations Delete'] -- ['projects_locations-global-operations-get.md', 'Projects', 'Locations Global Operations Get'] -- ['projects_locations-global-operations-list.md', 'Projects', 'Locations Global Operations List'] -- ['projects_locations-global-peerings-create.md', 'Projects', 'Locations Global Peerings Create'] -- ['projects_locations-global-peerings-delete.md', 'Projects', 'Locations Global Peerings Delete'] -- ['projects_locations-global-peerings-get.md', 'Projects', 'Locations Global Peerings Get'] -- ['projects_locations-global-peerings-get-iam-policy.md', 'Projects', 'Locations Global Peerings Get Iam Policy'] -- ['projects_locations-global-peerings-list.md', 'Projects', 'Locations Global Peerings List'] -- ['projects_locations-global-peerings-patch.md', 'Projects', 'Locations Global Peerings Patch'] -- ['projects_locations-global-peerings-set-iam-policy.md', 'Projects', 'Locations Global Peerings Set Iam Policy'] -- ['projects_locations-global-peerings-test-iam-permissions.md', 'Projects', 'Locations Global 
Peerings Test Iam Permissions'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Global Domains Attach Trust': 'projects_locations-global-domains-attach-trust.md' + - 'Locations Global Domains Backups Create': 'projects_locations-global-domains-backups-create.md' + - 'Locations Global Domains Backups Delete': 'projects_locations-global-domains-backups-delete.md' + - 'Locations Global Domains Backups Get': 'projects_locations-global-domains-backups-get.md' + - 'Locations Global Domains Backups Get Iam Policy': 'projects_locations-global-domains-backups-get-iam-policy.md' + - 'Locations Global Domains Backups List': 'projects_locations-global-domains-backups-list.md' + - 'Locations Global Domains Backups Patch': 'projects_locations-global-domains-backups-patch.md' + - 'Locations Global Domains Backups Set Iam Policy': 'projects_locations-global-domains-backups-set-iam-policy.md' + - 'Locations Global Domains Backups Test Iam Permissions': 'projects_locations-global-domains-backups-test-iam-permissions.md' + - 'Locations Global Domains Create': 'projects_locations-global-domains-create.md' + - 'Locations Global Domains Delete': 'projects_locations-global-domains-delete.md' + - 'Locations Global Domains Detach Trust': 'projects_locations-global-domains-detach-trust.md' + - 'Locations Global Domains Extend Schema': 'projects_locations-global-domains-extend-schema.md' + - 'Locations Global Domains Get': 'projects_locations-global-domains-get.md' + - 'Locations Global Domains Get Iam Policy': 'projects_locations-global-domains-get-iam-policy.md' + - 'Locations Global Domains Get Ldapssettings': 'projects_locations-global-domains-get-ldapssettings.md' + - 'Locations Global Domains List': 'projects_locations-global-domains-list.md' + - 'Locations Global Domains Patch': 'projects_locations-global-domains-patch.md' + - 'Locations Global Domains Reconfigure 
Trust': 'projects_locations-global-domains-reconfigure-trust.md' + - 'Locations Global Domains Reset Admin Password': 'projects_locations-global-domains-reset-admin-password.md' + - 'Locations Global Domains Restore': 'projects_locations-global-domains-restore.md' + - 'Locations Global Domains Set Iam Policy': 'projects_locations-global-domains-set-iam-policy.md' + - 'Locations Global Domains Sql Integrations Get': 'projects_locations-global-domains-sql-integrations-get.md' + - 'Locations Global Domains Sql Integrations List': 'projects_locations-global-domains-sql-integrations-list.md' + - 'Locations Global Domains Test Iam Permissions': 'projects_locations-global-domains-test-iam-permissions.md' + - 'Locations Global Domains Update Ldapssettings': 'projects_locations-global-domains-update-ldapssettings.md' + - 'Locations Global Domains Validate Trust': 'projects_locations-global-domains-validate-trust.md' + - 'Locations Global Operations Cancel': 'projects_locations-global-operations-cancel.md' + - 'Locations Global Operations Delete': 'projects_locations-global-operations-delete.md' + - 'Locations Global Operations Get': 'projects_locations-global-operations-get.md' + - 'Locations Global Operations List': 'projects_locations-global-operations-list.md' + - 'Locations Global Peerings Create': 'projects_locations-global-peerings-create.md' + - 'Locations Global Peerings Delete': 'projects_locations-global-peerings-delete.md' + - 'Locations Global Peerings Get': 'projects_locations-global-peerings-get.md' + - 'Locations Global Peerings Get Iam Policy': 'projects_locations-global-peerings-get-iam-policy.md' + - 'Locations Global Peerings List': 'projects_locations-global-peerings-list.md' + - 'Locations Global Peerings Patch': 'projects_locations-global-peerings-patch.md' + - 'Locations Global Peerings Set Iam Policy': 'projects_locations-global-peerings-set-iam-policy.md' + - 'Locations Global Peerings Test Iam Permissions': 
'projects_locations-global-peerings-test-iam-permissions.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/managedidentities1-cli/src/client.rs b/gen/managedidentities1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/managedidentities1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/managedidentities1-cli/src/main.rs b/gen/managedidentities1-cli/src/main.rs index 3bf07be187..fbd844f7bf 100644 --- a/gen/managedidentities1-cli/src/main.rs +++ b/gen/managedidentities1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_managedidentities1::{api, Error, oauth2}; +use google_managedidentities1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -404,7 +403,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -463,7 +462,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -564,7 +563,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1031,6 +1030,93 @@ where } } + async fn _projects_locations_global_domains_extend_schema(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "file-contents" => Some(("fileContents", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gcs-path" => Some(("gcsPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "file-contents", "gcs-path"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ExtendSchemaRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_global_domains_extend_schema(request, opt.value_of("domain").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_global_domains_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_global_domains_get(opt.value_of("name").unwrap_or("")); @@ -1090,7 +1176,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, 
"options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1201,7 +1287,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1307,7 +1393,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1759,7 +1845,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1947,7 +2033,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2289,7 +2375,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2548,7 +2634,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, 
"options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2607,7 +2693,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2709,7 +2795,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2939,7 +3025,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3037,6 +3123,9 @@ where ("locations-global-domains-detach-trust", Some(opt)) => { call_result = self._projects_locations_global_domains_detach_trust(opt, dry_run, &mut err).await; }, + ("locations-global-domains-extend-schema", Some(opt)) => { + call_result = self._projects_locations_global_domains_extend_schema(opt, dry_run, &mut err).await; + }, ("locations-global-domains-get", Some(opt)) => { call_result = self._projects_locations_global_domains_get(opt, dry_run, &mut err).await; }, @@ -3197,7 +3286,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-global-domains-attach-trust', 'locations-global-domains-backups-create', 'locations-global-domains-backups-delete', 
'locations-global-domains-backups-get', 'locations-global-domains-backups-get-iam-policy', 'locations-global-domains-backups-list', 'locations-global-domains-backups-patch', 'locations-global-domains-backups-set-iam-policy', 'locations-global-domains-backups-test-iam-permissions', 'locations-global-domains-create', 'locations-global-domains-delete', 'locations-global-domains-detach-trust', 'locations-global-domains-get', 'locations-global-domains-get-iam-policy', 'locations-global-domains-get-ldapssettings', 'locations-global-domains-list', 'locations-global-domains-patch', 'locations-global-domains-reconfigure-trust', 'locations-global-domains-reset-admin-password', 'locations-global-domains-restore', 'locations-global-domains-set-iam-policy', 'locations-global-domains-sql-integrations-get', 'locations-global-domains-sql-integrations-list', 'locations-global-domains-test-iam-permissions', 'locations-global-domains-update-ldapssettings', 'locations-global-domains-validate-trust', 'locations-global-operations-cancel', 'locations-global-operations-delete', 'locations-global-operations-get', 'locations-global-operations-list', 'locations-global-peerings-create', 'locations-global-peerings-delete', 'locations-global-peerings-get', 'locations-global-peerings-get-iam-policy', 'locations-global-peerings-list', 'locations-global-peerings-patch', 'locations-global-peerings-set-iam-policy', 'locations-global-peerings-test-iam-permissions' and 'locations-list'", vec![ + ("projects", "methods: 'locations-get', 'locations-global-domains-attach-trust', 'locations-global-domains-backups-create', 'locations-global-domains-backups-delete', 'locations-global-domains-backups-get', 'locations-global-domains-backups-get-iam-policy', 'locations-global-domains-backups-list', 'locations-global-domains-backups-patch', 'locations-global-domains-backups-set-iam-policy', 'locations-global-domains-backups-test-iam-permissions', 'locations-global-domains-create', 
'locations-global-domains-delete', 'locations-global-domains-detach-trust', 'locations-global-domains-extend-schema', 'locations-global-domains-get', 'locations-global-domains-get-iam-policy', 'locations-global-domains-get-ldapssettings', 'locations-global-domains-list', 'locations-global-domains-patch', 'locations-global-domains-reconfigure-trust', 'locations-global-domains-reset-admin-password', 'locations-global-domains-restore', 'locations-global-domains-set-iam-policy', 'locations-global-domains-sql-integrations-get', 'locations-global-domains-sql-integrations-list', 'locations-global-domains-test-iam-permissions', 'locations-global-domains-update-ldapssettings', 'locations-global-domains-validate-trust', 'locations-global-operations-cancel', 'locations-global-operations-delete', 'locations-global-operations-get', 'locations-global-operations-list', 'locations-global-peerings-create', 'locations-global-peerings-delete', 'locations-global-peerings-get', 'locations-global-peerings-get-iam-policy', 'locations-global-peerings-list', 'locations-global-peerings-patch', 'locations-global-peerings-set-iam-policy', 'locations-global-peerings-test-iam-permissions' and 'locations-list'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_managedidentities1_cli/projects_locations-get", @@ -3326,7 +3415,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3398,7 +3487,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3426,7 +3515,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3520,6 +3609,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-global-domains-extend-schema", + Some(r##"Extend Schema for Domain"##), + "Details at http://byron.github.io/google-apis-rs/google_managedidentities1_cli/projects_locations-global-domains-extend-schema", + vec![ + (Some(r##"domain"##), + None, + Some(r##"Required. The domain resource name using the form: `projects/{project_id}/locations/global/domains/{domain_name}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3554,7 +3671,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3732,7 +3849,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3804,7 +3921,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4054,7 +4171,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4126,7 +4243,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4154,7 +4271,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4204,7 +4321,7 @@ async fn main() { let mut app = App::new("managedidentities1") .author("Sebastian Thiel ") - .version("4.0.1+20220216") + .version("5.0.2+20221227") .about("The Managed Service for Microsoft Active Directory API is used for managing a highly available, hardened service running Microsoft Active Directory (AD).") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_managedidentities1_cli") .arg(Arg::with_name("url") diff --git a/gen/managedidentities1/Cargo.toml b/gen/managedidentities1/Cargo.toml index 047f85b26b..27692abe7a 100644 --- a/gen/managedidentities1/Cargo.toml +++ b/gen/managedidentities1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-managedidentities1" -version = "5.0.2-beta-1+20221227" +version = "5.0.2+20221227" authors = ["Sebastian Thiel "] description = "A complete library to interact with Managed Service for Microsoft Active Directory Consumer API (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/managedidentities1" homepage = "https://cloud.google.com/managed-microsoft-ad/" -documentation = "https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227" +documentation = "https://docs.rs/google-managedidentities1/5.0.2+20221227" license = "MIT" keywords = ["managedidentities", "google", "protocol", "web", "api"] autobins = false diff 
--git a/gen/managedidentities1/README.md b/gen/managedidentities1/README.md index 943ecc7d3f..ed52f294e3 100644 --- a/gen/managedidentities1/README.md +++ b/gen/managedidentities1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-managedidentities1` library allows access to all features of the *Google Managed Service for Microsoft Active Directory Consumer API* service. -This documentation was generated from *Managed Service for Microsoft Active Directory Consumer API* crate version *5.0.2-beta-1+20221227*, where *20221227* is the exact revision of the *managedidentities:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Managed Service for Microsoft Active Directory Consumer API* crate version *5.0.2+20221227*, where *20221227* is the exact revision of the *managedidentities:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Managed Service for Microsoft Active Directory Consumer API* *v1* API can be found at the [official documentation site](https://cloud.google.com/managed-microsoft-ad/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/ManagedServiceForMicrosoftActiveDirectoryConsumerAPI) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/ManagedServiceForMicrosoftActiveDirectoryConsumerAPI) ... 
* projects - * [*locations get*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGetCall), [*locations global domains attach trust*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainAttachTrustCall), [*locations global domains backups create*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupCreateCall), [*locations global domains backups delete*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupDeleteCall), [*locations global domains backups get*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupGetCall), [*locations global domains backups get iam policy*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupGetIamPolicyCall), [*locations global domains backups list*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupListCall), [*locations global domains backups patch*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupPatchCall), [*locations global domains backups set iam policy*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupSetIamPolicyCall), [*locations global domains backups test iam permissions*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupTestIamPermissionCall), [*locations global domains 
create*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainCreateCall), [*locations global domains delete*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainDeleteCall), [*locations global domains detach trust*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainDetachTrustCall), [*locations global domains extend schema*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainExtendSchemaCall), [*locations global domains get*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainGetCall), [*locations global domains get iam policy*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainGetIamPolicyCall), [*locations global domains get ldapssettings*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainGetLdapssettingCall), [*locations global domains list*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainListCall), [*locations global domains patch*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainPatchCall), [*locations global domains reconfigure trust*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainReconfigureTrustCall), [*locations global domains reset admin password*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainResetAdminPasswordCall), [*locations global domains 
restore*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainRestoreCall), [*locations global domains set iam policy*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainSetIamPolicyCall), [*locations global domains sql integrations get*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainSqlIntegrationGetCall), [*locations global domains sql integrations list*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainSqlIntegrationListCall), [*locations global domains test iam permissions*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainTestIamPermissionCall), [*locations global domains update ldapssettings*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainUpdateLdapssettingCall), [*locations global domains validate trust*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainValidateTrustCall), [*locations global operations cancel*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalOperationCancelCall), [*locations global operations delete*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalOperationDeleteCall), [*locations global operations get*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalOperationGetCall), [*locations global operations 
list*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalOperationListCall), [*locations global peerings create*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringCreateCall), [*locations global peerings delete*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringDeleteCall), [*locations global peerings get*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringGetCall), [*locations global peerings get iam policy*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringGetIamPolicyCall), [*locations global peerings list*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringListCall), [*locations global peerings patch*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringPatchCall), [*locations global peerings set iam policy*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringSetIamPolicyCall), [*locations global peerings test iam permissions*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringTestIamPermissionCall) and [*locations list*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/api::ProjectLocationListCall) + * [*locations get*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGetCall), [*locations global domains attach 
trust*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainAttachTrustCall), [*locations global domains backups create*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupCreateCall), [*locations global domains backups delete*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupDeleteCall), [*locations global domains backups get*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupGetCall), [*locations global domains backups get iam policy*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupGetIamPolicyCall), [*locations global domains backups list*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupListCall), [*locations global domains backups patch*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupPatchCall), [*locations global domains backups set iam policy*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupSetIamPolicyCall), [*locations global domains backups test iam permissions*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainBackupTestIamPermissionCall), [*locations global domains create*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainCreateCall), [*locations global domains delete*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainDeleteCall), [*locations global domains detach 
trust*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainDetachTrustCall), [*locations global domains extend schema*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainExtendSchemaCall), [*locations global domains get*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainGetCall), [*locations global domains get iam policy*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainGetIamPolicyCall), [*locations global domains get ldapssettings*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainGetLdapssettingCall), [*locations global domains list*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainListCall), [*locations global domains patch*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainPatchCall), [*locations global domains reconfigure trust*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainReconfigureTrustCall), [*locations global domains reset admin password*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainResetAdminPasswordCall), [*locations global domains restore*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainRestoreCall), [*locations global domains set iam policy*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainSetIamPolicyCall), [*locations global domains sql integrations 
get*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainSqlIntegrationGetCall), [*locations global domains sql integrations list*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainSqlIntegrationListCall), [*locations global domains test iam permissions*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainTestIamPermissionCall), [*locations global domains update ldapssettings*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainUpdateLdapssettingCall), [*locations global domains validate trust*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalDomainValidateTrustCall), [*locations global operations cancel*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalOperationCancelCall), [*locations global operations delete*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalOperationDeleteCall), [*locations global operations get*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalOperationGetCall), [*locations global operations list*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalOperationListCall), [*locations global peerings create*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringCreateCall), [*locations global peerings delete*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringDeleteCall), [*locations global peerings 
get*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringGetCall), [*locations global peerings get iam policy*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringGetIamPolicyCall), [*locations global peerings list*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringListCall), [*locations global peerings patch*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringPatchCall), [*locations global peerings set iam policy*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringSetIamPolicyCall), [*locations global peerings test iam permissions*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationGlobalPeeringTestIamPermissionCall) and [*locations list*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/api::ProjectLocationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/ManagedServiceForMicrosoftActiveDirectoryConsumerAPI)** +* **[Hub](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/ManagedServiceForMicrosoftActiveDirectoryConsumerAPI)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::CallBuilder) 
-* **[Resources](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::CallBuilder) +* **[Resources](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::Part)** + * **[Parts](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -141,17 +141,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -161,29 +161,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::Delegate) to the -[Method Builder](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::Delegate) to the +[Method Builder](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::RequestValue) and -[decodable](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::RequestValue) and +[decodable](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-managedidentities1/5.0.2-beta-1+20221227/google_managedidentities1/client::RequestValue) are moved +* [request values](https://docs.rs/google-managedidentities1/5.0.2+20221227/google_managedidentities1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/managedidentities1/src/api.rs b/gen/managedidentities1/src/api.rs index 9fb1482484..0b1365f895 100644 --- a/gen/managedidentities1/src/api.rs +++ b/gen/managedidentities1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> ManagedServiceForMicrosoftActiveDirectoryConsumerAPI { ManagedServiceForMicrosoftActiveDirectoryConsumerAPI { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://managedidentities.googleapis.com/".to_string(), _root_url: "https://managedidentities.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> ManagedServiceForMicrosoftActiveDirectoryConsumerAPI { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/managedidentities1/src/client.rs b/gen/managedidentities1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/managedidentities1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/managedidentities1/src/lib.rs b/gen/managedidentities1/src/lib.rs index f795cd1f67..1117f545ed 100644 --- a/gen/managedidentities1/src/lib.rs +++ b/gen/managedidentities1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Managed Service for Microsoft Active Directory Consumer API* crate version *5.0.2-beta-1+20221227*, where *20221227* is the exact revision of the *managedidentities:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Managed Service for Microsoft Active Directory Consumer API* crate version *5.0.2+20221227*, where *20221227* is the exact revision of the *managedidentities:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Managed Service for Microsoft Active Directory Consumer API* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/managed-microsoft-ad/). diff --git a/gen/manager1_beta2-cli/Cargo.toml b/gen/manager1_beta2-cli/Cargo.toml index 015ac4a2af..fcb4e9db9f 100644 --- a/gen/manager1_beta2-cli/Cargo.toml +++ b/gen/manager1_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-manager1_beta2-cli" -version = "4.0.1+20140915" +version = "5.0.2+20140915" authors = ["Sebastian Thiel "] description = "A complete library to interact with manager (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/manager1_beta2-cli" @@ -20,13 +20,13 @@ name = "manager1-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-manager1_beta2] path = "../manager1_beta2" -version = "4.0.1+20140915" +version = "5.0.2+20140915" + diff --git a/gen/manager1_beta2-cli/README.md b/gen/manager1_beta2-cli/README.md 
index f2ba4425f7..2eceb68aaf 100644 --- a/gen/manager1_beta2-cli/README.md +++ b/gen/manager1_beta2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *manager* API at revision *20140915*. The CLI is at version *4.0.1*. +This documentation was generated from the *manager* API at revision *20140915*. The CLI is at version *5.0.2*. ```bash manager1-beta2 [options] diff --git a/gen/manager1_beta2-cli/mkdocs.yml b/gen/manager1_beta2-cli/mkdocs.yml index 5d228d328d..17c3d61167 100644 --- a/gen/manager1_beta2-cli/mkdocs.yml +++ b/gen/manager1_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: manager v4.0.1+20140915 +site_name: manager v5.0.2+20140915 site_url: http://byron.github.io/google-apis-rs/google-manager1_beta2-cli site_description: A complete library to interact with manager (protocol v1beta2) @@ -7,16 +7,18 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/manager1_beta2-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['deployments_delete.md', 'Deployments', 'Delete'] -- ['deployments_get.md', 'Deployments', 'Get'] -- ['deployments_insert.md', 'Deployments', 'Insert'] -- ['deployments_list.md', 'Deployments', 'List'] -- ['templates_delete.md', 'Templates', 'Delete'] -- ['templates_get.md', 'Templates', 'Get'] -- ['templates_insert.md', 'Templates', 'Insert'] -- ['templates_list.md', 'Templates', 'List'] +nav: +- Home: 'index.md' +- 'Deployments': + - 'Delete': 'deployments_delete.md' + - 'Get': 'deployments_get.md' + - 'Insert': 'deployments_insert.md' + - 'List': 'deployments_list.md' +- 'Templates': + - 'Delete': 'templates_delete.md' + - 'Get': 'templates_get.md' + - 'Insert': 'templates_insert.md' + - 'List': 'templates_list.md' theme: readthedocs diff --git a/gen/manager1_beta2-cli/src/client.rs b/gen/manager1_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- 
a/gen/manager1_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => 
Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut 
fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/manager1_beta2-cli/src/main.rs b/gen/manager1_beta2-cli/src/main.rs index 5fda5531e8..5609976e85 100644 --- a/gen/manager1_beta2-cli/src/main.rs +++ b/gen/manager1_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_manager1_beta2::{api, Error, oauth2}; +use google_manager1_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -247,7 +246,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -488,7 +487,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -886,7 +885,7 @@ async fn main() { let mut app = App::new("manager1-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20140915") + .version("5.0.2+20140915") .about("The Deployment Manager API allows users to declaratively configure, deploy and run complex solutions on the Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_manager1_beta2_cli") 
.arg(Arg::with_name("url") diff --git a/gen/manager1_beta2/Cargo.toml b/gen/manager1_beta2/Cargo.toml index 9f2c2f0677..58e08166c6 100644 --- a/gen/manager1_beta2/Cargo.toml +++ b/gen/manager1_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-manager1_beta2" -version = "5.0.2-beta-1+20140915" +version = "5.0.2+20140915" authors = ["Sebastian Thiel "] description = "A complete library to interact with manager (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/manager1_beta2" homepage = "https://developers.google.com/deployment-manager/" -documentation = "https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915" +documentation = "https://docs.rs/google-manager1_beta2/5.0.2+20140915" license = "MIT" keywords = ["manager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/manager1_beta2/README.md b/gen/manager1_beta2/README.md index 3b87cd95d1..6d31e645e0 100644 --- a/gen/manager1_beta2/README.md +++ b/gen/manager1_beta2/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-manager1_beta2` library allows access to all features of the *Google manager* service. -This documentation was generated from *manager* crate version *5.0.2-beta-1+20140915*, where *20140915* is the exact revision of the *manager:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *manager* crate version *5.0.2+20140915*, where *20140915* is the exact revision of the *manager:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *manager* *v1_beta2* API can be found at the [official documentation site](https://developers.google.com/deployment-manager/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/Manager) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/Manager) ... -* [deployments](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::Deployment) - * [*delete*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::DeploymentDeleteCall), [*get*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::DeploymentGetCall), [*insert*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::DeploymentInsertCall) and [*list*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::DeploymentListCall) -* [templates](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::Template) - * [*delete*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::TemplateDeleteCall), [*get*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::TemplateGetCall), [*insert*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::TemplateInsertCall) and [*list*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/api::TemplateListCall) +* [deployments](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::Deployment) + * [*delete*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::DeploymentDeleteCall), [*get*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::DeploymentGetCall), [*insert*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::DeploymentInsertCall) and [*list*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::DeploymentListCall) +* 
[templates](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::Template) + * [*delete*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::TemplateDeleteCall), [*get*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::TemplateGetCall), [*insert*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::TemplateInsertCall) and [*list*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/api::TemplateListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/Manager)** +* **[Hub](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/Manager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::Part)** + * **[Parts](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-manager1_beta2/5.0.2-beta-1+20140915/google_manager1_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-manager1_beta2/5.0.2+20140915/google_manager1_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/manager1_beta2/src/api.rs b/gen/manager1_beta2/src/api.rs index 7c826f87d1..4f48fbc548 100644 --- a/gen/manager1_beta2/src/api.rs +++ b/gen/manager1_beta2/src/api.rs @@ -145,7 +145,7 @@ impl<'a, S> Manager { Manager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/manager/v1beta2/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -159,7 +159,7 @@ impl<'a, S> Manager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/manager1_beta2/src/client.rs b/gen/manager1_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/manager1_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/manager1_beta2/src/lib.rs b/gen/manager1_beta2/src/lib.rs index b6a0bf7f87..36a2cb33e3 100644 --- a/gen/manager1_beta2/src/lib.rs +++ b/gen/manager1_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *manager* crate version *5.0.2-beta-1+20140915*, where *20140915* is the exact revision of the *manager:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *manager* crate version *5.0.2+20140915*, where *20140915* is the exact revision of the *manager:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *manager* *v1_beta2* API can be found at the //! [official documentation site](https://developers.google.com/deployment-manager/). diff --git a/gen/manufacturers1-cli/Cargo.toml b/gen/manufacturers1-cli/Cargo.toml index 5a96ef3baf..9711874df4 100644 --- a/gen/manufacturers1-cli/Cargo.toml +++ b/gen/manufacturers1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-manufacturers1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Manufacturer Center (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/manufacturers1-cli" @@ -20,13 +20,13 @@ name = "manufacturers1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-manufacturers1] path = "../manufacturers1" -version = "4.0.1+20220303" +version = "5.0.2+20230123" + diff --git a/gen/manufacturers1-cli/README.md b/gen/manufacturers1-cli/README.md index 75e5d8d7ea..5d25211728 100644 --- a/gen/manufacturers1-cli/README.md +++ b/gen/manufacturers1-cli/README.md @@ -25,7 +25,7 @@ Find the source code 
[on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Manufacturer Center* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *Manufacturer Center* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash manufacturers1 [options] diff --git a/gen/manufacturers1-cli/mkdocs.yml b/gen/manufacturers1-cli/mkdocs.yml index 1aae6af486..04f4262f15 100644 --- a/gen/manufacturers1-cli/mkdocs.yml +++ b/gen/manufacturers1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Manufacturer Center v4.0.1+20220303 +site_name: Manufacturer Center v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-manufacturers1-cli site_description: A complete library to interact with Manufacturer Center (protocol v1) @@ -7,12 +7,13 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/manufacturers1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_products-delete.md', 'Accounts', 'Products Delete'] -- ['accounts_products-get.md', 'Accounts', 'Products Get'] -- ['accounts_products-list.md', 'Accounts', 'Products List'] -- ['accounts_products-update.md', 'Accounts', 'Products Update'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Products Delete': 'accounts_products-delete.md' + - 'Products Get': 'accounts_products-get.md' + - 'Products List': 'accounts_products-list.md' + - 'Products Update': 'accounts_products-update.md' theme: readthedocs diff --git a/gen/manufacturers1-cli/src/client.rs b/gen/manufacturers1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/manufacturers1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; 
-use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/manufacturers1-cli/src/main.rs b/gen/manufacturers1-cli/src/main.rs index b0a1ba5d9f..a85a8a1f9a 100644 --- a/gen/manufacturers1-cli/src/main.rs +++ b/gen/manufacturers1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_manufacturers1::{api, Error, oauth2}; +use google_manufacturers1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -169,7 +168,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "include" => { call = call.add_include(value.unwrap_or("")); @@ -257,6 +256,15 @@ where "flavor" => Some(("flavor", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "format" => Some(("format", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gender" => Some(("gender", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "grocery.active-ingredients" => Some(("grocery.activeIngredients", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "grocery.alcohol-by-volume" => Some(("grocery.alcoholByVolume", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "grocery.allergens" => Some(("grocery.allergens", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"grocery.derived-nutrition-claim" => Some(("grocery.derivedNutritionClaim", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "grocery.directions" => Some(("grocery.directions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "grocery.indications" => Some(("grocery.indications", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "grocery.ingredients" => Some(("grocery.ingredients", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "grocery.nutrition-claim" => Some(("grocery.nutritionClaim", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "grocery.storage-instructions" => Some(("grocery.storageInstructions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "gtin" => Some(("gtin", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "image-link.image-url" => Some(("imageLink.imageUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "image-link.status" => Some(("imageLink.status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -265,6 +273,70 @@ where "item-group-id" => Some(("itemGroupId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "material" => Some(("material", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mpn" => Some(("mpn", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.added-sugars.amount" => Some(("nutrition.addedSugars.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.added-sugars.unit" => Some(("nutrition.addedSugars.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.added-sugars-daily-percentage" => Some(("nutrition.addedSugarsDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.calcium.amount" => Some(("nutrition.calcium.amount", JsonTypeInfo { jtype: 
JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.calcium.unit" => Some(("nutrition.calcium.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.calcium-daily-percentage" => Some(("nutrition.calciumDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.cholesterol.amount" => Some(("nutrition.cholesterol.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.cholesterol.unit" => Some(("nutrition.cholesterol.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.cholesterol-daily-percentage" => Some(("nutrition.cholesterolDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.dietary-fiber.amount" => Some(("nutrition.dietaryFiber.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.dietary-fiber.unit" => Some(("nutrition.dietaryFiber.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.dietary-fiber-daily-percentage" => Some(("nutrition.dietaryFiberDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.energy.amount" => Some(("nutrition.energy.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.energy.unit" => Some(("nutrition.energy.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.energy-from-fat.amount" => Some(("nutrition.energyFromFat.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.energy-from-fat.unit" => Some(("nutrition.energyFromFat.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.folate-daily-percentage" => Some(("nutrition.folateDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.folate-folic-acid.amount" => 
Some(("nutrition.folateFolicAcid.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.folate-folic-acid.unit" => Some(("nutrition.folateFolicAcid.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.folate-mcg-dfe" => Some(("nutrition.folateMcgDfe", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.iron.amount" => Some(("nutrition.iron.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.iron.unit" => Some(("nutrition.iron.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.iron-daily-percentage" => Some(("nutrition.ironDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.monounsaturated-fat.amount" => Some(("nutrition.monounsaturatedFat.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.monounsaturated-fat.unit" => Some(("nutrition.monounsaturatedFat.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.nutrition-fact-measure" => Some(("nutrition.nutritionFactMeasure", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.polyols.amount" => Some(("nutrition.polyols.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.polyols.unit" => Some(("nutrition.polyols.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.polyunsaturated-fat.amount" => Some(("nutrition.polyunsaturatedFat.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.polyunsaturated-fat.unit" => Some(("nutrition.polyunsaturatedFat.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.potassium.amount" => Some(("nutrition.potassium.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.potassium.unit" => 
Some(("nutrition.potassium.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.potassium-daily-percentage" => Some(("nutrition.potassiumDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.prepared-size-description" => Some(("nutrition.preparedSizeDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.protein.amount" => Some(("nutrition.protein.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.protein.unit" => Some(("nutrition.protein.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.protein-daily-percentage" => Some(("nutrition.proteinDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.saturated-fat.amount" => Some(("nutrition.saturatedFat.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.saturated-fat.unit" => Some(("nutrition.saturatedFat.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.saturated-fat-daily-percentage" => Some(("nutrition.saturatedFatDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.serving-size-description" => Some(("nutrition.servingSizeDescription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.serving-size-measure.amount" => Some(("nutrition.servingSizeMeasure.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.serving-size-measure.unit" => Some(("nutrition.servingSizeMeasure.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.servings-per-container" => Some(("nutrition.servingsPerContainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.sodium.amount" => Some(("nutrition.sodium.amount", JsonTypeInfo { jtype: JsonType::Float, 
ctype: ComplexType::Pod })), + "nutrition.sodium.unit" => Some(("nutrition.sodium.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.sodium-daily-percentage" => Some(("nutrition.sodiumDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.starch.amount" => Some(("nutrition.starch.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.starch.unit" => Some(("nutrition.starch.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.total-carbohydrate.amount" => Some(("nutrition.totalCarbohydrate.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.total-carbohydrate.unit" => Some(("nutrition.totalCarbohydrate.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.total-carbohydrate-daily-percentage" => Some(("nutrition.totalCarbohydrateDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.total-fat.amount" => Some(("nutrition.totalFat.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.total-fat.unit" => Some(("nutrition.totalFat.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.total-fat-daily-percentage" => Some(("nutrition.totalFatDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.total-sugars.amount" => Some(("nutrition.totalSugars.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.total-sugars.unit" => Some(("nutrition.totalSugars.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.total-sugars-daily-percentage" => Some(("nutrition.totalSugarsDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.trans-fat.amount" => Some(("nutrition.transFat.amount", 
JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.trans-fat.unit" => Some(("nutrition.transFat.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.trans-fat-daily-percentage" => Some(("nutrition.transFatDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.vitamin-d.amount" => Some(("nutrition.vitaminD.amount", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "nutrition.vitamin-d.unit" => Some(("nutrition.vitaminD.unit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "nutrition.vitamin-d-daily-percentage" => Some(("nutrition.vitaminDDailyPercentage", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "pattern" => Some(("pattern", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "product-highlight" => Some(("productHighlight", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "product-line" => Some(("productLine", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -284,7 +356,7 @@ where "title" => Some(("title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "video-link" => Some(("videoLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["age-group", "amount", "brand", "capacity", "color", "count", "currency", "description", "disclosure-date", "excluded-destination", "flavor", "format", "gender", "gtin", "image-link", "image-url", "included-destination", "item-group-id", "material", "mpn", "pattern", "product-highlight", "product-line", "product-name", "product-page-url", "product-type", "release-date", "rich-product-content", "scent", "size", "size-system", "size-type", "status", "suggested-retail-price", "target-client-id", "theme", "title", "type", "unit", "value", "video-link"]); + let suggestion = 
FieldCursor::did_you_mean(key, &vec!["active-ingredients", "added-sugars", "added-sugars-daily-percentage", "age-group", "alcohol-by-volume", "allergens", "amount", "brand", "calcium", "calcium-daily-percentage", "capacity", "cholesterol", "cholesterol-daily-percentage", "color", "count", "currency", "derived-nutrition-claim", "description", "dietary-fiber", "dietary-fiber-daily-percentage", "directions", "disclosure-date", "energy", "energy-from-fat", "excluded-destination", "flavor", "folate-daily-percentage", "folate-folic-acid", "folate-mcg-dfe", "format", "gender", "grocery", "gtin", "image-link", "image-url", "included-destination", "indications", "ingredients", "iron", "iron-daily-percentage", "item-group-id", "material", "monounsaturated-fat", "mpn", "nutrition", "nutrition-claim", "nutrition-fact-measure", "pattern", "polyols", "polyunsaturated-fat", "potassium", "potassium-daily-percentage", "prepared-size-description", "product-highlight", "product-line", "product-name", "product-page-url", "product-type", "protein", "protein-daily-percentage", "release-date", "rich-product-content", "saturated-fat", "saturated-fat-daily-percentage", "scent", "serving-size-description", "serving-size-measure", "servings-per-container", "size", "size-system", "size-type", "sodium", "sodium-daily-percentage", "starch", "status", "storage-instructions", "suggested-retail-price", "target-client-id", "theme", "title", "total-carbohydrate", "total-carbohydrate-daily-percentage", "total-fat", "total-fat-daily-percentage", "total-sugars", "total-sugars-daily-percentage", "trans-fat", "trans-fat-daily-percentage", "type", "unit", "value", "video-link", "vitamin-d", "vitamin-d-daily-percentage"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -561,7 +633,7 @@ async fn main() { let mut app = App::new("manufacturers1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + 
.version("5.0.2+20230123") .about("Public API for managing Manufacturer Center related data.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_manufacturers1_cli") .arg(Arg::with_name("url") diff --git a/gen/manufacturers1/Cargo.toml b/gen/manufacturers1/Cargo.toml index 41dde5de0e..5f41b73e82 100644 --- a/gen/manufacturers1/Cargo.toml +++ b/gen/manufacturers1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-manufacturers1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Manufacturer Center (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/manufacturers1" homepage = "https://developers.google.com/manufacturers/" -documentation = "https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-manufacturers1/5.0.2+20230123" license = "MIT" keywords = ["manufacturers", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/manufacturers1/README.md b/gen/manufacturers1/README.md index aecf785b57..ae03052532 100644 --- a/gen/manufacturers1/README.md +++ b/gen/manufacturers1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-manufacturers1` library allows access to all features of the *Google Manufacturer Center* service. -This documentation was generated from *Manufacturer Center* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *manufacturers:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Manufacturer Center* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *manufacturers:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Manufacturer Center* *v1* API can be found at the [official documentation site](https://developers.google.com/manufacturers/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/ManufacturerCenter) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/ManufacturerCenter) ... * accounts - * [*products delete*](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/api::AccountProductDeleteCall), [*products get*](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/api::AccountProductGetCall), [*products list*](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/api::AccountProductListCall) and [*products update*](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/api::AccountProductUpdateCall) + * [*products delete*](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/api::AccountProductDeleteCall), [*products get*](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/api::AccountProductGetCall), [*products list*](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/api::AccountProductListCall) and [*products update*](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/api::AccountProductUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/ManufacturerCenter)** +* **[Hub](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/ManufacturerCenter)** * a central object to maintain state and allow accessing all 
*Activities* - * creates [*Method Builders*](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::CallBuilder) -* **[Resources](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::CallBuilder) +* **[Resources](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::Part)** + * **[Parts](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::Delegate) to the -[Method Builder](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::Delegate) to the +[Method Builder](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::RequestValue) and -[decodable](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::RequestValue) and +[decodable](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-manufacturers1/5.0.2-beta-1+20230123/google_manufacturers1/client::RequestValue) are moved +* [request values](https://docs.rs/google-manufacturers1/5.0.2+20230123/google_manufacturers1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/manufacturers1/src/api.rs b/gen/manufacturers1/src/api.rs index e31b27754e..ab947ff707 100644 --- a/gen/manufacturers1/src/api.rs +++ b/gen/manufacturers1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> ManufacturerCenter { ManufacturerCenter { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://manufacturers.googleapis.com/".to_string(), _root_url: "https://manufacturers.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> ManufacturerCenter { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/manufacturers1/src/client.rs b/gen/manufacturers1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/manufacturers1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/manufacturers1/src/lib.rs b/gen/manufacturers1/src/lib.rs index 17623e51a3..0b80395453 100644 --- a/gen/manufacturers1/src/lib.rs +++ b/gen/manufacturers1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Manufacturer Center* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *manufacturers:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Manufacturer Center* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *manufacturers:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Manufacturer Center* *v1* API can be found at the //! [official documentation site](https://developers.google.com/manufacturers/). diff --git a/gen/memcache1-cli/Cargo.toml b/gen/memcache1-cli/Cargo.toml index cb04958819..250ea94a0e 100644 --- a/gen/memcache1-cli/Cargo.toml +++ b/gen/memcache1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-memcache1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Memorystore for Memcached (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/memcache1-cli" @@ -20,13 +20,13 @@ name = "memcache1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-memcache1] path = "../memcache1" -version = "4.0.1+20220224" +version = "5.0.2+20230103" + diff --git a/gen/memcache1-cli/README.md b/gen/memcache1-cli/README.md index 89b899e76d..2653b69be3 100644 --- a/gen/memcache1-cli/README.md +++ b/gen/memcache1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Memorystore for Memcached* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Memorystore for Memcached* API at revision *20230103*. The CLI is at version *5.0.2*. ```bash memcache1 [options] @@ -37,6 +37,7 @@ memcache1 [options] locations-instances-get [-p ]... [-o ] locations-instances-list [-p ]... [-o ] locations-instances-patch (-r )... [-p ]... [-o ] + locations-instances-reschedule-maintenance (-r )... [-p ]... [-o ] locations-instances-update-parameters (-r )... [-p ]... [-o ] locations-list [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... [-o ] diff --git a/gen/memcache1-cli/mkdocs.yml b/gen/memcache1-cli/mkdocs.yml index 7d0441a45f..350e66caa8 100644 --- a/gen/memcache1-cli/mkdocs.yml +++ b/gen/memcache1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Memorystore for Memcached v4.0.1+20220224 +site_name: Cloud Memorystore for Memcached v5.0.2+20230103 site_url: http://byron.github.io/google-apis-rs/google-memcache1-cli site_description: A complete library to interact with Cloud Memorystore for Memcached (protocol v1) @@ -7,21 +7,23 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/memcache1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-apply-parameters.md', 'Projects', 'Locations Instances Apply Parameters'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-patch.md', 'Projects', 'Locations Instances Patch'] -- 
['projects_locations-instances-update-parameters.md', 'Projects', 'Locations Instances Update Parameters'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Instances Apply Parameters': 'projects_locations-instances-apply-parameters.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Patch': 'projects_locations-instances-patch.md' + - 'Locations Instances Reschedule Maintenance': 'projects_locations-instances-reschedule-maintenance.md' + - 'Locations Instances Update Parameters': 'projects_locations-instances-update-parameters.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/memcache1-cli/src/client.rs b/gen/memcache1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/memcache1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, 
ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/memcache1-cli/src/main.rs b/gen/memcache1-cli/src/main.rs index 55cc47096f..19b5819ad6 100644 --- a/gen/memcache1-cli/src/main.rs +++ b/gen/memcache1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_memcache1::{api, Error, oauth2}; +use google_memcache1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -217,6 +216,12 @@ where "discovery-endpoint" => Some(("discoveryEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "maintenance-policy.create-time" => Some(("maintenancePolicy.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-policy.description" => Some(("maintenancePolicy.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-policy.update-time" => Some(("maintenancePolicy.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.end-time" => Some(("maintenanceSchedule.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.schedule-deadline-time" => Some(("maintenanceSchedule.scheduleDeadlineTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"maintenance-schedule.start-time" => Some(("maintenanceSchedule.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "memcache-full-version" => Some(("memcacheFullVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "memcache-version" => Some(("memcacheVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -229,7 +234,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zones" => Some(("zones", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["authorized-network", "cpu-count", "create-time", "discovery-endpoint", "display-name", "id", "labels", "memcache-full-version", "memcache-version", "memory-size-mb", "name", "node-config", "node-count", "parameters", "params", "state", "update-time", "zones"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["authorized-network", "cpu-count", "create-time", "description", "discovery-endpoint", "display-name", "end-time", "id", "labels", "maintenance-policy", "maintenance-schedule", "memcache-full-version", "memcache-version", "memory-size-mb", "name", "node-config", "node-count", "parameters", "params", "schedule-deadline-time", "start-time", "state", "update-time", "zones"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -407,7 +412,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -490,6 +495,12 @@ where "discovery-endpoint" => 
Some(("discoveryEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "maintenance-policy.create-time" => Some(("maintenancePolicy.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-policy.description" => Some(("maintenancePolicy.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-policy.update-time" => Some(("maintenancePolicy.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.end-time" => Some(("maintenanceSchedule.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.schedule-deadline-time" => Some(("maintenanceSchedule.scheduleDeadlineTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.start-time" => Some(("maintenanceSchedule.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "memcache-full-version" => Some(("memcacheFullVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "memcache-version" => Some(("memcacheVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -502,7 +513,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zones" => Some(("zones", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["authorized-network", "cpu-count", "create-time", "discovery-endpoint", "display-name", "id", "labels", "memcache-full-version", "memcache-version", "memory-size-mb", "name", 
"node-config", "node-count", "parameters", "params", "state", "update-time", "zones"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["authorized-network", "cpu-count", "create-time", "description", "discovery-endpoint", "display-name", "end-time", "id", "labels", "maintenance-policy", "maintenance-schedule", "memcache-full-version", "memcache-version", "memory-size-mb", "name", "node-config", "node-count", "parameters", "params", "schedule-deadline-time", "start-time", "state", "update-time", "zones"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -517,7 +528,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -566,6 +577,92 @@ where } } + async fn _projects_locations_instances_reschedule_maintenance(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "reschedule-type" => Some(("rescheduleType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schedule-time" => Some(("scheduleTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["reschedule-type", "schedule-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RescheduleMaintenanceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_reschedule_maintenance(request, opt.value_of("instance").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_instances_update_parameters(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -663,7 +760,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -913,7 +1010,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -993,6 +1090,9 @@ where ("locations-instances-patch", Some(opt)) => { call_result = self._projects_locations_instances_patch(opt, dry_run, &mut err).await; }, + ("locations-instances-reschedule-maintenance", Some(opt)) => { + call_result = self._projects_locations_instances_reschedule_maintenance(opt, dry_run, &mut err).await; + }, ("locations-instances-update-parameters", Some(opt)) => { call_result = self._projects_locations_instances_update_parameters(opt, dry_run, &mut err).await; }, @@ -1090,7 +1190,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-instances-apply-parameters', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-update-parameters', 
'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-get', 'locations-instances-apply-parameters', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-reschedule-maintenance', 'locations-instances-update-parameters', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_memcache1_cli/projects_locations-get", @@ -1257,6 +1357,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-reschedule-maintenance", + Some(r##"Reschedules upcoming maintenance event."##), + "Details at http://byron.github.io/google-apis-rs/google_memcache1_cli/projects_locations-instances-reschedule-maintenance", + vec![ + (Some(r##"instance"##), + None, + Some(r##"Required. 
Memcache instance resource name using the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where `location_id` refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1413,7 +1541,7 @@ async fn main() { let mut app = App::new("memcache1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230103") .about("Google Cloud Memorystore for Memcached API is used for creating and managing Memcached instances in GCP.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_memcache1_cli") .arg(Arg::with_name("url") diff --git a/gen/memcache1/Cargo.toml b/gen/memcache1/Cargo.toml index 73289f95a7..95c03649d2 100644 --- a/gen/memcache1/Cargo.toml +++ b/gen/memcache1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-memcache1" -version = "5.0.2-beta-1+20230103" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Memorystore for Memcached (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/memcache1" homepage = "https://cloud.google.com/memorystore/" -documentation = "https://docs.rs/google-memcache1/5.0.2-beta-1+20230103" +documentation = "https://docs.rs/google-memcache1/5.0.2+20230103" license = "MIT" keywords = ["memcache", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/memcache1/README.md b/gen/memcache1/README.md index 8f2f7f6407..0452368641 100644 --- a/gen/memcache1/README.md +++ b/gen/memcache1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-memcache1` library allows access to all features of the *Google Cloud Memorystore for Memcached* service. -This documentation was generated from *Cloud Memorystore for Memcached* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *memcache:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Memorystore for Memcached* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *memcache:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Memorystore for Memcached* *v1* API can be found at the [official documentation site](https://cloud.google.com/memorystore/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/CloudMemorystoreForMemcached) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/CloudMemorystoreForMemcached) ... 
* projects - * [*locations get*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationGetCall), [*locations instances apply parameters*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationInstanceApplyParameterCall), [*locations instances create*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationInstanceDeleteCall), [*locations instances get*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationInstancePatchCall), [*locations instances reschedule maintenance*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationInstanceRescheduleMaintenanceCall), [*locations instances update parameters*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationInstanceUpdateParameterCall), [*locations list*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationOperationGetCall) and [*locations operations 
list*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/api::ProjectLocationOperationListCall) + * [*locations get*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationGetCall), [*locations instances apply parameters*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationInstanceApplyParameterCall), [*locations instances create*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationInstanceDeleteCall), [*locations instances get*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationInstancePatchCall), [*locations instances reschedule maintenance*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationInstanceRescheduleMaintenanceCall), [*locations instances update parameters*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationInstanceUpdateParameterCall), [*locations list*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationOperationGetCall) and [*locations operations 
list*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/CloudMemorystoreForMemcached)** +* **[Hub](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/CloudMemorystoreForMemcached)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::CallBuilder) -* **[Resources](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::CallBuilder) +* **[Resources](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::Part)** + * **[Parts](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further 
categorize them and ease browsing. @@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::Delegate) to the -[Method Builder](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::Delegate) to the +[Method Builder](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::RequestValue) and -[decodable](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::RequestValue) and +[decodable](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-memcache1/5.0.2-beta-1+20230103/google_memcache1/client::RequestValue) are moved +* [request values](https://docs.rs/google-memcache1/5.0.2+20230103/google_memcache1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/memcache1/src/api.rs b/gen/memcache1/src/api.rs index a037d88458..d249a51cae 100644 --- a/gen/memcache1/src/api.rs +++ b/gen/memcache1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudMemorystoreForMemcached { CloudMemorystoreForMemcached { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://memcache.googleapis.com/".to_string(), _root_url: "https://memcache.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudMemorystoreForMemcached { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/memcache1/src/client.rs b/gen/memcache1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/memcache1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/memcache1/src/lib.rs b/gen/memcache1/src/lib.rs index 8daea3f7d2..db0ab2eb9a 100644 --- a/gen/memcache1/src/lib.rs +++ b/gen/memcache1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Memorystore for Memcached* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *memcache:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Memorystore for Memcached* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *memcache:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Memorystore for Memcached* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/memorystore/). diff --git a/gen/memcache1_beta2-cli/Cargo.toml b/gen/memcache1_beta2-cli/Cargo.toml index 6539d8c4c3..893209e0d4 100644 --- a/gen/memcache1_beta2-cli/Cargo.toml +++ b/gen/memcache1_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-memcache1_beta2-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Memorystore for Memcached (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/memcache1_beta2-cli" @@ -20,13 +20,13 @@ name = "memcache1-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-memcache1_beta2] path = "../memcache1_beta2" -version = "4.0.1+20220224" +version = "5.0.2+20230103" + diff --git a/gen/memcache1_beta2-cli/README.md b/gen/memcache1_beta2-cli/README.md index 2741013f84..d1659e5238 100644 --- a/gen/memcache1_beta2-cli/README.md +++ 
b/gen/memcache1_beta2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Memorystore for Memcached* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Memorystore for Memcached* API at revision *20230103*. The CLI is at version *5.0.2*. ```bash memcache1-beta2 [options] @@ -38,6 +38,7 @@ memcache1-beta2 [options] locations-instances-get [-p ]... [-o ] locations-instances-list [-p ]... [-o ] locations-instances-patch (-r )... [-p ]... [-o ] + locations-instances-reschedule-maintenance (-r )... [-p ]... [-o ] locations-instances-update-parameters (-r )... [-p ]... [-o ] locations-list [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... [-o ] diff --git a/gen/memcache1_beta2-cli/mkdocs.yml b/gen/memcache1_beta2-cli/mkdocs.yml index 8779651af6..09d45a4b37 100644 --- a/gen/memcache1_beta2-cli/mkdocs.yml +++ b/gen/memcache1_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Memorystore for Memcached v4.0.1+20220224 +site_name: Cloud Memorystore for Memcached v5.0.2+20230103 site_url: http://byron.github.io/google-apis-rs/google-memcache1_beta2-cli site_description: A complete library to interact with Cloud Memorystore for Memcached (protocol v1beta2) @@ -7,22 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/memcache1_beta2- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-apply-parameters.md', 'Projects', 'Locations Instances Apply Parameters'] -- ['projects_locations-instances-apply-software-update.md', 'Projects', 'Locations Instances Apply Software Update'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- 
['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-patch.md', 'Projects', 'Locations Instances Patch'] -- ['projects_locations-instances-update-parameters.md', 'Projects', 'Locations Instances Update Parameters'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Instances Apply Parameters': 'projects_locations-instances-apply-parameters.md' + - 'Locations Instances Apply Software Update': 'projects_locations-instances-apply-software-update.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Patch': 'projects_locations-instances-patch.md' + - 'Locations Instances Reschedule Maintenance': 'projects_locations-instances-reschedule-maintenance.md' + - 'Locations Instances Update Parameters': 'projects_locations-instances-update-parameters.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs 
diff --git a/gen/memcache1_beta2-cli/src/client.rs b/gen/memcache1_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/memcache1_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/memcache1_beta2-cli/src/main.rs b/gen/memcache1_beta2-cli/src/main.rs index bd5f065a66..6bf536b5ec 100644 --- a/gen/memcache1_beta2-cli/src/main.rs +++ b/gen/memcache1_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_memcache1_beta2::{api, Error, oauth2}; +use google_memcache1_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -303,6 +302,12 @@ where "discovery-endpoint" => Some(("discoveryEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "maintenance-policy.create-time" => Some(("maintenancePolicy.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-policy.description" => Some(("maintenancePolicy.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-policy.update-time" => Some(("maintenancePolicy.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.end-time" => Some(("maintenanceSchedule.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.schedule-deadline-time" => Some(("maintenanceSchedule.scheduleDeadlineTime", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "maintenance-schedule.start-time" => Some(("maintenanceSchedule.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "memcache-full-version" => Some(("memcacheFullVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "memcache-version" => Some(("memcacheVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -316,7 +321,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zones" => Some(("zones", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["authorized-network", "cpu-count", "create-time", "discovery-endpoint", "display-name", "id", "labels", "memcache-full-version", "memcache-version", "memory-size-mb", "name", "node-config", "node-count", "parameters", "params", "state", "update-available", "update-time", "zones"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["authorized-network", "cpu-count", "create-time", "description", "discovery-endpoint", "display-name", "end-time", "id", "labels", "maintenance-policy", "maintenance-schedule", "memcache-full-version", "memcache-version", "memory-size-mb", "name", "node-config", "node-count", "parameters", "params", "schedule-deadline-time", "start-time", "state", "update-available", "update-time", "zones"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -494,7 +499,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); 
@@ -577,6 +582,12 @@ where "discovery-endpoint" => Some(("discoveryEndpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "maintenance-policy.create-time" => Some(("maintenancePolicy.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-policy.description" => Some(("maintenancePolicy.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-policy.update-time" => Some(("maintenancePolicy.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.end-time" => Some(("maintenanceSchedule.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.schedule-deadline-time" => Some(("maintenanceSchedule.scheduleDeadlineTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "maintenance-schedule.start-time" => Some(("maintenanceSchedule.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "memcache-full-version" => Some(("memcacheFullVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "memcache-version" => Some(("memcacheVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -590,7 +601,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "zones" => Some(("zones", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["authorized-network", "cpu-count", "create-time", "discovery-endpoint", "display-name", "id", "labels", 
"memcache-full-version", "memcache-version", "memory-size-mb", "name", "node-config", "node-count", "parameters", "params", "state", "update-available", "update-time", "zones"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["authorized-network", "cpu-count", "create-time", "description", "discovery-endpoint", "display-name", "end-time", "id", "labels", "maintenance-policy", "maintenance-schedule", "memcache-full-version", "memcache-version", "memory-size-mb", "name", "node-config", "node-count", "parameters", "params", "schedule-deadline-time", "start-time", "state", "update-available", "update-time", "zones"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -605,7 +616,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -654,6 +665,92 @@ where } } + async fn _projects_locations_instances_reschedule_maintenance(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "reschedule-type" => Some(("rescheduleType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schedule-time" => Some(("scheduleTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["reschedule-type", "schedule-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RescheduleMaintenanceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_reschedule_maintenance(request, opt.value_of("instance").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_instances_update_parameters(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -751,7 +848,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1001,7 +1098,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1084,6 +1181,9 @@ where ("locations-instances-patch", Some(opt)) => { call_result = self._projects_locations_instances_patch(opt, dry_run, &mut err).await; }, + ("locations-instances-reschedule-maintenance", Some(opt)) => { + call_result = self._projects_locations_instances_reschedule_maintenance(opt, dry_run, &mut err).await; + }, ("locations-instances-update-parameters", Some(opt)) => { call_result = self._projects_locations_instances_update_parameters(opt, dry_run, &mut err).await; }, @@ -1181,7 +1281,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-instances-apply-parameters', 'locations-instances-apply-software-update', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-list', 
'locations-instances-patch', 'locations-instances-update-parameters', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-get', 'locations-instances-apply-parameters', 'locations-instances-apply-software-update', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-list', 'locations-instances-patch', 'locations-instances-reschedule-maintenance', 'locations-instances-update-parameters', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_memcache1_beta2_cli/projects_locations-get", @@ -1376,6 +1476,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-reschedule-maintenance", + Some(r##"Performs the apply phase of the RescheduleMaintenance verb."##), + "Details at http://byron.github.io/google-apis-rs/google_memcache1_beta2_cli/projects_locations-instances-reschedule-maintenance", + vec![ + (Some(r##"instance"##), + None, + Some(r##"Required. 
Memcache instance resource name using the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where `location_id` refers to a GCP region."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1532,7 +1660,7 @@ async fn main() { let mut app = App::new("memcache1-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230103") .about("Google Cloud Memorystore for Memcached API is used for creating and managing Memcached instances in GCP.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_memcache1_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/memcache1_beta2/Cargo.toml b/gen/memcache1_beta2/Cargo.toml index 8703098c25..3293492a1f 100644 --- a/gen/memcache1_beta2/Cargo.toml +++ b/gen/memcache1_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-memcache1_beta2" -version = "5.0.2-beta-1+20230103" +version = "5.0.2+20230103" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Memorystore for Memcached (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/memcache1_beta2" homepage = "https://cloud.google.com/memorystore/" -documentation = "https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103" +documentation = "https://docs.rs/google-memcache1_beta2/5.0.2+20230103" license = "MIT" keywords = ["memcache", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/memcache1_beta2/README.md b/gen/memcache1_beta2/README.md index 886955e81a..f316cdb4f3 100644 --- 
a/gen/memcache1_beta2/README.md +++ b/gen/memcache1_beta2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-memcache1_beta2` library allows access to all features of the *Google Cloud Memorystore for Memcached* service. -This documentation was generated from *Cloud Memorystore for Memcached* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *memcache:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Memorystore for Memcached* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *memcache:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Memorystore for Memcached* *v1_beta2* API can be found at the [official documentation site](https://cloud.google.com/memorystore/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/CloudMemorystoreForMemcached) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/CloudMemorystoreForMemcached) ... 
* projects - * [*locations get*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationGetCall), [*locations instances apply parameters*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstanceApplyParameterCall), [*locations instances apply software update*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstanceApplySoftwareUpdateCall), [*locations instances create*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstanceDeleteCall), [*locations instances get*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstancePatchCall), [*locations instances reschedule maintenance*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstanceRescheduleMaintenanceCall), [*locations instances update parameters*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationInstanceUpdateParameterCall), [*locations list*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationOperationCancelCall), [*locations operations 
delete*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/api::ProjectLocationOperationListCall) + * [*locations get*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationGetCall), [*locations instances apply parameters*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstanceApplyParameterCall), [*locations instances apply software update*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstanceApplySoftwareUpdateCall), [*locations instances create*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstanceDeleteCall), [*locations instances get*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstanceGetCall), [*locations instances list*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstancePatchCall), [*locations instances reschedule maintenance*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstanceRescheduleMaintenanceCall), [*locations instances update parameters*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationInstanceUpdateParameterCall), 
[*locations list*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/CloudMemorystoreForMemcached)** +* **[Hub](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/CloudMemorystoreForMemcached)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::Part)** + * **[Parts](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. 
This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-memcache1_beta2/5.0.2-beta-1+20230103/google_memcache1_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-memcache1_beta2/5.0.2+20230103/google_memcache1_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/memcache1_beta2/src/api.rs b/gen/memcache1_beta2/src/api.rs index a6fe0b4bb4..72a9ef50c3 100644 --- a/gen/memcache1_beta2/src/api.rs +++ b/gen/memcache1_beta2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudMemorystoreForMemcached { CloudMemorystoreForMemcached { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://memcache.googleapis.com/".to_string(), _root_url: "https://memcache.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudMemorystoreForMemcached { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/memcache1_beta2/src/client.rs b/gen/memcache1_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/memcache1_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/memcache1_beta2/src/lib.rs b/gen/memcache1_beta2/src/lib.rs index fe11df2b8c..09ee569e6a 100644 --- a/gen/memcache1_beta2/src/lib.rs +++ b/gen/memcache1_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Memorystore for Memcached* crate version *5.0.2-beta-1+20230103*, where *20230103* is the exact revision of the *memcache:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Memorystore for Memcached* crate version *5.0.2+20230103*, where *20230103* is the exact revision of the *memcache:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Memorystore for Memcached* *v1_beta2* API can be found at the //! [official documentation site](https://cloud.google.com/memorystore/). diff --git a/gen/metastore1_beta-cli/Cargo.toml b/gen/metastore1_beta-cli/Cargo.toml index 487f9270c4..55b5809e90 100644 --- a/gen/metastore1_beta-cli/Cargo.toml +++ b/gen/metastore1_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-metastore1_beta-cli" -version = "4.0.1+20220222" +version = "5.0.2+20230111" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dataproc Metastore (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/metastore1_beta-cli" @@ -20,13 +20,13 @@ name = "metastore1-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-metastore1_beta] path = "../metastore1_beta" -version = "4.0.1+20220222" +version = "5.0.2+20230111" + diff --git a/gen/metastore1_beta-cli/README.md b/gen/metastore1_beta-cli/README.md index 0298b4d998..4cafb27b4c 100644 --- a/gen/metastore1_beta-cli/README.md 
+++ b/gen/metastore1_beta-cli/README.md @@ -25,16 +25,25 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Dataproc Metastore* API at revision *20220222*. The CLI is at version *4.0.1*. +This documentation was generated from the *Dataproc Metastore* API at revision *20230111*. The CLI is at version *5.0.2*. ```bash metastore1-beta [options] projects + locations-federations-create (-r )... [-p ]... [-o ] + locations-federations-delete [-p ]... [-o ] + locations-federations-get [-p ]... [-o ] + locations-federations-get-iam-policy [-p ]... [-o ] + locations-federations-list [-p ]... [-o ] + locations-federations-patch (-r )... [-p ]... [-o ] + locations-federations-set-iam-policy (-r )... [-p ]... [-o ] + locations-federations-test-iam-permissions (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-list [-p ]... [-o ] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] + locations-services-alter-location (-r )... [-p ]... [-o ] locations-services-backups-create (-r )... [-p ]... [-o ] locations-services-backups-delete [-p ]... [-o ] locations-services-backups-get [-p ]... [-o ] @@ -58,7 +67,9 @@ metastore1-beta [options] locations-services-metadata-imports-get [-p ]... [-o ] locations-services-metadata-imports-list [-p ]... [-o ] locations-services-metadata-imports-patch (-r )... [-p ]... [-o ] + locations-services-move-table-to-database (-r )... [-p ]... [-o ] locations-services-patch (-r )... [-p ]... [-o ] + locations-services-query-metadata (-r )... [-p ]... [-o ] locations-services-remove-iam-policy (-r )... [-p ]... [-o ] locations-services-restore (-r )... [-p ]... [-o ] locations-services-set-iam-policy (-r )... [-p ]... 
[-o ] diff --git a/gen/metastore1_beta-cli/mkdocs.yml b/gen/metastore1_beta-cli/mkdocs.yml index 5bca0319f4..96fe9e54df 100644 --- a/gen/metastore1_beta-cli/mkdocs.yml +++ b/gen/metastore1_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Dataproc Metastore v4.0.1+20220222 +site_name: Dataproc Metastore v5.0.2+20230111 site_url: http://byron.github.io/google-apis-rs/google-metastore1_beta-cli site_description: A complete library to interact with Dataproc Metastore (protocol v1beta) @@ -7,41 +7,53 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/metastore1_beta- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-services-backups-create.md', 'Projects', 'Locations Services Backups Create'] -- ['projects_locations-services-backups-delete.md', 'Projects', 'Locations Services Backups Delete'] -- ['projects_locations-services-backups-get.md', 'Projects', 'Locations Services Backups Get'] -- ['projects_locations-services-backups-get-iam-policy.md', 'Projects', 'Locations Services Backups Get Iam Policy'] -- ['projects_locations-services-backups-list.md', 'Projects', 'Locations Services Backups List'] -- ['projects_locations-services-backups-set-iam-policy.md', 'Projects', 'Locations Services Backups Set Iam Policy'] -- ['projects_locations-services-backups-test-iam-permissions.md', 'Projects', 'Locations Services Backups Test Iam Permissions'] -- ['projects_locations-services-create.md', 'Projects', 'Locations Services Create'] -- ['projects_locations-services-databases-get-iam-policy.md', 'Projects', 'Locations Services Databases Get 
Iam Policy'] -- ['projects_locations-services-databases-set-iam-policy.md', 'Projects', 'Locations Services Databases Set Iam Policy'] -- ['projects_locations-services-databases-tables-get-iam-policy.md', 'Projects', 'Locations Services Databases Tables Get Iam Policy'] -- ['projects_locations-services-databases-tables-set-iam-policy.md', 'Projects', 'Locations Services Databases Tables Set Iam Policy'] -- ['projects_locations-services-databases-tables-test-iam-permissions.md', 'Projects', 'Locations Services Databases Tables Test Iam Permissions'] -- ['projects_locations-services-databases-test-iam-permissions.md', 'Projects', 'Locations Services Databases Test Iam Permissions'] -- ['projects_locations-services-delete.md', 'Projects', 'Locations Services Delete'] -- ['projects_locations-services-export-metadata.md', 'Projects', 'Locations Services Export Metadata'] -- ['projects_locations-services-get.md', 'Projects', 'Locations Services Get'] -- ['projects_locations-services-get-iam-policy.md', 'Projects', 'Locations Services Get Iam Policy'] -- ['projects_locations-services-list.md', 'Projects', 'Locations Services List'] -- ['projects_locations-services-metadata-imports-create.md', 'Projects', 'Locations Services Metadata Imports Create'] -- ['projects_locations-services-metadata-imports-get.md', 'Projects', 'Locations Services Metadata Imports Get'] -- ['projects_locations-services-metadata-imports-list.md', 'Projects', 'Locations Services Metadata Imports List'] -- ['projects_locations-services-metadata-imports-patch.md', 'Projects', 'Locations Services Metadata Imports Patch'] -- ['projects_locations-services-patch.md', 'Projects', 'Locations Services Patch'] -- ['projects_locations-services-remove-iam-policy.md', 'Projects', 'Locations Services Remove Iam Policy'] -- ['projects_locations-services-restore.md', 'Projects', 'Locations Services Restore'] -- ['projects_locations-services-set-iam-policy.md', 'Projects', 'Locations Services Set Iam Policy'] -- 
['projects_locations-services-test-iam-permissions.md', 'Projects', 'Locations Services Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Federations Create': 'projects_locations-federations-create.md' + - 'Locations Federations Delete': 'projects_locations-federations-delete.md' + - 'Locations Federations Get': 'projects_locations-federations-get.md' + - 'Locations Federations Get Iam Policy': 'projects_locations-federations-get-iam-policy.md' + - 'Locations Federations List': 'projects_locations-federations-list.md' + - 'Locations Federations Patch': 'projects_locations-federations-patch.md' + - 'Locations Federations Set Iam Policy': 'projects_locations-federations-set-iam-policy.md' + - 'Locations Federations Test Iam Permissions': 'projects_locations-federations-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Services Alter Location': 'projects_locations-services-alter-location.md' + - 'Locations Services Backups Create': 'projects_locations-services-backups-create.md' + - 'Locations Services Backups Delete': 'projects_locations-services-backups-delete.md' + - 'Locations Services Backups Get': 'projects_locations-services-backups-get.md' + - 'Locations Services Backups Get Iam Policy': 'projects_locations-services-backups-get-iam-policy.md' + - 'Locations Services Backups List': 'projects_locations-services-backups-list.md' + - 'Locations Services Backups Set Iam Policy': 'projects_locations-services-backups-set-iam-policy.md' + - 'Locations Services Backups Test Iam Permissions': 'projects_locations-services-backups-test-iam-permissions.md' + - 'Locations Services Create': 'projects_locations-services-create.md' + - 
'Locations Services Databases Get Iam Policy': 'projects_locations-services-databases-get-iam-policy.md' + - 'Locations Services Databases Set Iam Policy': 'projects_locations-services-databases-set-iam-policy.md' + - 'Locations Services Databases Tables Get Iam Policy': 'projects_locations-services-databases-tables-get-iam-policy.md' + - 'Locations Services Databases Tables Set Iam Policy': 'projects_locations-services-databases-tables-set-iam-policy.md' + - 'Locations Services Databases Tables Test Iam Permissions': 'projects_locations-services-databases-tables-test-iam-permissions.md' + - 'Locations Services Databases Test Iam Permissions': 'projects_locations-services-databases-test-iam-permissions.md' + - 'Locations Services Delete': 'projects_locations-services-delete.md' + - 'Locations Services Export Metadata': 'projects_locations-services-export-metadata.md' + - 'Locations Services Get': 'projects_locations-services-get.md' + - 'Locations Services Get Iam Policy': 'projects_locations-services-get-iam-policy.md' + - 'Locations Services List': 'projects_locations-services-list.md' + - 'Locations Services Metadata Imports Create': 'projects_locations-services-metadata-imports-create.md' + - 'Locations Services Metadata Imports Get': 'projects_locations-services-metadata-imports-get.md' + - 'Locations Services Metadata Imports List': 'projects_locations-services-metadata-imports-list.md' + - 'Locations Services Metadata Imports Patch': 'projects_locations-services-metadata-imports-patch.md' + - 'Locations Services Move Table To Database': 'projects_locations-services-move-table-to-database.md' + - 'Locations Services Patch': 'projects_locations-services-patch.md' + - 'Locations Services Query Metadata': 'projects_locations-services-query-metadata.md' + - 'Locations Services Remove Iam Policy': 'projects_locations-services-remove-iam-policy.md' + - 'Locations Services Restore': 'projects_locations-services-restore.md' + - 'Locations Services Set Iam Policy': 
'projects_locations-services-set-iam-policy.md' + - 'Locations Services Test Iam Permissions': 'projects_locations-services-test-iam-permissions.md' theme: readthedocs diff --git a/gen/metastore1_beta-cli/src/client.rs b/gen/metastore1_beta-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/metastore1_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/metastore1_beta-cli/src/main.rs b/gen/metastore1_beta-cli/src/main.rs index c3675b256e..dece831dee 100644 --- a/gen/metastore1_beta-cli/src/main.rs +++ b/gen/metastore1_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_metastore1_beta::{api, Error, oauth2}; +use google_metastore1_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,607 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_locations_federations_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "endpoint-uri" => Some(("endpointUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state-message" => Some(("stateMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "endpoint-uri", "labels", "name", "state", "state-message", "uid", "update-time", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Federation = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_federations_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + "federation-id" => { + call = call.federation_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + 
found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["federation-id", "request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_federations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_federations_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_federations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_federations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_federations_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_federations_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_federations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_federations_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + 
} else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_federations_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "endpoint-uri" => Some(("endpointUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state-message" => Some(("stateMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "endpoint-uri", "labels", "name", "state", "state-message", "uid", "update-time", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Federation = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_federations_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => 
{ + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_federations_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + 
err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_federations_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => 
return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_federations_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_federations_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or("")); @@ -113,7 +713,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -279,7 +879,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -331,6 +931,92 @@ where } } + async fn _projects_locations_services_alter_location(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "location-uri" => Some(("locationUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["location-uri", "resource-name"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::AlterMetadataResourceLocationRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_services_alter_location(request, opt.value_of("service").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut 
response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_services_backups_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -380,12 +1066,13 @@ where "service-revision.release-channel" => Some(("serviceRevision.releaseChannel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-revision.state" => Some(("serviceRevision.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-revision.state-message" => Some(("serviceRevision.stateMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-revision.telemetry-config.log-format" => Some(("serviceRevision.telemetryConfig.logFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-revision.tier" => Some(("serviceRevision.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-revision.uid" => Some(("serviceRevision.uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "service-revision.update-time" => Some(("serviceRevision.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["artifact-gcs-uri", "cloud-secret", "config-overrides", "create-time", "data-catalog-config", "database-type", "day-of-week", "description", "enabled", "encryption-config", "end-time", "endpoint-protocol", "endpoint-uri", "hive-metastore-config", "hour-of-day", "kerberos-config", "keytab", "kms-key", "krb5-config-gcs-uri", "labels", "maintenance-window", "metadata-integration", "name", "network", "port", 
"principal", "release-channel", "restoring-services", "service-revision", "state", "state-message", "tier", "uid", "update-time", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["artifact-gcs-uri", "cloud-secret", "config-overrides", "create-time", "data-catalog-config", "database-type", "day-of-week", "description", "enabled", "encryption-config", "end-time", "endpoint-protocol", "endpoint-uri", "hive-metastore-config", "hour-of-day", "kerberos-config", "keytab", "kms-key", "krb5-config-gcs-uri", "labels", "log-format", "maintenance-window", "metadata-integration", "name", "network", "port", "principal", "release-channel", "restoring-services", "service-revision", "state", "state-message", "telemetry-config", "tier", "uid", "update-time", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -567,7 +1254,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -626,7 +1313,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -897,11 +1584,12 @@ where "release-channel" => Some(("releaseChannel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state-message" => Some(("stateMessage", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), + "telemetry-config.log-format" => Some(("telemetryConfig.logFormat", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "tier" => Some(("tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["artifact-gcs-uri", "cloud-secret", "config-overrides", "create-time", "data-catalog-config", "database-type", "day-of-week", "enabled", "encryption-config", "endpoint-protocol", "endpoint-uri", "hive-metastore-config", "hour-of-day", "kerberos-config", "keytab", "kms-key", "krb5-config-gcs-uri", "labels", "maintenance-window", "metadata-integration", "name", "network", "port", "principal", "release-channel", "state", "state-message", "tier", "uid", "update-time", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["artifact-gcs-uri", "cloud-secret", "config-overrides", "create-time", "data-catalog-config", "database-type", "day-of-week", "enabled", "encryption-config", "endpoint-protocol", "endpoint-uri", "hive-metastore-config", "hour-of-day", "kerberos-config", "keytab", "kms-key", "krb5-config-gcs-uri", "labels", "log-format", "maintenance-window", "metadata-integration", "name", "network", "port", "principal", "release-channel", "state", "state-message", "telemetry-config", "tier", "uid", "update-time", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -975,7 +1663,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + 
call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1118,7 +1806,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1626,7 +2314,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1685,7 +2373,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1903,7 +2591,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2007,7 +2695,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -2059,6 +2747,93 @@ where } } + async fn _projects_locations_services_move_table_to_database(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "db-name" => Some(("dbName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination-db-name" => Some(("destinationDbName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "table-name" => Some(("tableName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["db-name", "destination-db-name", "table-name"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::MoveTableToDatabaseRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_services_move_table_to_database(request, opt.value_of("service").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_services_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2103,11 +2878,12 @@ where "release-channel" => Some(("releaseChannel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state-message" => Some(("stateMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "telemetry-config.log-format" => Some(("telemetryConfig.logFormat", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "tier" => Some(("tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["artifact-gcs-uri", "cloud-secret", "config-overrides", "create-time", "data-catalog-config", "database-type", "day-of-week", "enabled", "encryption-config", "endpoint-protocol", "endpoint-uri", "hive-metastore-config", "hour-of-day", "kerberos-config", "keytab", "kms-key", "krb5-config-gcs-uri", "labels", "maintenance-window", "metadata-integration", "name", "network", "port", "principal", "release-channel", "state", "state-message", "tier", "uid", "update-time", "version"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["artifact-gcs-uri", "cloud-secret", "config-overrides", "create-time", "data-catalog-config", "database-type", "day-of-week", "enabled", "encryption-config", "endpoint-protocol", "endpoint-uri", "hive-metastore-config", "hour-of-day", "kerberos-config", "keytab", "kms-key", "krb5-config-gcs-uri", "labels", "log-format", "maintenance-window", "metadata-integration", "name", "network", "port", "principal", "release-channel", "state", "state-message", "telemetry-config", "tier", "uid", "update-time", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2122,7 +2898,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -2174,6 +2950,91 @@ where } } + async fn 
_projects_locations_services_query_metadata(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "query" => Some(("query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["query"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::QueryMetadataRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_services_query_metadata(request, opt.value_of("service").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = 
Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_services_remove_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2197,8 +3058,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "asynchronous" => Some(("asynchronous", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["asynchronous"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2524,6 +3386,30 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { + ("locations-federations-create", Some(opt)) => { + call_result = self._projects_locations_federations_create(opt, dry_run, &mut err).await; + }, + ("locations-federations-delete", Some(opt)) => { + call_result = self._projects_locations_federations_delete(opt, dry_run, &mut err).await; + }, + ("locations-federations-get", Some(opt)) => { + call_result = self._projects_locations_federations_get(opt, dry_run, &mut err).await; + }, + ("locations-federations-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_federations_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-federations-list", Some(opt)) => { + call_result = self._projects_locations_federations_list(opt, dry_run, &mut err).await; + }, + ("locations-federations-patch", Some(opt)) => { + call_result = self._projects_locations_federations_patch(opt, dry_run, &mut err).await; + }, + ("locations-federations-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_federations_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-federations-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_federations_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-get", Some(opt)) => { call_result = self._projects_locations_get(opt, dry_run, &mut err).await; }, @@ -2539,6 +3425,9 @@ where ("locations-operations-list", Some(opt)) => { call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; }, + 
("locations-services-alter-location", Some(opt)) => { + call_result = self._projects_locations_services_alter_location(opt, dry_run, &mut err).await; + }, ("locations-services-backups-create", Some(opt)) => { call_result = self._projects_locations_services_backups_create(opt, dry_run, &mut err).await; }, @@ -2608,9 +3497,15 @@ where ("locations-services-metadata-imports-patch", Some(opt)) => { call_result = self._projects_locations_services_metadata_imports_patch(opt, dry_run, &mut err).await; }, + ("locations-services-move-table-to-database", Some(opt)) => { + call_result = self._projects_locations_services_move_table_to_database(opt, dry_run, &mut err).await; + }, ("locations-services-patch", Some(opt)) => { call_result = self._projects_locations_services_patch(opt, dry_run, &mut err).await; }, + ("locations-services-query-metadata", Some(opt)) => { + call_result = self._projects_locations_services_query_metadata(opt, dry_run, &mut err).await; + }, ("locations-services-remove-iam-policy", Some(opt)) => { call_result = self._projects_locations_services_remove_iam_policy(opt, dry_run, &mut err).await; }, @@ -2702,7 +3597,207 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-list', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-services-backups-create', 'locations-services-backups-delete', 'locations-services-backups-get', 'locations-services-backups-get-iam-policy', 'locations-services-backups-list', 'locations-services-backups-set-iam-policy', 'locations-services-backups-test-iam-permissions', 'locations-services-create', 'locations-services-databases-get-iam-policy', 'locations-services-databases-set-iam-policy', 'locations-services-databases-tables-get-iam-policy', 'locations-services-databases-tables-set-iam-policy', 'locations-services-databases-tables-test-iam-permissions', 'locations-services-databases-test-iam-permissions', 
'locations-services-delete', 'locations-services-export-metadata', 'locations-services-get', 'locations-services-get-iam-policy', 'locations-services-list', 'locations-services-metadata-imports-create', 'locations-services-metadata-imports-get', 'locations-services-metadata-imports-list', 'locations-services-metadata-imports-patch', 'locations-services-patch', 'locations-services-remove-iam-policy', 'locations-services-restore', 'locations-services-set-iam-policy' and 'locations-services-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-federations-create', 'locations-federations-delete', 'locations-federations-get', 'locations-federations-get-iam-policy', 'locations-federations-list', 'locations-federations-patch', 'locations-federations-set-iam-policy', 'locations-federations-test-iam-permissions', 'locations-get', 'locations-list', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-services-alter-location', 'locations-services-backups-create', 'locations-services-backups-delete', 'locations-services-backups-get', 'locations-services-backups-get-iam-policy', 'locations-services-backups-list', 'locations-services-backups-set-iam-policy', 'locations-services-backups-test-iam-permissions', 'locations-services-create', 'locations-services-databases-get-iam-policy', 'locations-services-databases-set-iam-policy', 'locations-services-databases-tables-get-iam-policy', 'locations-services-databases-tables-set-iam-policy', 'locations-services-databases-tables-test-iam-permissions', 'locations-services-databases-test-iam-permissions', 'locations-services-delete', 'locations-services-export-metadata', 'locations-services-get', 'locations-services-get-iam-policy', 'locations-services-list', 'locations-services-metadata-imports-create', 'locations-services-metadata-imports-get', 'locations-services-metadata-imports-list', 'locations-services-metadata-imports-patch', 'locations-services-move-table-to-database', 
'locations-services-patch', 'locations-services-query-metadata', 'locations-services-remove-iam-policy', 'locations-services-restore', 'locations-services-set-iam-policy' and 'locations-services-test-iam-permissions'", vec![ + ("locations-federations-create", + Some(r##"Creates a metastore federation in a project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-federations-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The relative resource name of the location in which to create a federation service, in the following form:projects/{project_number}/locations/{location_id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-federations-delete", + Some(r##"Deletes a single federation."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-federations-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The relative resource name of the metastore federation to delete, in the following form:projects/{project_number}/locations/{location_id}/federations/{federation_id}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-federations-get", + Some(r##"Gets the details of a single federation."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-federations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The relative resource name of the metastore federation to retrieve, in the following form:projects/{project_number}/locations/{location_id}/federations/{federation_id}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-federations-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-federations-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-federations-list", + Some(r##"Lists federations in a project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-federations-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The relative resource name of the location of metastore federations to list, in the following form: projects/{project_number}/locations/{location_id}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-federations-patch", + Some(r##"Updates the fields of a federation."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-federations-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Immutable. 
The relative resource name of the federation, of the form: projects/{project_number}/locations/{location_id}/federations/{federation_id}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-federations-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy.Can return NOT_FOUND, INVALID_ARGUMENT, and PERMISSION_DENIED errors."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-federations-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-federations-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this will return an empty set of permissions, not a NOT_FOUND error.Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-federations-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-get", @@ -2807,6 +3902,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-services-alter-location", + Some(r##"Alter metadata resource location. The metadata resource can be a database, table, or partition. 
This functionality only updates the parent directory for the respective metadata resource and does not transfer any existing data to the new location."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-services-alter-location", + vec![ + (Some(r##"service"##), + None, + Some(r##"Required. The relative resource name of the metastore service to mutate metadata, in the following format:projects/{project_id}/locations/{location_id}/services/{service_id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2891,7 +4014,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2935,7 +4058,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2963,7 +4086,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3019,7 +4142,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3041,7 +4164,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3069,7 +4192,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3091,7 +4214,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3119,7 +4242,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3147,7 +4270,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3247,7 +4370,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3379,6 +4502,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-services-move-table-to-database", + Some(r##"Move a table to another database."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-services-move-table-to-database", + vec![ + (Some(r##"service"##), + None, + Some(r##"Required. The relative resource name of the metastore service to mutate metadata, in the following format:projects/{project_id}/locations/{location_id}/services/{service_id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3391,7 +4542,35 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Immutable. The relative resource name of the metastore service, of the form:projects/{project_number}/locations/{location_id}/services/{service_id}."##), + Some(r##"Immutable. 
The relative resource name of the metastore service, in the following format:projects/{project_number}/locations/{location_id}/services/{service_id}."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-services-query-metadata", + Some(r##"Query DPMS metadata."##), + "Details at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli/projects_locations-services-query-metadata", + vec![ + (Some(r##"service"##), + None, + Some(r##"Required. The relative resource name of the metastore service to query metadata, in the following format:projects/{project_id}/locations/{location_id}/services/{service_id}."##), Some(true), Some(false)), @@ -3475,7 +4654,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3503,7 +4682,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See Resource names (https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3531,7 +4710,7 @@ async fn main() { let mut app = App::new("metastore1-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220222") + .version("5.0.2+20230111") .about("The Dataproc Metastore API is used to manage the lifecycle and configuration of metastore services.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_metastore1_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/metastore1_beta/Cargo.toml b/gen/metastore1_beta/Cargo.toml index d6f959619a..512889c6ca 100644 --- a/gen/metastore1_beta/Cargo.toml +++ b/gen/metastore1_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-metastore1_beta" -version = "5.0.2-beta-1+20230111" +version = "5.0.2+20230111" authors = ["Sebastian Thiel "] description = "A complete library to interact with Dataproc Metastore (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/metastore1_beta" homepage = "https://cloud.google.com/dataproc-metastore/docs" -documentation = "https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111" +documentation = "https://docs.rs/google-metastore1_beta/5.0.2+20230111" license = "MIT" keywords = ["metastore", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/metastore1_beta/README.md b/gen/metastore1_beta/README.md index afb0f10700..285d7af85e 100644 --- a/gen/metastore1_beta/README.md +++ b/gen/metastore1_beta/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-metastore1_beta` library allows access to all features of the *Google Dataproc Metastore* service. -This documentation was generated from *Dataproc Metastore* crate version *5.0.2-beta-1+20230111*, where *20230111* is the exact revision of the *metastore:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Dataproc Metastore* crate version *5.0.2+20230111*, where *20230111* is the exact revision of the *metastore:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Dataproc Metastore* *v1_beta* API can be found at the [official documentation site](https://cloud.google.com/dataproc-metastore/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/DataprocMetastore) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/DataprocMetastore) ... * projects - * [*locations federations create*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationFederationCreateCall), [*locations federations delete*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationFederationDeleteCall), [*locations federations get*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationFederationGetCall), [*locations federations get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationFederationGetIamPolicyCall), [*locations federations list*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationFederationListCall), [*locations federations patch*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationFederationPatchCall), [*locations federations set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationFederationSetIamPolicyCall), [*locations federations test iam 
permissions*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationFederationTestIamPermissionCall), [*locations get*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationListCall), [*locations operations delete*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationOperationListCall), [*locations services alter location*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceAlterLocationCall), [*locations services backups create*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupCreateCall), [*locations services backups delete*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupDeleteCall), [*locations services backups get*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupGetCall), [*locations services backups get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupGetIamPolicyCall), [*locations services backups list*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupListCall), [*locations services backups set iam 
policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupSetIamPolicyCall), [*locations services backups test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupTestIamPermissionCall), [*locations services create*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceCreateCall), [*locations services databases get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseGetIamPolicyCall), [*locations services databases set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseSetIamPolicyCall), [*locations services databases tables get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseTableGetIamPolicyCall), [*locations services databases tables set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseTableSetIamPolicyCall), [*locations services databases tables test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseTableTestIamPermissionCall), [*locations services databases test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseTestIamPermissionCall), [*locations services delete*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceDeleteCall), [*locations services export metadata*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceExportMetadataCall), 
[*locations services get*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceGetCall), [*locations services get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceGetIamPolicyCall), [*locations services list*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceListCall), [*locations services metadata imports create*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceMetadataImportCreateCall), [*locations services metadata imports get*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceMetadataImportGetCall), [*locations services metadata imports list*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceMetadataImportListCall), [*locations services metadata imports patch*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceMetadataImportPatchCall), [*locations services move table to database*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceMoveTableToDatabaseCall), [*locations services patch*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServicePatchCall), [*locations services query metadata*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceQueryMetadataCall), [*locations services remove iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceRemoveIamPolicyCall), [*locations services 
restore*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceRestoreCall), [*locations services set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceSetIamPolicyCall) and [*locations services test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/api::ProjectLocationServiceTestIamPermissionCall) + * [*locations federations create*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationFederationCreateCall), [*locations federations delete*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationFederationDeleteCall), [*locations federations get*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationFederationGetCall), [*locations federations get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationFederationGetIamPolicyCall), [*locations federations list*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationFederationListCall), [*locations federations patch*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationFederationPatchCall), [*locations federations set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationFederationSetIamPolicyCall), [*locations federations test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationFederationTestIamPermissionCall), [*locations get*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationGetCall), [*locations 
list*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationListCall), [*locations operations delete*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationOperationListCall), [*locations services alter location*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceAlterLocationCall), [*locations services backups create*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupCreateCall), [*locations services backups delete*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupDeleteCall), [*locations services backups get*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupGetCall), [*locations services backups get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupGetIamPolicyCall), [*locations services backups list*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupListCall), [*locations services backups set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupSetIamPolicyCall), [*locations services backups test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceBackupTestIamPermissionCall), [*locations services 
create*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceCreateCall), [*locations services databases get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseGetIamPolicyCall), [*locations services databases set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseSetIamPolicyCall), [*locations services databases tables get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseTableGetIamPolicyCall), [*locations services databases tables set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseTableSetIamPolicyCall), [*locations services databases tables test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseTableTestIamPermissionCall), [*locations services databases test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceDatabaseTestIamPermissionCall), [*locations services delete*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceDeleteCall), [*locations services export metadata*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceExportMetadataCall), [*locations services get*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceGetCall), [*locations services get iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceGetIamPolicyCall), [*locations services 
list*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceListCall), [*locations services metadata imports create*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceMetadataImportCreateCall), [*locations services metadata imports get*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceMetadataImportGetCall), [*locations services metadata imports list*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceMetadataImportListCall), [*locations services metadata imports patch*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceMetadataImportPatchCall), [*locations services move table to database*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceMoveTableToDatabaseCall), [*locations services patch*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServicePatchCall), [*locations services query metadata*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceQueryMetadataCall), [*locations services remove iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceRemoveIamPolicyCall), [*locations services restore*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceRestoreCall), [*locations services set iam policy*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceSetIamPolicyCall) and [*locations services test iam permissions*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/api::ProjectLocationServiceTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following 
*Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/DataprocMetastore)** +* **[Hub](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/DataprocMetastore)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::Part)** + * **[Parts](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::CallBuilder)** +* **[Activities](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease 
browsing. @@ -141,17 +141,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -161,29 +161,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::RequestValue) and -[decodable](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::RequestValue) and +[decodable](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-metastore1_beta/5.0.2-beta-1+20230111/google_metastore1_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-metastore1_beta/5.0.2+20230111/google_metastore1_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/metastore1_beta/src/api.rs b/gen/metastore1_beta/src/api.rs index 15b5034b32..16a5744991 100644 --- a/gen/metastore1_beta/src/api.rs +++ b/gen/metastore1_beta/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> DataprocMetastore { DataprocMetastore { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://metastore.googleapis.com/".to_string(), _root_url: "https://metastore.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> DataprocMetastore { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/metastore1_beta/src/client.rs b/gen/metastore1_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/metastore1_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/metastore1_beta/src/lib.rs b/gen/metastore1_beta/src/lib.rs index 85daecc1ad..73117a148c 100644 --- a/gen/metastore1_beta/src/lib.rs +++ b/gen/metastore1_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Dataproc Metastore* crate version *5.0.2-beta-1+20230111*, where *20230111* is the exact revision of the *metastore:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Dataproc Metastore* crate version *5.0.2+20230111*, where *20230111* is the exact revision of the *metastore:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Dataproc Metastore* *v1_beta* API can be found at the //! [official documentation site](https://cloud.google.com/dataproc-metastore/docs). diff --git a/gen/mirror1-cli/Cargo.toml b/gen/mirror1-cli/Cargo.toml index d7adb25824..ffdb50433e 100644 --- a/gen/mirror1-cli/Cargo.toml +++ b/gen/mirror1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mirror1-cli" -version = "4.0.1+20190424" +version = "5.0.2+20190424" authors = ["Sebastian Thiel "] description = "A complete library to interact with mirror (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mirror1-cli" @@ -20,13 +20,13 @@ name = "mirror1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mirror1] path = "../mirror1" -version = "4.0.1+20190424" +version = "5.0.2+20190424" + diff --git a/gen/mirror1-cli/README.md b/gen/mirror1-cli/README.md index b8fb7ea428..99f98053c2 100644 --- a/gen/mirror1-cli/README.md +++ b/gen/mirror1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage 
-This documentation was generated from the *mirror* API at revision *20190424*. The CLI is at version *4.0.1*. +This documentation was generated from the *mirror* API at revision *20190424*. The CLI is at version *5.0.2*. ```bash mirror1 [options] diff --git a/gen/mirror1-cli/mkdocs.yml b/gen/mirror1-cli/mkdocs.yml index 0007fb3f4a..19a596121a 100644 --- a/gen/mirror1-cli/mkdocs.yml +++ b/gen/mirror1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: mirror v4.0.1+20190424 +site_name: mirror v5.0.2+20190424 site_url: http://byron.github.io/google-apis-rs/google-mirror1-cli site_description: A complete library to interact with mirror (protocol v1) @@ -7,32 +7,38 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mirror1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_insert.md', 'Accounts', 'Insert'] -- ['contacts_delete.md', 'Contacts', 'Delete'] -- ['contacts_get.md', 'Contacts', 'Get'] -- ['contacts_insert.md', 'Contacts', 'Insert'] -- ['contacts_list.md', 'Contacts', 'List'] -- ['contacts_patch.md', 'Contacts', 'Patch'] -- ['contacts_update.md', 'Contacts', 'Update'] -- ['locations_get.md', 'Locations', 'Get'] -- ['locations_list.md', 'Locations', 'List'] -- ['settings_get.md', 'Settings', 'Get'] -- ['subscriptions_delete.md', 'Subscriptions', 'Delete'] -- ['subscriptions_insert.md', 'Subscriptions', 'Insert'] -- ['subscriptions_list.md', 'Subscriptions', 'List'] -- ['subscriptions_update.md', 'Subscriptions', 'Update'] -- ['timeline_attachments-delete.md', 'Timeline', 'Attachments Delete'] -- ['timeline_attachments-get.md', 'Timeline', 'Attachments Get'] -- ['timeline_attachments-insert.md', 'Timeline', 'Attachments Insert'] -- ['timeline_attachments-list.md', 'Timeline', 'Attachments List'] -- ['timeline_delete.md', 'Timeline', 'Delete'] -- ['timeline_get.md', 'Timeline', 'Get'] -- ['timeline_insert.md', 'Timeline', 'Insert'] -- ['timeline_list.md', 'Timeline', 'List'] -- ['timeline_patch.md', 'Timeline', 'Patch'] 
-- ['timeline_update.md', 'Timeline', 'Update'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Insert': 'accounts_insert.md' +- 'Contacts': + - 'Delete': 'contacts_delete.md' + - 'Get': 'contacts_get.md' + - 'Insert': 'contacts_insert.md' + - 'List': 'contacts_list.md' + - 'Patch': 'contacts_patch.md' + - 'Update': 'contacts_update.md' +- 'Locations': + - 'Get': 'locations_get.md' + - 'List': 'locations_list.md' +- 'Settings': + - 'Get': 'settings_get.md' +- 'Subscriptions': + - 'Delete': 'subscriptions_delete.md' + - 'Insert': 'subscriptions_insert.md' + - 'List': 'subscriptions_list.md' + - 'Update': 'subscriptions_update.md' +- 'Timeline': + - 'Attachments Delete': 'timeline_attachments-delete.md' + - 'Attachments Get': 'timeline_attachments-get.md' + - 'Attachments Insert': 'timeline_attachments-insert.md' + - 'Attachments List': 'timeline_attachments-list.md' + - 'Delete': 'timeline_delete.md' + - 'Get': 'timeline_get.md' + - 'Insert': 'timeline_insert.md' + - 'List': 'timeline_list.md' + - 'Patch': 'timeline_patch.md' + - 'Update': 'timeline_update.md' theme: readthedocs diff --git a/gen/mirror1-cli/src/client.rs b/gen/mirror1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mirror1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - 
String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mirror1-cli/src/main.rs b/gen/mirror1-cli/src/main.rs index 789112d380..7abd658247 100644 --- a/gen/mirror1-cli/src/main.rs +++ b/gen/mirror1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mirror1::{api, Error, oauth2}; +use google_mirror1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -1460,7 +1459,7 @@ where call = call.source_item_id(value.unwrap_or("")); }, "pinned-only" => { - call = call.pinned_only(arg_from_str(value.unwrap_or("false"), err, "pinned-only", "boolean")); + call = call.pinned_only( value.map(|v| arg_from_str(v, err, "pinned-only", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -1469,10 +1468,10 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, "bundle-id" => { call = call.bundle_id(value.unwrap_or("")); @@ -2555,7 +2554,7 @@ async fn main() { let mut app = App::new("mirror1") .author("Sebastian Thiel ") - .version("4.0.1+20190424") + 
.version("5.0.2+20190424") .about("Interacts with Glass users via the timeline.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mirror1_cli") .arg(Arg::with_name("url") diff --git a/gen/mirror1/Cargo.toml b/gen/mirror1/Cargo.toml index cf3fd3c31d..41bb7775a9 100644 --- a/gen/mirror1/Cargo.toml +++ b/gen/mirror1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-mirror1" -version = "5.0.2-beta-1+20190424" +version = "5.0.2+20190424" authors = ["Sebastian Thiel "] description = "A complete library to interact with mirror (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mirror1" homepage = "https://developers.google.com/glass" -documentation = "https://docs.rs/google-mirror1/5.0.2-beta-1+20190424" +documentation = "https://docs.rs/google-mirror1/5.0.2+20190424" license = "MIT" keywords = ["mirror", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mirror1/README.md b/gen/mirror1/README.md index 293b5e1074..789ef08547 100644 --- a/gen/mirror1/README.md +++ b/gen/mirror1/README.md @@ -5,37 +5,37 @@ DO NOT EDIT ! --> The `google-mirror1` library allows access to all features of the *Google mirror* service. -This documentation was generated from *mirror* crate version *5.0.2-beta-1+20190424*, where *20190424* is the exact revision of the *mirror:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *mirror* crate version *5.0.2+20190424*, where *20190424* is the exact revision of the *mirror:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *mirror* *v1* API can be found at the [official documentation site](https://developers.google.com/glass). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/Mirror) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/Mirror) ... -* [accounts](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::Account) - * [*insert*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::AccountInsertCall) -* [contacts](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::Contact) - * [*delete*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::ContactDeleteCall), [*get*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::ContactGetCall), [*insert*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::ContactInsertCall), [*list*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::ContactListCall), [*patch*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::ContactPatchCall) and [*update*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::ContactUpdateCall) -* [locations](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::Location) - * [*get*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::LocationGetCall) and [*list*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::LocationListCall) -* [settings](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::Setting) - * [*get*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::SettingGetCall) -* [subscriptions](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::Subscription) - * [*delete*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::SubscriptionDeleteCall), [*insert*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::SubscriptionInsertCall), [*list*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::SubscriptionListCall) and 
[*update*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::SubscriptionUpdateCall) +* [accounts](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::Account) + * [*insert*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::AccountInsertCall) +* [contacts](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::Contact) + * [*delete*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::ContactDeleteCall), [*get*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::ContactGetCall), [*insert*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::ContactInsertCall), [*list*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::ContactListCall), [*patch*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::ContactPatchCall) and [*update*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::ContactUpdateCall) +* [locations](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::Location) + * [*get*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::LocationGetCall) and [*list*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::LocationListCall) +* [settings](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::Setting) + * [*get*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::SettingGetCall) +* [subscriptions](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::Subscription) + * [*delete*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::SubscriptionDeleteCall), [*insert*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::SubscriptionInsertCall), [*list*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::SubscriptionListCall) and [*update*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::SubscriptionUpdateCall) * timeline - * [*attachments 
delete*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineAttachmentDeleteCall), [*attachments get*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineAttachmentGetCall), [*attachments insert*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineAttachmentInsertCall), [*attachments list*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineAttachmentListCall), [*delete*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineDeleteCall), [*get*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineGetCall), [*insert*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineInsertCall), [*list*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineListCall), [*patch*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelinePatchCall) and [*update*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineUpdateCall) + * [*attachments delete*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineAttachmentDeleteCall), [*attachments get*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineAttachmentGetCall), [*attachments insert*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineAttachmentInsertCall), [*attachments list*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineAttachmentListCall), [*delete*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineDeleteCall), [*get*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineGetCall), [*insert*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineInsertCall), [*list*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineListCall), 
[*patch*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelinePatchCall) and [*update*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineUpdateCall) Upload supported by ... -* [*attachments insert timeline*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineAttachmentInsertCall) -* [*insert timeline*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineInsertCall) -* [*update timeline*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineUpdateCall) +* [*attachments insert timeline*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineAttachmentInsertCall) +* [*insert timeline*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineInsertCall) +* [*update timeline*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineUpdateCall) Download supported by ... -* [*attachments get timeline*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/api::TimelineAttachmentGetCall) +* [*attachments get timeline*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/api::TimelineAttachmentGetCall) @@ -43,17 +43,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/Mirror)** +* **[Hub](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/Mirror)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::CallBuilder) -* **[Resources](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::CallBuilder) +* **[Resources](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::Part)** + * **[Parts](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -149,17 +149,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -169,29 +169,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::Delegate) to the -[Method Builder](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::Delegate) to the +[Method Builder](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::RequestValue) and -[decodable](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::RequestValue) and +[decodable](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mirror1/5.0.2-beta-1+20190424/google_mirror1/client::RequestValue) are moved +* [request values](https://docs.rs/google-mirror1/5.0.2+20190424/google_mirror1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/mirror1/src/api.rs b/gen/mirror1/src/api.rs index b0b42f51af..3b3a581942 100644 --- a/gen/mirror1/src/api.rs +++ b/gen/mirror1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Mirror { Mirror { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/mirror/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -155,7 +155,7 @@ impl<'a, S> Mirror { } /// Set the user-agent header field to use in all requests to the server. 
- /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mirror1/src/client.rs b/gen/mirror1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mirror1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mirror1/src/lib.rs b/gen/mirror1/src/lib.rs index 3556971ccc..8463fae35a 100644 --- a/gen/mirror1/src/lib.rs +++ b/gen/mirror1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *mirror* crate version *5.0.2-beta-1+20190424*, where *20190424* is the exact revision of the *mirror:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *mirror* crate version *5.0.2+20190424*, where *20190424* is the exact revision of the *mirror:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *mirror* *v1* API can be found at the //! [official documentation site](https://developers.google.com/glass). diff --git a/gen/ml1-cli/Cargo.toml b/gen/ml1-cli/Cargo.toml index 0296572b7f..7f694a15c1 100644 --- a/gen/ml1-cli/Cargo.toml +++ b/gen/ml1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-ml1-cli" -version = "4.0.1+20220212" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Machine Learning Engine (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/ml1-cli" @@ -20,13 +20,13 @@ name = "ml1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-ml1] path = "../ml1" -version = "4.0.1+20220212" +version = "5.0.2+20230106" + diff --git a/gen/ml1-cli/README.md b/gen/ml1-cli/README.md index e3e59f6993..ccc7fb6f22 100644 --- a/gen/ml1-cli/README.md +++ b/gen/ml1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Machine Learning Engine* API at revision 
*20220212*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Machine Learning Engine* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash ml1 [options] diff --git a/gen/ml1-cli/mkdocs.yml b/gen/ml1-cli/mkdocs.yml index 772d5a5eec..aba8747c13 100644 --- a/gen/ml1-cli/mkdocs.yml +++ b/gen/ml1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Machine Learning Engine v4.0.1+20220212 +site_name: Cloud Machine Learning Engine v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-ml1-cli site_description: A complete library to interact with Cloud Machine Learning Engine (protocol v1) @@ -7,54 +7,55 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/ml1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_explain.md', 'Projects', 'Explain'] -- ['projects_get-config.md', 'Projects', 'Get Config'] -- ['projects_jobs-cancel.md', 'Projects', 'Jobs Cancel'] -- ['projects_jobs-create.md', 'Projects', 'Jobs Create'] -- ['projects_jobs-get.md', 'Projects', 'Jobs Get'] -- ['projects_jobs-get-iam-policy.md', 'Projects', 'Jobs Get Iam Policy'] -- ['projects_jobs-list.md', 'Projects', 'Jobs List'] -- ['projects_jobs-patch.md', 'Projects', 'Jobs Patch'] -- ['projects_jobs-set-iam-policy.md', 'Projects', 'Jobs Set Iam Policy'] -- ['projects_jobs-test-iam-permissions.md', 'Projects', 'Jobs Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-studies-create.md', 'Projects', 'Locations Studies Create'] -- ['projects_locations-studies-delete.md', 'Projects', 'Locations Studies Delete'] -- ['projects_locations-studies-get.md', 'Projects', 'Locations Studies Get'] -- 
['projects_locations-studies-list.md', 'Projects', 'Locations Studies List'] -- ['projects_locations-studies-trials-add-measurement.md', 'Projects', 'Locations Studies Trials Add Measurement'] -- ['projects_locations-studies-trials-check-early-stopping-state.md', 'Projects', 'Locations Studies Trials Check Early Stopping State'] -- ['projects_locations-studies-trials-complete.md', 'Projects', 'Locations Studies Trials Complete'] -- ['projects_locations-studies-trials-create.md', 'Projects', 'Locations Studies Trials Create'] -- ['projects_locations-studies-trials-delete.md', 'Projects', 'Locations Studies Trials Delete'] -- ['projects_locations-studies-trials-get.md', 'Projects', 'Locations Studies Trials Get'] -- ['projects_locations-studies-trials-list.md', 'Projects', 'Locations Studies Trials List'] -- ['projects_locations-studies-trials-list-optimal-trials.md', 'Projects', 'Locations Studies Trials List Optimal Trials'] -- ['projects_locations-studies-trials-stop.md', 'Projects', 'Locations Studies Trials Stop'] -- ['projects_locations-studies-trials-suggest.md', 'Projects', 'Locations Studies Trials Suggest'] -- ['projects_models-create.md', 'Projects', 'Models Create'] -- ['projects_models-delete.md', 'Projects', 'Models Delete'] -- ['projects_models-get.md', 'Projects', 'Models Get'] -- ['projects_models-get-iam-policy.md', 'Projects', 'Models Get Iam Policy'] -- ['projects_models-list.md', 'Projects', 'Models List'] -- ['projects_models-patch.md', 'Projects', 'Models Patch'] -- ['projects_models-set-iam-policy.md', 'Projects', 'Models Set Iam Policy'] -- ['projects_models-test-iam-permissions.md', 'Projects', 'Models Test Iam Permissions'] -- ['projects_models-versions-create.md', 'Projects', 'Models Versions Create'] -- ['projects_models-versions-delete.md', 'Projects', 'Models Versions Delete'] -- ['projects_models-versions-get.md', 'Projects', 'Models Versions Get'] -- ['projects_models-versions-list.md', 'Projects', 'Models Versions List'] -- 
['projects_models-versions-patch.md', 'Projects', 'Models Versions Patch'] -- ['projects_models-versions-set-default.md', 'Projects', 'Models Versions Set Default'] -- ['projects_operations-cancel.md', 'Projects', 'Operations Cancel'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_operations-list.md', 'Projects', 'Operations List'] -- ['projects_predict.md', 'Projects', 'Predict'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Explain': 'projects_explain.md' + - 'Get Config': 'projects_get-config.md' + - 'Jobs Cancel': 'projects_jobs-cancel.md' + - 'Jobs Create': 'projects_jobs-create.md' + - 'Jobs Get': 'projects_jobs-get.md' + - 'Jobs Get Iam Policy': 'projects_jobs-get-iam-policy.md' + - 'Jobs List': 'projects_jobs-list.md' + - 'Jobs Patch': 'projects_jobs-patch.md' + - 'Jobs Set Iam Policy': 'projects_jobs-set-iam-policy.md' + - 'Jobs Test Iam Permissions': 'projects_jobs-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Studies Create': 'projects_locations-studies-create.md' + - 'Locations Studies Delete': 'projects_locations-studies-delete.md' + - 'Locations Studies Get': 'projects_locations-studies-get.md' + - 'Locations Studies List': 'projects_locations-studies-list.md' + - 'Locations Studies Trials Add Measurement': 'projects_locations-studies-trials-add-measurement.md' + - 'Locations Studies Trials Check Early Stopping State': 'projects_locations-studies-trials-check-early-stopping-state.md' + - 'Locations Studies Trials Complete': 'projects_locations-studies-trials-complete.md' + - 'Locations Studies Trials Create': 'projects_locations-studies-trials-create.md' + - 'Locations Studies Trials Delete': 'projects_locations-studies-trials-delete.md' + - 'Locations Studies Trials Get': 
'projects_locations-studies-trials-get.md' + - 'Locations Studies Trials List': 'projects_locations-studies-trials-list.md' + - 'Locations Studies Trials List Optimal Trials': 'projects_locations-studies-trials-list-optimal-trials.md' + - 'Locations Studies Trials Stop': 'projects_locations-studies-trials-stop.md' + - 'Locations Studies Trials Suggest': 'projects_locations-studies-trials-suggest.md' + - 'Models Create': 'projects_models-create.md' + - 'Models Delete': 'projects_models-delete.md' + - 'Models Get': 'projects_models-get.md' + - 'Models Get Iam Policy': 'projects_models-get-iam-policy.md' + - 'Models List': 'projects_models-list.md' + - 'Models Patch': 'projects_models-patch.md' + - 'Models Set Iam Policy': 'projects_models-set-iam-policy.md' + - 'Models Test Iam Permissions': 'projects_models-test-iam-permissions.md' + - 'Models Versions Create': 'projects_models-versions-create.md' + - 'Models Versions Delete': 'projects_models-versions-delete.md' + - 'Models Versions Get': 'projects_models-versions-get.md' + - 'Models Versions List': 'projects_models-versions-list.md' + - 'Models Versions Patch': 'projects_models-versions-patch.md' + - 'Models Versions Set Default': 'projects_models-versions-set-default.md' + - 'Operations Cancel': 'projects_operations-cancel.md' + - 'Operations Get': 'projects_operations-get.md' + - 'Operations List': 'projects_operations-list.md' + - 'Predict': 'projects_predict.md' theme: readthedocs diff --git a/gen/ml1-cli/src/client.rs b/gen/ml1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/ml1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, 
Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/ml1-cli/src/main.rs b/gen/ml1-cli/src/main.rs index ce7b8f0141..acfb6d11c9 100644 --- a/gen/ml1-cli/src/main.rs +++ b/gen/ml1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_ml1::{api, Error, oauth2}; +use google_ml1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -514,7 +513,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -573,7 +572,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -762,7 +761,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1045,7 +1044,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2446,7 +2445,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2505,7 +2504,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2637,7 +2636,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3090,7 +3089,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3215,7 +3214,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3462,7 +3461,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3959,7 +3958,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4031,7 +4030,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4059,7 +4058,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4603,7 +4602,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4675,7 +4674,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4703,7 +4702,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4975,7 +4974,7 @@ async fn main() { let mut app = App::new("ml1") .author("Sebastian Thiel ") - .version("4.0.1+20220212") + .version("5.0.2+20230106") .about("An API to enable creating and using machine learning models.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_ml1_cli") .arg(Arg::with_name("url") diff --git a/gen/ml1/Cargo.toml b/gen/ml1/Cargo.toml index 731c48e859..3434e89a8f 100644 --- a/gen/ml1/Cargo.toml +++ b/gen/ml1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-ml1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Machine Learning Engine (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/ml1" homepage = "https://cloud.google.com/ml/" -documentation = "https://docs.rs/google-ml1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-ml1/5.0.2+20230106" license = "MIT" keywords = ["ml", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/ml1/README.md b/gen/ml1/README.md index d4822f8685..4f9a2c2fe7 100644 --- a/gen/ml1/README.md +++ b/gen/ml1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-ml1` library allows access to all features of the *Google Cloud Machine Learning Engine* service. -This documentation was generated from *Cloud Machine Learning Engine* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *ml:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Machine Learning Engine* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *ml:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Cloud Machine Learning Engine* *v1* API can be found at the [official documentation site](https://cloud.google.com/ml/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/CloudMachineLearningEngine) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/CloudMachineLearningEngine) ... * projects - * [*explain*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectExplainCall), [*get config*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectGetConfigCall), [*jobs cancel*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectJobCancelCall), [*jobs create*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectJobCreateCall), [*jobs get*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectJobGetCall), [*jobs get iam policy*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectJobGetIamPolicyCall), [*jobs list*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectJobListCall), [*jobs patch*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectJobPatchCall), [*jobs set iam policy*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectJobSetIamPolicyCall), [*jobs test iam permissions*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectJobTestIamPermissionCall), [*locations get*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationOperationCancelCall), [*locations operations 
get*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationOperationGetCall), [*locations studies create*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyCreateCall), [*locations studies delete*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyDeleteCall), [*locations studies get*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyGetCall), [*locations studies list*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyListCall), [*locations studies trials add measurement*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialAddMeasurementCall), [*locations studies trials check early stopping state*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialCheckEarlyStoppingStateCall), [*locations studies trials complete*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialCompleteCall), [*locations studies trials create*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialCreateCall), [*locations studies trials delete*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialDeleteCall), [*locations studies trials get*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialGetCall), [*locations studies trials list*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialListCall), [*locations studies trials list optimal trials*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialListOptimalTrialCall), [*locations studies trials stop*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialStopCall), [*locations studies trials 
suggest*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectLocationStudyTrialSuggestCall), [*models create*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelCreateCall), [*models delete*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelDeleteCall), [*models get*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelGetCall), [*models get iam policy*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelGetIamPolicyCall), [*models list*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelListCall), [*models patch*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelPatchCall), [*models set iam policy*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelSetIamPolicyCall), [*models test iam permissions*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelTestIamPermissionCall), [*models versions create*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelVersionCreateCall), [*models versions delete*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelVersionDeleteCall), [*models versions get*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelVersionGetCall), [*models versions list*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelVersionListCall), [*models versions patch*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelVersionPatchCall), [*models versions set default*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectModelVersionSetDefaultCall), [*operations cancel*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectOperationCancelCall), [*operations get*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectOperationGetCall), [*operations 
list*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectOperationListCall) and [*predict*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/api::ProjectPredictCall) + * [*explain*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectExplainCall), [*get config*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectGetConfigCall), [*jobs cancel*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectJobCancelCall), [*jobs create*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectJobCreateCall), [*jobs get*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectJobGetCall), [*jobs get iam policy*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectJobGetIamPolicyCall), [*jobs list*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectJobListCall), [*jobs patch*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectJobPatchCall), [*jobs set iam policy*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectJobSetIamPolicyCall), [*jobs test iam permissions*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectJobTestIamPermissionCall), [*locations get*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationOperationCancelCall), [*locations operations get*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationOperationGetCall), [*locations studies create*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyCreateCall), [*locations studies delete*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyDeleteCall), [*locations studies 
get*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyGetCall), [*locations studies list*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyListCall), [*locations studies trials add measurement*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialAddMeasurementCall), [*locations studies trials check early stopping state*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialCheckEarlyStoppingStateCall), [*locations studies trials complete*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialCompleteCall), [*locations studies trials create*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialCreateCall), [*locations studies trials delete*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialDeleteCall), [*locations studies trials get*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialGetCall), [*locations studies trials list*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialListCall), [*locations studies trials list optimal trials*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialListOptimalTrialCall), [*locations studies trials stop*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialStopCall), [*locations studies trials suggest*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectLocationStudyTrialSuggestCall), [*models create*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelCreateCall), [*models delete*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelDeleteCall), [*models get*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelGetCall), [*models get iam 
policy*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelGetIamPolicyCall), [*models list*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelListCall), [*models patch*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelPatchCall), [*models set iam policy*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelSetIamPolicyCall), [*models test iam permissions*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelTestIamPermissionCall), [*models versions create*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelVersionCreateCall), [*models versions delete*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelVersionDeleteCall), [*models versions get*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelVersionGetCall), [*models versions list*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelVersionListCall), [*models versions patch*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelVersionPatchCall), [*models versions set default*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectModelVersionSetDefaultCall), [*operations cancel*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectOperationCancelCall), [*operations get*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectOperationListCall) and [*predict*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/api::ProjectPredictCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/CloudMachineLearningEngine)** +* **[Hub](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/CloudMachineLearningEngine)** * a 
central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::CallBuilder) -* **[Resources](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::CallBuilder) +* **[Resources](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::Part)** + * **[Parts](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::Delegate) to the -[Method Builder](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::Delegate) to the +[Method Builder](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::RequestValue) and -[decodable](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::RequestValue) and +[decodable](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-ml1/5.0.2-beta-1+20230106/google_ml1/client::RequestValue) are moved +* [request values](https://docs.rs/google-ml1/5.0.2+20230106/google_ml1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/ml1/src/api.rs b/gen/ml1/src/api.rs index b9ee87cf79..a64f7788b3 100644 --- a/gen/ml1/src/api.rs +++ b/gen/ml1/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> CloudMachineLearningEngine { CloudMachineLearningEngine { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://ml.googleapis.com/".to_string(), _root_url: "https://ml.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> CloudMachineLearningEngine { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/ml1/src/client.rs b/gen/ml1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/ml1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/ml1/src/lib.rs b/gen/ml1/src/lib.rs index c404c6677c..19a88be1c2 100644 --- a/gen/ml1/src/lib.rs +++ b/gen/ml1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Machine Learning Engine* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *ml:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Machine Learning Engine* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *ml:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Machine Learning Engine* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/ml/). diff --git a/gen/monitoring3-cli/Cargo.toml b/gen/monitoring3-cli/Cargo.toml index 9b2ff774a5..06b6c6e620 100644 --- a/gen/monitoring3-cli/Cargo.toml +++ b/gen/monitoring3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-monitoring3-cli" -version = "4.0.1+20220218" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Monitoring (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/monitoring3-cli" @@ -20,13 +20,13 @@ name = "monitoring3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-monitoring3] path = "../monitoring3" -version = "4.0.1+20220218" +version = "5.0.2+20230115" + diff --git a/gen/monitoring3-cli/README.md b/gen/monitoring3-cli/README.md index 964147f297..1c4731b30d 100644 --- a/gen/monitoring3-cli/README.md +++ b/gen/monitoring3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Monitoring* API at revision *20220218*. The CLI is at version *4.0.1*. +This documentation was generated from the *Monitoring* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash monitoring3 [options] @@ -62,6 +62,10 @@ monitoring3 [options] notification-channels-patch (-r )... [-p ]... [-o ] notification-channels-send-verification-code (-r )... [-p ]... [-o ] notification-channels-verify (-r )... [-p ]... [-o ] + snoozes-create (-r )... [-p ]... [-o ] + snoozes-get [-p ]... [-o ] + snoozes-list [-p ]... [-o ] + snoozes-patch (-r )... [-p ]... [-o ] time-series-create (-r )... [-p ]... [-o ] time-series-create-service (-r )... [-p ]... [-o ] time-series-list [-p ]... [-o ] diff --git a/gen/monitoring3-cli/mkdocs.yml b/gen/monitoring3-cli/mkdocs.yml index 0c9ccc256d..1cd58c78d4 100644 --- a/gen/monitoring3-cli/mkdocs.yml +++ b/gen/monitoring3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Monitoring v4.0.1+20220218 +site_name: Monitoring v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-monitoring3-cli site_description: A complete library to interact with Monitoring (protocol v3) @@ -7,58 +7,67 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/monitoring3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_time-series-list.md', 'Folders', 'Time Series List'] -- ['organizations_time-series-list.md', 'Organizations', 'Time Series List'] -- ['projects_alert-policies-create.md', 'Projects', 'Alert Policies Create'] -- ['projects_alert-policies-delete.md', 'Projects', 'Alert Policies Delete'] -- ['projects_alert-policies-get.md', 'Projects', 'Alert Policies Get'] -- ['projects_alert-policies-list.md', 'Projects', 'Alert Policies List'] -- ['projects_alert-policies-patch.md', 'Projects', 'Alert Policies Patch'] -- ['projects_collectd-time-series-create.md', 'Projects', 'Collectd Time 
Series Create'] -- ['projects_groups-create.md', 'Projects', 'Groups Create'] -- ['projects_groups-delete.md', 'Projects', 'Groups Delete'] -- ['projects_groups-get.md', 'Projects', 'Groups Get'] -- ['projects_groups-list.md', 'Projects', 'Groups List'] -- ['projects_groups-members-list.md', 'Projects', 'Groups Members List'] -- ['projects_groups-update.md', 'Projects', 'Groups Update'] -- ['projects_metric-descriptors-create.md', 'Projects', 'Metric Descriptors Create'] -- ['projects_metric-descriptors-delete.md', 'Projects', 'Metric Descriptors Delete'] -- ['projects_metric-descriptors-get.md', 'Projects', 'Metric Descriptors Get'] -- ['projects_metric-descriptors-list.md', 'Projects', 'Metric Descriptors List'] -- ['projects_monitored-resource-descriptors-get.md', 'Projects', 'Monitored Resource Descriptors Get'] -- ['projects_monitored-resource-descriptors-list.md', 'Projects', 'Monitored Resource Descriptors List'] -- ['projects_notification-channel-descriptors-get.md', 'Projects', 'Notification Channel Descriptors Get'] -- ['projects_notification-channel-descriptors-list.md', 'Projects', 'Notification Channel Descriptors List'] -- ['projects_notification-channels-create.md', 'Projects', 'Notification Channels Create'] -- ['projects_notification-channels-delete.md', 'Projects', 'Notification Channels Delete'] -- ['projects_notification-channels-get.md', 'Projects', 'Notification Channels Get'] -- ['projects_notification-channels-get-verification-code.md', 'Projects', 'Notification Channels Get Verification Code'] -- ['projects_notification-channels-list.md', 'Projects', 'Notification Channels List'] -- ['projects_notification-channels-patch.md', 'Projects', 'Notification Channels Patch'] -- ['projects_notification-channels-send-verification-code.md', 'Projects', 'Notification Channels Send Verification Code'] -- ['projects_notification-channels-verify.md', 'Projects', 'Notification Channels Verify'] -- ['projects_time-series-create.md', 'Projects', 'Time 
Series Create'] -- ['projects_time-series-create-service.md', 'Projects', 'Time Series Create Service'] -- ['projects_time-series-list.md', 'Projects', 'Time Series List'] -- ['projects_time-series-query.md', 'Projects', 'Time Series Query'] -- ['projects_uptime-check-configs-create.md', 'Projects', 'Uptime Check Configs Create'] -- ['projects_uptime-check-configs-delete.md', 'Projects', 'Uptime Check Configs Delete'] -- ['projects_uptime-check-configs-get.md', 'Projects', 'Uptime Check Configs Get'] -- ['projects_uptime-check-configs-list.md', 'Projects', 'Uptime Check Configs List'] -- ['projects_uptime-check-configs-patch.md', 'Projects', 'Uptime Check Configs Patch'] -- ['services_create.md', 'Services', 'Create'] -- ['services_delete.md', 'Services', 'Delete'] -- ['services_get.md', 'Services', 'Get'] -- ['services_list.md', 'Services', 'List'] -- ['services_patch.md', 'Services', 'Patch'] -- ['services_service-level-objectives-create.md', 'Services', 'Service Level Objectives Create'] -- ['services_service-level-objectives-delete.md', 'Services', 'Service Level Objectives Delete'] -- ['services_service-level-objectives-get.md', 'Services', 'Service Level Objectives Get'] -- ['services_service-level-objectives-list.md', 'Services', 'Service Level Objectives List'] -- ['services_service-level-objectives-patch.md', 'Services', 'Service Level Objectives Patch'] -- ['uptime-check-ips_list.md', 'Uptime Check Ips', 'List'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Time Series List': 'folders_time-series-list.md' +- 'Organizations': + - 'Time Series List': 'organizations_time-series-list.md' +- 'Projects': + - 'Alert Policies Create': 'projects_alert-policies-create.md' + - 'Alert Policies Delete': 'projects_alert-policies-delete.md' + - 'Alert Policies Get': 'projects_alert-policies-get.md' + - 'Alert Policies List': 'projects_alert-policies-list.md' + - 'Alert Policies Patch': 'projects_alert-policies-patch.md' + - 'Collectd Time Series Create': 
'projects_collectd-time-series-create.md' + - 'Groups Create': 'projects_groups-create.md' + - 'Groups Delete': 'projects_groups-delete.md' + - 'Groups Get': 'projects_groups-get.md' + - 'Groups List': 'projects_groups-list.md' + - 'Groups Members List': 'projects_groups-members-list.md' + - 'Groups Update': 'projects_groups-update.md' + - 'Metric Descriptors Create': 'projects_metric-descriptors-create.md' + - 'Metric Descriptors Delete': 'projects_metric-descriptors-delete.md' + - 'Metric Descriptors Get': 'projects_metric-descriptors-get.md' + - 'Metric Descriptors List': 'projects_metric-descriptors-list.md' + - 'Monitored Resource Descriptors Get': 'projects_monitored-resource-descriptors-get.md' + - 'Monitored Resource Descriptors List': 'projects_monitored-resource-descriptors-list.md' + - 'Notification Channel Descriptors Get': 'projects_notification-channel-descriptors-get.md' + - 'Notification Channel Descriptors List': 'projects_notification-channel-descriptors-list.md' + - 'Notification Channels Create': 'projects_notification-channels-create.md' + - 'Notification Channels Delete': 'projects_notification-channels-delete.md' + - 'Notification Channels Get': 'projects_notification-channels-get.md' + - 'Notification Channels Get Verification Code': 'projects_notification-channels-get-verification-code.md' + - 'Notification Channels List': 'projects_notification-channels-list.md' + - 'Notification Channels Patch': 'projects_notification-channels-patch.md' + - 'Notification Channels Send Verification Code': 'projects_notification-channels-send-verification-code.md' + - 'Notification Channels Verify': 'projects_notification-channels-verify.md' + - 'Snoozes Create': 'projects_snoozes-create.md' + - 'Snoozes Get': 'projects_snoozes-get.md' + - 'Snoozes List': 'projects_snoozes-list.md' + - 'Snoozes Patch': 'projects_snoozes-patch.md' + - 'Time Series Create': 'projects_time-series-create.md' + - 'Time Series Create Service': 
'projects_time-series-create-service.md' + - 'Time Series List': 'projects_time-series-list.md' + - 'Time Series Query': 'projects_time-series-query.md' + - 'Uptime Check Configs Create': 'projects_uptime-check-configs-create.md' + - 'Uptime Check Configs Delete': 'projects_uptime-check-configs-delete.md' + - 'Uptime Check Configs Get': 'projects_uptime-check-configs-get.md' + - 'Uptime Check Configs List': 'projects_uptime-check-configs-list.md' + - 'Uptime Check Configs Patch': 'projects_uptime-check-configs-patch.md' +- 'Services': + - 'Create': 'services_create.md' + - 'Delete': 'services_delete.md' + - 'Get': 'services_get.md' + - 'List': 'services_list.md' + - 'Patch': 'services_patch.md' + - 'Service Level Objectives Create': 'services_service-level-objectives-create.md' + - 'Service Level Objectives Delete': 'services_service-level-objectives-delete.md' + - 'Service Level Objectives Get': 'services_service-level-objectives-get.md' + - 'Service Level Objectives List': 'services_service-level-objectives-list.md' + - 'Service Level Objectives Patch': 'services_service-level-objectives-patch.md' +- 'Uptime Check Ips': + - 'List': 'uptime-check-ips_list.md' theme: readthedocs diff --git a/gen/monitoring3-cli/src/client.rs b/gen/monitoring3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/monitoring3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// 
I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/monitoring3-cli/src/main.rs b/gen/monitoring3-cli/src/main.rs index b82e627357..44a2a0b540 100644 --- a/gen/monitoring3-cli/src/main.rs +++ b/gen/monitoring3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_monitoring3::{api, Error, oauth2}; +use google_monitoring3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -70,22 +69,22 @@ where call = call.secondary_aggregation_cross_series_reducer(value.unwrap_or("")); }, "secondary-aggregation-alignment-period" => { - call = call.secondary_aggregation_alignment_period(value.unwrap_or("")); + call = call.secondary_aggregation_alignment_period( value.map(|v| arg_from_str(v, err, "secondary-aggregation-alignment-period", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); }, "interval-start-time" => { - call = call.interval_start_time(value.unwrap_or("")); + call = call.interval_start_time( value.map(|v| arg_from_str(v, err, "interval-start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "interval-end-time" => { - call = call.interval_end_time(value.unwrap_or("")); + call = 
call.interval_end_time( value.map(|v| arg_from_str(v, err, "interval-end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -100,7 +99,7 @@ where call = call.aggregation_cross_series_reducer(value.unwrap_or("")); }, "aggregation-alignment-period" => { - call = call.aggregation_alignment_period(value.unwrap_or("")); + call = call.aggregation_alignment_period( value.map(|v| arg_from_str(v, err, "aggregation-alignment-period", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -168,22 +167,22 @@ where call = call.secondary_aggregation_cross_series_reducer(value.unwrap_or("")); }, "secondary-aggregation-alignment-period" => { - call = call.secondary_aggregation_alignment_period(value.unwrap_or("")); + call = call.secondary_aggregation_alignment_period( value.map(|v| arg_from_str(v, err, "secondary-aggregation-alignment-period", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); }, "interval-start-time" => { - call = call.interval_start_time(value.unwrap_or("")); + call = call.interval_start_time( value.map(|v| arg_from_str(v, err, "interval-start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "interval-end-time" => { - call = call.interval_end_time(value.unwrap_or("")); + call = call.interval_end_time( value.map(|v| arg_from_str(v, err, "interval-end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -198,7 +197,7 @@ where call = call.aggregation_cross_series_reducer(value.unwrap_or("")); }, "aggregation-alignment-period" => { - 
call = call.aggregation_alignment_period(value.unwrap_or("")); + call = call.aggregation_alignment_period( value.map(|v| arg_from_str(v, err, "aggregation-alignment-period", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -461,7 +460,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -571,7 +570,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -751,7 +750,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -807,7 +806,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "recursive" => { - call = call.recursive(arg_from_str(value.unwrap_or("false"), err, "recursive", "boolean")); + call = call.recursive( value.map(|v| arg_from_str(v, err, "recursive", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -918,7 +917,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "descendants-of-group" => { call = call.descendants_of_group(value.unwrap_or("")); 
@@ -986,13 +985,13 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "interval-start-time" => { - call = call.interval_start_time(value.unwrap_or("")); + call = call.interval_start_time( value.map(|v| arg_from_str(v, err, "interval-start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "interval-end-time" => { - call = call.interval_end_time(value.unwrap_or("")); + call = call.interval_end_time( value.map(|v| arg_from_str(v, err, "interval-end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1088,7 +1087,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1347,7 +1346,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1461,7 +1460,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1575,7 +1574,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, 
"page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1725,7 +1724,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "force" => { - call = call.force(arg_from_str(value.unwrap_or("false"), err, "force", "boolean")); + call = call.force( value.map(|v| arg_from_str(v, err, "force", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1921,7 +1920,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2025,7 +2024,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2243,6 +2242,302 @@ where } } + async fn _projects_snoozes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "criteria.policies" => Some(("criteria.policies", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "interval.end-time" => Some(("interval.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "interval.start-time" => Some(("interval.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["criteria", "display-name", "end-time", "interval", "name", "policies", "start-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Snooze = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().snoozes_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + 
let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_snoozes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().snoozes_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { 
+ let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_snoozes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().snoozes_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_snoozes_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "criteria.policies" => Some(("criteria.policies", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "interval.end-time" => Some(("interval.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "interval.start-time" => Some(("interval.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["criteria", "display-name", "end-time", "interval", "name", "policies", "start-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Snooze = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().snoozes_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_time_series_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2430,22 +2725,22 @@ where call = call.secondary_aggregation_cross_series_reducer(value.unwrap_or("")); }, "secondary-aggregation-alignment-period" => { - call = call.secondary_aggregation_alignment_period(value.unwrap_or("")); + call = call.secondary_aggregation_alignment_period( value.map(|v| arg_from_str(v, err, "secondary-aggregation-alignment-period", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); }, "interval-start-time" => { - call = call.interval_start_time(value.unwrap_or("")); + call = call.interval_start_time( value.map(|v| arg_from_str(v, err, "interval-start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "interval-end-time" => { - 
call = call.interval_end_time(value.unwrap_or("")); + call = call.interval_end_time( value.map(|v| arg_from_str(v, err, "interval-end-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2460,7 +2755,7 @@ where call = call.aggregation_cross_series_reducer(value.unwrap_or("")); }, "aggregation-alignment-period" => { - call = call.aggregation_alignment_period(value.unwrap_or("")); + call = call.aggregation_alignment_period( value.map(|v| arg_from_str(v, err, "aggregation-alignment-period", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -2628,6 +2923,7 @@ where "http-check.headers" => Some(("httpCheck.headers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "http-check.mask-headers" => Some(("httpCheck.maskHeaders", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "http-check.path" => Some(("httpCheck.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-check.ping-config.pings-count" => Some(("httpCheck.pingConfig.pingsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "http-check.port" => Some(("httpCheck.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "http-check.request-method" => Some(("httpCheck.requestMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "http-check.use-ssl" => Some(("httpCheck.useSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2640,10 +2936,12 @@ where "resource-group.group-id" => Some(("resourceGroup.groupId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-group.resource-type" => Some(("resourceGroup.resourceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "selected-regions" => Some(("selectedRegions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + 
"tcp-check.ping-config.pings-count" => Some(("tcpCheck.pingConfig.pingsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "tcp-check.port" => Some(("tcpCheck.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "timeout" => Some(("timeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-labels" => Some(("userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["auth-info", "body", "checker-type", "content-type", "display-name", "group-id", "headers", "http-check", "is-internal", "labels", "mask-headers", "monitored-resource", "name", "password", "path", "period", "port", "request-method", "resource-group", "resource-type", "selected-regions", "tcp-check", "timeout", "type", "use-ssl", "username", "validate-ssl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auth-info", "body", "checker-type", "content-type", "display-name", "group-id", "headers", "http-check", "is-internal", "labels", "mask-headers", "monitored-resource", "name", "password", "path", "period", "ping-config", "pings-count", "port", "request-method", "resource-group", "resource-type", "selected-regions", "tcp-check", "timeout", "type", "use-ssl", "user-labels", "username", "validate-ssl"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2817,7 +3115,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); }, _ => { let mut found = false; @@ -2832,7 +3133,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); 
v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -2898,6 +3199,7 @@ where "http-check.headers" => Some(("httpCheck.headers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "http-check.mask-headers" => Some(("httpCheck.maskHeaders", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "http-check.path" => Some(("httpCheck.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "http-check.ping-config.pings-count" => Some(("httpCheck.pingConfig.pingsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "http-check.port" => Some(("httpCheck.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "http-check.request-method" => Some(("httpCheck.requestMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "http-check.use-ssl" => Some(("httpCheck.useSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2910,10 +3212,12 @@ where "resource-group.group-id" => Some(("resourceGroup.groupId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-group.resource-type" => Some(("resourceGroup.resourceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "selected-regions" => Some(("selectedRegions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "tcp-check.ping-config.pings-count" => Some(("tcpCheck.pingConfig.pingsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "tcp-check.port" => Some(("tcpCheck.port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "timeout" => Some(("timeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-labels" => Some(("userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = 
FieldCursor::did_you_mean(key, &vec!["auth-info", "body", "checker-type", "content-type", "display-name", "group-id", "headers", "http-check", "is-internal", "labels", "mask-headers", "monitored-resource", "name", "password", "path", "period", "port", "request-method", "resource-group", "resource-type", "selected-regions", "tcp-check", "timeout", "type", "use-ssl", "username", "validate-ssl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["auth-info", "body", "checker-type", "content-type", "display-name", "group-id", "headers", "http-check", "is-internal", "labels", "mask-headers", "monitored-resource", "name", "password", "path", "period", "ping-config", "pings-count", "port", "request-method", "resource-group", "resource-type", "selected-regions", "tcp-check", "timeout", "type", "use-ssl", "user-labels", "username", "validate-ssl"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2928,7 +3232,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3001,12 +3305,31 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "app-engine.module-id" => Some(("appEngine.moduleId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "basic-service.service-labels" => Some(("basicService.serviceLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "basic-service.service-type" => Some(("basicService.serviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloud-endpoints.service" => Some(("cloudEndpoints.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cloud-run.location" => Some(("cloudRun.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cloud-run.service-name" => Some(("cloudRun.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster-istio.cluster-name" => Some(("clusterIstio.clusterName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster-istio.location" => Some(("clusterIstio.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster-istio.service-name" => Some(("clusterIstio.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster-istio.service-namespace" => Some(("clusterIstio.serviceNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-namespace.cluster-name" => Some(("gkeNamespace.clusterName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-namespace.location" => Some(("gkeNamespace.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-namespace.namespace-name" => Some(("gkeNamespace.namespaceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-namespace.project-id" => Some(("gkeNamespace.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"gke-service.cluster-name" => Some(("gkeService.clusterName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-service.location" => Some(("gkeService.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-service.namespace-name" => Some(("gkeService.namespaceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-service.project-id" => Some(("gkeService.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-service.service-name" => Some(("gkeService.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.cluster-name" => Some(("gkeWorkload.clusterName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.location" => Some(("gkeWorkload.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.namespace-name" => Some(("gkeWorkload.namespaceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.project-id" => Some(("gkeWorkload.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.top-level-controller-name" => Some(("gkeWorkload.topLevelControllerName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.top-level-controller-type" => Some(("gkeWorkload.topLevelControllerType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "istio-canonical-service.canonical-service" => Some(("istioCanonicalService.canonicalService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "istio-canonical-service.canonical-service-namespace" => Some(("istioCanonicalService.canonicalServiceNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "istio-canonical-service.mesh-uid" => Some(("istioCanonicalService.meshUid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
@@ -3017,7 +3340,7 @@ where "telemetry.resource-name" => Some(("telemetry.resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-labels" => Some(("userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine", "canonical-service", "canonical-service-namespace", "cloud-endpoints", "cluster-istio", "cluster-name", "display-name", "istio-canonical-service", "location", "mesh-istio", "mesh-uid", "module-id", "name", "resource-name", "service", "service-name", "service-namespace", "telemetry", "user-labels"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine", "basic-service", "canonical-service", "canonical-service-namespace", "cloud-endpoints", "cloud-run", "cluster-istio", "cluster-name", "display-name", "gke-namespace", "gke-service", "gke-workload", "istio-canonical-service", "location", "mesh-istio", "mesh-uid", "module-id", "name", "namespace-name", "project-id", "resource-name", "service", "service-labels", "service-name", "service-namespace", "service-type", "telemetry", "top-level-controller-name", "top-level-controller-type", "user-labels"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3195,7 +3518,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3271,12 +3594,31 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "app-engine.module-id" => Some(("appEngine.moduleId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "basic-service.service-labels" => Some(("basicService.serviceLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "basic-service.service-type" => Some(("basicService.serviceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cloud-endpoints.service" => Some(("cloudEndpoints.service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cloud-run.location" => Some(("cloudRun.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "cloud-run.service-name" => Some(("cloudRun.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster-istio.cluster-name" => Some(("clusterIstio.clusterName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster-istio.location" => Some(("clusterIstio.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster-istio.service-name" => Some(("clusterIstio.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "cluster-istio.service-namespace" => Some(("clusterIstio.serviceNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-namespace.cluster-name" => Some(("gkeNamespace.clusterName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-namespace.location" => Some(("gkeNamespace.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-namespace.namespace-name" => Some(("gkeNamespace.namespaceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-namespace.project-id" => Some(("gkeNamespace.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"gke-service.cluster-name" => Some(("gkeService.clusterName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-service.location" => Some(("gkeService.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-service.namespace-name" => Some(("gkeService.namespaceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-service.project-id" => Some(("gkeService.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-service.service-name" => Some(("gkeService.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.cluster-name" => Some(("gkeWorkload.clusterName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.location" => Some(("gkeWorkload.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.namespace-name" => Some(("gkeWorkload.namespaceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.project-id" => Some(("gkeWorkload.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.top-level-controller-name" => Some(("gkeWorkload.topLevelControllerName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gke-workload.top-level-controller-type" => Some(("gkeWorkload.topLevelControllerType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "istio-canonical-service.canonical-service" => Some(("istioCanonicalService.canonicalService", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "istio-canonical-service.canonical-service-namespace" => Some(("istioCanonicalService.canonicalServiceNamespace", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "istio-canonical-service.mesh-uid" => Some(("istioCanonicalService.meshUid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
@@ -3287,7 +3629,7 @@ where "telemetry.resource-name" => Some(("telemetry.resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-labels" => Some(("userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine", "canonical-service", "canonical-service-namespace", "cloud-endpoints", "cluster-istio", "cluster-name", "display-name", "istio-canonical-service", "location", "mesh-istio", "mesh-uid", "module-id", "name", "resource-name", "service", "service-name", "service-namespace", "telemetry", "user-labels"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine", "basic-service", "canonical-service", "canonical-service-namespace", "cloud-endpoints", "cloud-run", "cluster-istio", "cluster-name", "display-name", "gke-namespace", "gke-service", "gke-workload", "istio-canonical-service", "location", "mesh-istio", "mesh-uid", "module-id", "name", "namespace-name", "project-id", "resource-name", "service", "service-labels", "service-name", "service-namespace", "service-type", "telemetry", "top-level-controller-name", "top-level-controller-type", "user-labels"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3302,7 +3644,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3595,7 +3937,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -3721,7 +4063,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3780,7 +4122,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3942,6 +4284,18 @@ where ("notification-channels-verify", Some(opt)) => { call_result = self._projects_notification_channels_verify(opt, dry_run, &mut err).await; }, + ("snoozes-create", Some(opt)) => { + call_result = self._projects_snoozes_create(opt, dry_run, &mut err).await; + }, + ("snoozes-get", Some(opt)) => { + call_result = self._projects_snoozes_get(opt, dry_run, &mut err).await; + }, + ("snoozes-list", Some(opt)) => { + call_result = self._projects_snoozes_list(opt, dry_run, &mut err).await; + }, + ("snoozes-patch", Some(opt)) => { + call_result = self._projects_snoozes_patch(opt, dry_run, &mut err).await; + }, ("time-series-create", Some(opt)) => { call_result = self._projects_time_series_create(opt, dry_run, &mut err).await; }, @@ -4147,14 +4501,14 @@ async fn main() { ]), ]), - ("projects", "methods: 'alert-policies-create', 'alert-policies-delete', 'alert-policies-get', 'alert-policies-list', 'alert-policies-patch', 'collectd-time-series-create', 'groups-create', 'groups-delete', 'groups-get', 'groups-list', 'groups-members-list', 'groups-update', 'metric-descriptors-create', 'metric-descriptors-delete', 'metric-descriptors-get', 'metric-descriptors-list', 'monitored-resource-descriptors-get', 'monitored-resource-descriptors-list', 'notification-channel-descriptors-get', 
'notification-channel-descriptors-list', 'notification-channels-create', 'notification-channels-delete', 'notification-channels-get', 'notification-channels-get-verification-code', 'notification-channels-list', 'notification-channels-patch', 'notification-channels-send-verification-code', 'notification-channels-verify', 'time-series-create', 'time-series-create-service', 'time-series-list', 'time-series-query', 'uptime-check-configs-create', 'uptime-check-configs-delete', 'uptime-check-configs-get', 'uptime-check-configs-list' and 'uptime-check-configs-patch'", vec![ + ("projects", "methods: 'alert-policies-create', 'alert-policies-delete', 'alert-policies-get', 'alert-policies-list', 'alert-policies-patch', 'collectd-time-series-create', 'groups-create', 'groups-delete', 'groups-get', 'groups-list', 'groups-members-list', 'groups-update', 'metric-descriptors-create', 'metric-descriptors-delete', 'metric-descriptors-get', 'metric-descriptors-list', 'monitored-resource-descriptors-get', 'monitored-resource-descriptors-list', 'notification-channel-descriptors-get', 'notification-channel-descriptors-list', 'notification-channels-create', 'notification-channels-delete', 'notification-channels-get', 'notification-channels-get-verification-code', 'notification-channels-list', 'notification-channels-patch', 'notification-channels-send-verification-code', 'notification-channels-verify', 'snoozes-create', 'snoozes-get', 'snoozes-list', 'snoozes-patch', 'time-series-create', 'time-series-create-service', 'time-series-list', 'time-series-query', 'uptime-check-configs-create', 'uptime-check-configs-delete', 'uptime-check-configs-get', 'uptime-check-configs-list' and 'uptime-check-configs-patch'", vec![ ("alert-policies-create", - Some(r##"Creates a new alerting policy."##), + Some(r##"Creates a new alerting policy.Design your application to single-thread API calls that modify the state of alerting policies in a single project. 
This includes calls to CreateAlertPolicy, DeleteAlertPolicy and UpdateAlertPolicy."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_alert-policies-create", vec![ (Some(r##"name"##), None, - Some(r##"Required. The project (https://cloud.google.com/monitoring/api/v3#project_name) in which to create the alerting policy. The format is: projects/[PROJECT_ID_OR_NUMBER] Note that this field names the parent container in which the alerting policy will be written, not the name of the created policy. |name| must be a host project of a workspace, otherwise INVALID_ARGUMENT error will return. The alerting policy that is returned will have a name that contains a normalized representation of this name as a prefix but adds a suffix of the form /alertPolicies/[ALERT_POLICY_ID], identifying the policy in the container."##), + Some(r##"Required. The project (https://cloud.google.com/monitoring/api/v3#project_name) in which to create the alerting policy. The format is: projects/[PROJECT_ID_OR_NUMBER] Note that this field names the parent container in which the alerting policy will be written, not the name of the created policy. |name| must be a host project of a Metrics Scope, otherwise INVALID_ARGUMENT error will return. The alerting policy that is returned will have a name that contains a normalized representation of this name as a prefix but adds a suffix of the form /alertPolicies/[ALERT_POLICY_ID], identifying the policy in the container."##), Some(true), Some(false)), @@ -4177,7 +4531,7 @@ async fn main() { Some(false)), ]), ("alert-policies-delete", - Some(r##"Deletes an alerting policy."##), + Some(r##"Deletes an alerting policy.Design your application to single-thread API calls that modify the state of alerting policies in a single project. 
This includes calls to CreateAlertPolicy, DeleteAlertPolicy and UpdateAlertPolicy."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_alert-policies-delete", vec![ (Some(r##"name"##), @@ -4243,12 +4597,12 @@ async fn main() { Some(false)), ]), ("alert-policies-patch", - Some(r##"Updates an alerting policy. You can either replace the entire policy with a new one or replace only certain fields in the current alerting policy by specifying the fields to be updated via updateMask. Returns the updated alerting policy."##), + Some(r##"Updates an alerting policy. You can either replace the entire policy with a new one or replace only certain fields in the current alerting policy by specifying the fields to be updated via updateMask. Returns the updated alerting policy.Design your application to single-thread API calls that modify the state of alerting policies in a single project. This includes calls to CreateAlertPolicy, DeleteAlertPolicy and UpdateAlertPolicy."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_alert-policies-patch", vec![ (Some(r##"name"##), None, - Some(r##"Required if the policy exists. The resource name for this policy. The format is: projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] [ALERT_POLICY_ID] is assigned by Stackdriver Monitoring when the policy is created. When calling the alertPolicies.create method, do not include the name field in the alerting policy passed as part of the request."##), + Some(r##"Required if the policy exists. The resource name for this policy. The format is: projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID] [ALERT_POLICY_ID] is assigned by Cloud Monitoring when the policy is created. 
When calling the alertPolicies.create method, do not include the name field in the alerting policy passed as part of the request."##), Some(true), Some(false)), @@ -4271,7 +4625,7 @@ async fn main() { Some(false)), ]), ("collectd-time-series-create", - Some(r##"Stackdriver Monitoring Agent only: Creates a new time series.This method is only for use by the Stackdriver Monitoring Agent. Use projects.timeSeries.create instead."##), + Some(r##"Cloud Monitoring Agent only: Creates a new time series.This method is only for use by the Cloud Monitoring Agent. Use projects.timeSeries.create instead."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_collectd-time-series-create", vec![ (Some(r##"name"##), @@ -4443,7 +4797,7 @@ async fn main() { Some(false)), ]), ("metric-descriptors-create", - Some(r##"Creates a new metric descriptor. The creation is executed asynchronously and callers may check the returned operation to track its progress. User-created metric descriptors define custom metrics (https://cloud.google.com/monitoring/custom-metrics)."##), + Some(r##"Creates a new metric descriptor. The creation is executed asynchronously. User-created metric descriptors define custom metrics (https://cloud.google.com/monitoring/custom-metrics). 
The metric descriptor is updated if it already exists, except that metric labels are never removed."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_metric-descriptors-create", vec![ (Some(r##"name"##), @@ -4625,7 +4979,7 @@ async fn main() { Some(false)), ]), ("notification-channels-create", - Some(r##"Creates a new notification channel, representing a single notification endpoint such as an email address, SMS number, or PagerDuty service."##), + Some(r##"Creates a new notification channel, representing a single notification endpoint such as an email address, SMS number, or PagerDuty service.Design your application to single-thread API calls that modify the state of notification channels in a single project. This includes calls to CreateNotificationChannel, DeleteNotificationChannel and UpdateNotificationChannel."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_notification-channels-create", vec![ (Some(r##"name"##), @@ -4653,7 +5007,7 @@ async fn main() { Some(false)), ]), ("notification-channels-delete", - Some(r##"Deletes a notification channel."##), + Some(r##"Deletes a notification channel.Design your application to single-thread API calls that modify the state of notification channels in a single project. This includes calls to CreateNotificationChannel, DeleteNotificationChannel and UpdateNotificationChannel."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_notification-channels-delete", vec![ (Some(r##"name"##), @@ -4747,7 +5101,7 @@ async fn main() { Some(false)), ]), ("notification-channels-patch", - Some(r##"Updates a notification channel. Fields not specified in the field mask remain unchanged."##), + Some(r##"Updates a notification channel. Fields not specified in the field mask remain unchanged.Design your application to single-thread API calls that modify the state of notification channels in a single project. 
This includes calls to CreateNotificationChannel, DeleteNotificationChannel and UpdateNotificationChannel."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_notification-channels-patch", vec![ (Some(r##"name"##), @@ -4824,6 +5178,106 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("snoozes-create", + Some(r##"Creates a Snooze that will prevent alerts, which match the provided criteria, from being opened. The Snooze applies for a specific time interval."##), + "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_snoozes-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The project (https://cloud.google.com/monitoring/api/v3#project_name) in which a Snooze should be created. The format is: projects/[PROJECT_ID_OR_NUMBER] "##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("snoozes-get", + Some(r##"Retrieves a Snooze by name."##), + "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_snoozes-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The ID of the Snooze to retrieve. 
The format is: projects/[PROJECT_ID_OR_NUMBER]/snoozes/[SNOOZE_ID] "##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("snoozes-list", + Some(r##"Lists the Snoozes associated with a project. Can optionally pass in filter, which specifies predicates to match Snoozes."##), + "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_snoozes-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The project (https://cloud.google.com/monitoring/api/v3#project_name) whose Snoozes should be listed. The format is: projects/[PROJECT_ID_OR_NUMBER] "##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("snoozes-patch", + Some(r##"Updates a Snooze, identified by its name, with the parameters in the given Snooze object."##), + "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/projects_snoozes-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the Snooze. 
The format is: projects/[PROJECT_ID_OR_NUMBER]/snoozes/[SNOOZE_ID] The ID of the Snooze will be generated by the system."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5067,7 +5521,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name (https://cloud.google.com/monitoring/api/v3#project_name) of the parent workspace. The format is: projects/[PROJECT_ID_OR_NUMBER] "##), + Some(r##"Required. Resource name (https://cloud.google.com/monitoring/api/v3#project_name) of the parent Metrics Scope. The format is: projects/[PROJECT_ID_OR_NUMBER] "##), Some(true), Some(false)), @@ -5134,12 +5588,12 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"List Services for this workspace."##), + Some(r##"List Services for this Metrics Scope."##), "Details at http://byron.github.io/google-apis-rs/google_monitoring3_cli/services_list", vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name of the parent containing the listed services, either a project (https://cloud.google.com/monitoring/api/v3#project_name) or a Monitoring Workspace. The formats are: projects/[PROJECT_ID_OR_NUMBER] workspaces/[HOST_PROJECT_ID_OR_NUMBER] "##), + Some(r##"Required. Resource name of the parent containing the listed services, either a project (https://cloud.google.com/monitoring/api/v3#project_name) or a Monitoring Metrics Scope. The formats are: projects/[PROJECT_ID_OR_NUMBER] workspaces/[HOST_PROJECT_ID_OR_NUMBER] "##), Some(true), Some(false)), @@ -5261,7 +5715,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. 
Resource name of the parent containing the listed SLOs, either a project or a Monitoring Workspace. The formats are: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/- "##), + Some(r##"Required. Resource name of the parent containing the listed SLOs, either a project or a Monitoring Metrics Scope. The formats are: projects/[PROJECT_ID_OR_NUMBER]/services/[SERVICE_ID] workspaces/[HOST_PROJECT_ID_OR_NUMBER]/services/- "##), Some(true), Some(false)), @@ -5330,7 +5784,7 @@ async fn main() { let mut app = App::new("monitoring3") .author("Sebastian Thiel ") - .version("4.0.1+20220218") + .version("5.0.2+20230115") .about("Manages your Cloud Monitoring data and configurations.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_monitoring3_cli") .arg(Arg::with_name("url") diff --git a/gen/monitoring3/Cargo.toml b/gen/monitoring3/Cargo.toml index fb2f25ae7b..3daf6aaca1 100644 --- a/gen/monitoring3/Cargo.toml +++ b/gen/monitoring3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-monitoring3" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Monitoring (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/monitoring3" homepage = "https://cloud.google.com/monitoring/api/" -documentation = "https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-monitoring3/5.0.2+20230115" license = "MIT" keywords = ["monitoring", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/monitoring3/README.md b/gen/monitoring3/README.md index 12973c4308..b88faa00e2 100644 --- a/gen/monitoring3/README.md +++ b/gen/monitoring3/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-monitoring3` library allows access to all features of the *Google Monitoring* service. 
-This documentation was generated from *Monitoring* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *monitoring:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Monitoring* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *monitoring:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Monitoring* *v3* API can be found at the [official documentation site](https://cloud.google.com/monitoring/api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/Monitoring) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/Monitoring) ... * folders - * [*time series list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::FolderTimeSeryListCall) + * [*time series list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::FolderTimeSeryListCall) * organizations - * [*time series list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::OrganizationTimeSeryListCall) + * [*time series list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::OrganizationTimeSeryListCall) * projects - * [*alert policies create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectAlertPolicyCreateCall), [*alert policies delete*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectAlertPolicyDeleteCall), [*alert policies get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectAlertPolicyGetCall), [*alert policies 
list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectAlertPolicyListCall), [*alert policies patch*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectAlertPolicyPatchCall), [*collectd time series create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectCollectdTimeSeryCreateCall), [*groups create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectGroupCreateCall), [*groups delete*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectGroupDeleteCall), [*groups get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectGroupGetCall), [*groups list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectGroupListCall), [*groups members list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectGroupMemberListCall), [*groups update*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectGroupUpdateCall), [*metric descriptors create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectMetricDescriptorCreateCall), [*metric descriptors delete*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectMetricDescriptorDeleteCall), [*metric descriptors get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectMetricDescriptorGetCall), [*metric descriptors list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectMetricDescriptorListCall), [*monitored resource descriptors get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectMonitoredResourceDescriptorGetCall), [*monitored resource descriptors 
list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectMonitoredResourceDescriptorListCall), [*notification channel descriptors get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelDescriptorGetCall), [*notification channel descriptors list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelDescriptorListCall), [*notification channels create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelCreateCall), [*notification channels delete*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelDeleteCall), [*notification channels get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelGetCall), [*notification channels get verification code*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelGetVerificationCodeCall), [*notification channels list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelListCall), [*notification channels patch*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelPatchCall), [*notification channels send verification code*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelSendVerificationCodeCall), [*notification channels verify*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectNotificationChannelVerifyCall), [*snoozes create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectSnoozeCreateCall), [*snoozes get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectSnoozeGetCall), [*snoozes 
list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectSnoozeListCall), [*snoozes patch*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectSnoozePatchCall), [*time series create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectTimeSeryCreateCall), [*time series create service*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectTimeSeryCreateServiceCall), [*time series list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectTimeSeryListCall), [*time series query*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectTimeSeryQueryCall), [*uptime check configs create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectUptimeCheckConfigCreateCall), [*uptime check configs delete*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectUptimeCheckConfigDeleteCall), [*uptime check configs get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectUptimeCheckConfigGetCall), [*uptime check configs list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectUptimeCheckConfigListCall) and [*uptime check configs patch*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ProjectUptimeCheckConfigPatchCall) -* [services](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::Service) - * [*create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceCreateCall), [*delete*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceDeleteCall), [*get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceGetCall), 
[*list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceListCall), [*patch*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServicePatchCall), [*service level objectives create*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceServiceLevelObjectiveCreateCall), [*service level objectives delete*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceServiceLevelObjectiveDeleteCall), [*service level objectives get*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceServiceLevelObjectiveGetCall), [*service level objectives list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceServiceLevelObjectiveListCall) and [*service level objectives patch*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::ServiceServiceLevelObjectivePatchCall) -* [uptime check ips](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::UptimeCheckIp) - * [*list*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/api::UptimeCheckIpListCall) + * [*alert policies create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectAlertPolicyCreateCall), [*alert policies delete*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectAlertPolicyDeleteCall), [*alert policies get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectAlertPolicyGetCall), [*alert policies list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectAlertPolicyListCall), [*alert policies patch*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectAlertPolicyPatchCall), [*collectd time series 
create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectCollectdTimeSeryCreateCall), [*groups create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectGroupCreateCall), [*groups delete*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectGroupDeleteCall), [*groups get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectGroupGetCall), [*groups list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectGroupListCall), [*groups members list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectGroupMemberListCall), [*groups update*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectGroupUpdateCall), [*metric descriptors create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectMetricDescriptorCreateCall), [*metric descriptors delete*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectMetricDescriptorDeleteCall), [*metric descriptors get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectMetricDescriptorGetCall), [*metric descriptors list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectMetricDescriptorListCall), [*monitored resource descriptors get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectMonitoredResourceDescriptorGetCall), [*monitored resource descriptors list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectMonitoredResourceDescriptorListCall), [*notification channel descriptors get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelDescriptorGetCall), [*notification channel descriptors 
list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelDescriptorListCall), [*notification channels create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelCreateCall), [*notification channels delete*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelDeleteCall), [*notification channels get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelGetCall), [*notification channels get verification code*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelGetVerificationCodeCall), [*notification channels list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelListCall), [*notification channels patch*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelPatchCall), [*notification channels send verification code*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelSendVerificationCodeCall), [*notification channels verify*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectNotificationChannelVerifyCall), [*snoozes create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectSnoozeCreateCall), [*snoozes get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectSnoozeGetCall), [*snoozes list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectSnoozeListCall), [*snoozes patch*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectSnoozePatchCall), [*time series create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectTimeSeryCreateCall), [*time series create 
service*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectTimeSeryCreateServiceCall), [*time series list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectTimeSeryListCall), [*time series query*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectTimeSeryQueryCall), [*uptime check configs create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectUptimeCheckConfigCreateCall), [*uptime check configs delete*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectUptimeCheckConfigDeleteCall), [*uptime check configs get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectUptimeCheckConfigGetCall), [*uptime check configs list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectUptimeCheckConfigListCall) and [*uptime check configs patch*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ProjectUptimeCheckConfigPatchCall) +* [services](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::Service) + * [*create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceCreateCall), [*delete*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceDeleteCall), [*get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceGetCall), [*list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceListCall), [*patch*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServicePatchCall), [*service level objectives create*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceServiceLevelObjectiveCreateCall), [*service level objectives delete*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceServiceLevelObjectiveDeleteCall), [*service level 
objectives get*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceServiceLevelObjectiveGetCall), [*service level objectives list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceServiceLevelObjectiveListCall) and [*service level objectives patch*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::ServiceServiceLevelObjectivePatchCall) +* [uptime check ips](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::UptimeCheckIp) + * [*list*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/api::UptimeCheckIpListCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/Monitoring)** +* **[Hub](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/Monitoring)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::CallBuilder) -* **[Resources](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::CallBuilder) +* **[Resources](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::Part)** + * **[Parts](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -139,17 +139,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -159,29 +159,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::Delegate) to the -[Method Builder](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::Delegate) to the +[Method Builder](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::RequestValue) and -[decodable](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::RequestValue) and +[decodable](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-monitoring3/5.0.2-beta-1+20230115/google_monitoring3/client::RequestValue) are moved +* [request values](https://docs.rs/google-monitoring3/5.0.2+20230115/google_monitoring3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/monitoring3/src/api.rs b/gen/monitoring3/src/api.rs index ca68f7437e..2f7dda03f9 100644 --- a/gen/monitoring3/src/api.rs +++ b/gen/monitoring3/src/api.rs @@ -135,7 +135,7 @@ impl<'a, S> Monitoring { Monitoring { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://monitoring.googleapis.com/".to_string(), _root_url: "https://monitoring.googleapis.com/".to_string(), } @@ -158,7 +158,7 @@ impl<'a, S> Monitoring { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/monitoring3/src/client.rs b/gen/monitoring3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/monitoring3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/monitoring3/src/lib.rs b/gen/monitoring3/src/lib.rs index c1a71fea43..32cb902a78 100644 --- a/gen/monitoring3/src/lib.rs +++ b/gen/monitoring3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Monitoring* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *monitoring:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Monitoring* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *monitoring:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Monitoring* *v3* API can be found at the //! [official documentation site](https://cloud.google.com/monitoring/api/). diff --git a/gen/mybusiness4-cli/Cargo.toml b/gen/mybusiness4-cli/Cargo.toml index 119bddefde..6bb0b337ec 100644 --- a/gen/mybusiness4-cli/Cargo.toml +++ b/gen/mybusiness4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mybusiness4-cli" -version = "4.0.1+0" +version = "5.0.2+0" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusiness4-cli" @@ -20,13 +20,13 @@ name = "mybusiness4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mybusiness4] path = "../mybusiness4" -version = "4.0.1+0" +version = "5.0.2+0" + diff --git a/gen/mybusiness4-cli/README.md b/gen/mybusiness4-cli/README.md index 0c43407134..f0de1d0239 100644 --- a/gen/mybusiness4-cli/README.md +++ b/gen/mybusiness4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *My Business* API at revision *0*. The CLI is at version *4.0.1*. +This documentation was generated from the *My Business* API at revision *0*. The CLI is at version *5.0.2*. ```bash mybusiness4 [options] diff --git a/gen/mybusiness4-cli/mkdocs.yml b/gen/mybusiness4-cli/mkdocs.yml index aa66b531db..44c1f0bbdd 100644 --- a/gen/mybusiness4-cli/mkdocs.yml +++ b/gen/mybusiness4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: My Business v4.0.1+0 +site_name: My Business v5.0.2+0 site_url: http://byron.github.io/google-apis-rs/google-mybusiness4-cli site_description: A complete library to interact with My Business (protocol v4) @@ -7,78 +7,84 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mybusiness4-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_admins-create.md', 'Accounts', 'Admins Create'] -- ['accounts_admins-delete.md', 'Accounts', 'Admins Delete'] -- ['accounts_admins-list.md', 'Accounts', 'Admins List'] -- ['accounts_admins-patch.md', 'Accounts', 'Admins Patch'] -- ['accounts_create.md', 'Accounts', 'Create'] -- ['accounts_delete-notifications.md', 'Accounts', 'Delete Notifications'] -- ['accounts_generate-account-number.md', 'Accounts', 'Generate Account Number'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_get-notifications.md', 'Accounts', 'Get Notifications'] -- ['accounts_invitations-accept.md', 'Accounts', 'Invitations Accept'] -- ['accounts_invitations-decline.md', 'Accounts', 'Invitations Decline'] -- ['accounts_invitations-list.md', 'Accounts', 'Invitations List'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_list-recommend-google-locations.md', 'Accounts', 'List Recommend Google Locations'] -- ['accounts_locations-admins-create.md', 'Accounts', 'Locations Admins Create'] -- ['accounts_locations-admins-delete.md', 'Accounts', 'Locations Admins Delete'] -- ['accounts_locations-admins-list.md', 'Accounts', 'Locations Admins List'] -- 
['accounts_locations-admins-patch.md', 'Accounts', 'Locations Admins Patch'] -- ['accounts_locations-associate.md', 'Accounts', 'Locations Associate'] -- ['accounts_locations-batch-get.md', 'Accounts', 'Locations Batch Get'] -- ['accounts_locations-batch-get-reviews.md', 'Accounts', 'Locations Batch Get Reviews'] -- ['accounts_locations-clear-association.md', 'Accounts', 'Locations Clear Association'] -- ['accounts_locations-create.md', 'Accounts', 'Locations Create'] -- ['accounts_locations-delete.md', 'Accounts', 'Locations Delete'] -- ['accounts_locations-fetch-verification-options.md', 'Accounts', 'Locations Fetch Verification Options'] -- ['accounts_locations-find-matches.md', 'Accounts', 'Locations Find Matches'] -- ['accounts_locations-followers-get-metadata.md', 'Accounts', 'Locations Followers Get Metadata'] -- ['accounts_locations-get.md', 'Accounts', 'Locations Get'] -- ['accounts_locations-get-google-updated.md', 'Accounts', 'Locations Get Google Updated'] -- ['accounts_locations-list.md', 'Accounts', 'Locations List'] -- ['accounts_locations-local-posts-create.md', 'Accounts', 'Locations Local Posts Create'] -- ['accounts_locations-local-posts-delete.md', 'Accounts', 'Locations Local Posts Delete'] -- ['accounts_locations-local-posts-get.md', 'Accounts', 'Locations Local Posts Get'] -- ['accounts_locations-local-posts-list.md', 'Accounts', 'Locations Local Posts List'] -- ['accounts_locations-local-posts-patch.md', 'Accounts', 'Locations Local Posts Patch'] -- ['accounts_locations-local-posts-report-insights.md', 'Accounts', 'Locations Local Posts Report Insights'] -- ['accounts_locations-media-create.md', 'Accounts', 'Locations Media Create'] -- ['accounts_locations-media-customers-get.md', 'Accounts', 'Locations Media Customers Get'] -- ['accounts_locations-media-customers-list.md', 'Accounts', 'Locations Media Customers List'] -- ['accounts_locations-media-delete.md', 'Accounts', 'Locations Media Delete'] -- ['accounts_locations-media-get.md', 
'Accounts', 'Locations Media Get'] -- ['accounts_locations-media-list.md', 'Accounts', 'Locations Media List'] -- ['accounts_locations-media-patch.md', 'Accounts', 'Locations Media Patch'] -- ['accounts_locations-media-start-upload.md', 'Accounts', 'Locations Media Start Upload'] -- ['accounts_locations-patch.md', 'Accounts', 'Locations Patch'] -- ['accounts_locations-questions-answers-delete.md', 'Accounts', 'Locations Questions Answers Delete'] -- ['accounts_locations-questions-answers-list.md', 'Accounts', 'Locations Questions Answers List'] -- ['accounts_locations-questions-answers-upsert.md', 'Accounts', 'Locations Questions Answers Upsert'] -- ['accounts_locations-questions-create.md', 'Accounts', 'Locations Questions Create'] -- ['accounts_locations-questions-delete.md', 'Accounts', 'Locations Questions Delete'] -- ['accounts_locations-questions-list.md', 'Accounts', 'Locations Questions List'] -- ['accounts_locations-questions-patch.md', 'Accounts', 'Locations Questions Patch'] -- ['accounts_locations-report-insights.md', 'Accounts', 'Locations Report Insights'] -- ['accounts_locations-reviews-delete-reply.md', 'Accounts', 'Locations Reviews Delete Reply'] -- ['accounts_locations-reviews-get.md', 'Accounts', 'Locations Reviews Get'] -- ['accounts_locations-reviews-list.md', 'Accounts', 'Locations Reviews List'] -- ['accounts_locations-reviews-update-reply.md', 'Accounts', 'Locations Reviews Update Reply'] -- ['accounts_locations-transfer.md', 'Accounts', 'Locations Transfer'] -- ['accounts_locations-verifications-complete.md', 'Accounts', 'Locations Verifications Complete'] -- ['accounts_locations-verifications-list.md', 'Accounts', 'Locations Verifications List'] -- ['accounts_locations-verify.md', 'Accounts', 'Locations Verify'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['accounts_update-notifications.md', 'Accounts', 'Update Notifications'] -- ['attributes_list.md', 'Attributes', 'List'] -- ['categories_list.md', 'Categories', 'List'] -- 
['chains_get.md', 'Chains', 'Get'] -- ['chains_search.md', 'Chains', 'Search'] -- ['google-locations_report.md', 'Google Locations', 'Report'] -- ['google-locations_search.md', 'Google Locations', 'Search'] -- ['verification-tokens_generate.md', 'Verification Tokens', 'Generate'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Admins Create': 'accounts_admins-create.md' + - 'Admins Delete': 'accounts_admins-delete.md' + - 'Admins List': 'accounts_admins-list.md' + - 'Admins Patch': 'accounts_admins-patch.md' + - 'Create': 'accounts_create.md' + - 'Delete Notifications': 'accounts_delete-notifications.md' + - 'Generate Account Number': 'accounts_generate-account-number.md' + - 'Get': 'accounts_get.md' + - 'Get Notifications': 'accounts_get-notifications.md' + - 'Invitations Accept': 'accounts_invitations-accept.md' + - 'Invitations Decline': 'accounts_invitations-decline.md' + - 'Invitations List': 'accounts_invitations-list.md' + - 'List': 'accounts_list.md' + - 'List Recommend Google Locations': 'accounts_list-recommend-google-locations.md' + - 'Locations Admins Create': 'accounts_locations-admins-create.md' + - 'Locations Admins Delete': 'accounts_locations-admins-delete.md' + - 'Locations Admins List': 'accounts_locations-admins-list.md' + - 'Locations Admins Patch': 'accounts_locations-admins-patch.md' + - 'Locations Associate': 'accounts_locations-associate.md' + - 'Locations Batch Get': 'accounts_locations-batch-get.md' + - 'Locations Batch Get Reviews': 'accounts_locations-batch-get-reviews.md' + - 'Locations Clear Association': 'accounts_locations-clear-association.md' + - 'Locations Create': 'accounts_locations-create.md' + - 'Locations Delete': 'accounts_locations-delete.md' + - 'Locations Fetch Verification Options': 'accounts_locations-fetch-verification-options.md' + - 'Locations Find Matches': 'accounts_locations-find-matches.md' + - 'Locations Followers Get Metadata': 'accounts_locations-followers-get-metadata.md' + - 'Locations Get': 
'accounts_locations-get.md' + - 'Locations Get Google Updated': 'accounts_locations-get-google-updated.md' + - 'Locations List': 'accounts_locations-list.md' + - 'Locations Local Posts Create': 'accounts_locations-local-posts-create.md' + - 'Locations Local Posts Delete': 'accounts_locations-local-posts-delete.md' + - 'Locations Local Posts Get': 'accounts_locations-local-posts-get.md' + - 'Locations Local Posts List': 'accounts_locations-local-posts-list.md' + - 'Locations Local Posts Patch': 'accounts_locations-local-posts-patch.md' + - 'Locations Local Posts Report Insights': 'accounts_locations-local-posts-report-insights.md' + - 'Locations Media Create': 'accounts_locations-media-create.md' + - 'Locations Media Customers Get': 'accounts_locations-media-customers-get.md' + - 'Locations Media Customers List': 'accounts_locations-media-customers-list.md' + - 'Locations Media Delete': 'accounts_locations-media-delete.md' + - 'Locations Media Get': 'accounts_locations-media-get.md' + - 'Locations Media List': 'accounts_locations-media-list.md' + - 'Locations Media Patch': 'accounts_locations-media-patch.md' + - 'Locations Media Start Upload': 'accounts_locations-media-start-upload.md' + - 'Locations Patch': 'accounts_locations-patch.md' + - 'Locations Questions Answers Delete': 'accounts_locations-questions-answers-delete.md' + - 'Locations Questions Answers List': 'accounts_locations-questions-answers-list.md' + - 'Locations Questions Answers Upsert': 'accounts_locations-questions-answers-upsert.md' + - 'Locations Questions Create': 'accounts_locations-questions-create.md' + - 'Locations Questions Delete': 'accounts_locations-questions-delete.md' + - 'Locations Questions List': 'accounts_locations-questions-list.md' + - 'Locations Questions Patch': 'accounts_locations-questions-patch.md' + - 'Locations Report Insights': 'accounts_locations-report-insights.md' + - 'Locations Reviews Delete Reply': 'accounts_locations-reviews-delete-reply.md' + - 'Locations Reviews 
Get': 'accounts_locations-reviews-get.md' + - 'Locations Reviews List': 'accounts_locations-reviews-list.md' + - 'Locations Reviews Update Reply': 'accounts_locations-reviews-update-reply.md' + - 'Locations Transfer': 'accounts_locations-transfer.md' + - 'Locations Verifications Complete': 'accounts_locations-verifications-complete.md' + - 'Locations Verifications List': 'accounts_locations-verifications-list.md' + - 'Locations Verify': 'accounts_locations-verify.md' + - 'Update': 'accounts_update.md' + - 'Update Notifications': 'accounts_update-notifications.md' +- 'Attributes': + - 'List': 'attributes_list.md' +- 'Categories': + - 'List': 'categories_list.md' +- 'Chains': + - 'Get': 'chains_get.md' + - 'Search': 'chains_search.md' +- 'Google Locations': + - 'Report': 'google-locations_report.md' + - 'Search': 'google-locations_search.md' +- 'Verification Tokens': + - 'Generate': 'verification-tokens_generate.md' theme: readthedocs diff --git a/gen/mybusiness4-cli/src/client.rs b/gen/mybusiness4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mybusiness4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. 
Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mybusiness4-cli/src/main.rs b/gen/mybusiness4-cli/src/main.rs index c13219a3ba..8693a01b8a 100644 --- a/gen/mybusiness4-cli/src/main.rs +++ b/gen/mybusiness4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mybusiness4::{api, Error, oauth2}; +use google_mybusiness4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -878,7 +877,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -940,7 +939,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1682,7 +1681,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -2114,7 +2113,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2387,7 +2386,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2501,7 +2500,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2788,7 +2787,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2942,7 +2941,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3044,7 +3043,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3268,13 +3267,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match 
key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "attribute-mask" => { - call = call.attribute_mask(value.unwrap_or("")); + call = call.attribute_mask( value.map(|v| arg_from_str(v, err, "attribute-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3379,7 +3378,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3666,7 +3665,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3675,7 +3674,7 @@ where call = call.filter(value.unwrap_or("")); }, "answers-per-question" => { - call = call.answers_per_question(arg_from_str(value.unwrap_or("-0"), err, "answers-per-question", "integer")); + call = call.answers_per_question( value.map(|v| arg_from_str(v, err, "answers-per-question", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4005,7 +4004,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, 
"page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4311,7 +4310,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4515,7 +4514,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -4655,7 +4654,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -4729,7 +4728,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -4834,7 +4833,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "result-count" => { - call = call.result_count(arg_from_str(value.unwrap_or("-0"), err, "result-count", "integer")); + call = call.result_count( value.map(|v| arg_from_str(v, err, "result-count", "int32")).unwrap_or(-0)); }, "chain-display-name" => { call = call.chain_display_name(value.unwrap_or("")); @@ -7431,7 +7430,7 @@ async fn main() { let mut app = App::new("mybusiness4") .author("Sebastian Thiel ") - .version("4.0.1+0") + 
.version("5.0.2+0") .about("The Google My Business API provides an interface for managing business location information on Google.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mybusiness4_cli") .arg(Arg::with_name("folder") diff --git a/gen/mybusiness4/Cargo.toml b/gen/mybusiness4/Cargo.toml index 85267a346a..a1dd1e7fca 100644 --- a/gen/mybusiness4/Cargo.toml +++ b/gen/mybusiness4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-mybusiness4" -version = "5.0.2-beta-1+0" +version = "5.0.2+0" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusiness4" homepage = "https://developers.google.com/my-business/" -documentation = "https://docs.rs/google-mybusiness4/5.0.2-beta-1+0" +documentation = "https://docs.rs/google-mybusiness4/5.0.2+0" license = "MIT" keywords = ["mybusiness", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mybusiness4/README.md b/gen/mybusiness4/README.md index d66ef2b697..a6f45548c4 100644 --- a/gen/mybusiness4/README.md +++ b/gen/mybusiness4/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-mybusiness4` library allows access to all features of the *Google My Business* service. -This documentation was generated from *My Business* crate version *5.0.2-beta-1+0*, where *0* is the exact revision of the *mybusiness:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *My Business* crate version *5.0.2+0*, where *0* is the exact revision of the *mybusiness:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *My Business* *v4* API can be found at the [official documentation site](https://developers.google.com/my-business/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/MyBusiness) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/MyBusiness) ... -* [accounts](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::Account) - * [*admins create*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountAdminCreateCall), [*admins delete*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountAdminDeleteCall), [*admins list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountAdminListCall), [*admins patch*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountAdminPatchCall), [*create*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountCreateCall), [*delete notifications*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountDeleteNotificationCall), [*generate account number*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountGenerateAccountNumberCall), [*get*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountGetCall), [*get notifications*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountGetNotificationCall), [*invitations accept*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountInvitationAcceptCall), [*invitations decline*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountInvitationDeclineCall), [*invitations list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountInvitationListCall), [*list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountListCall), [*list recommend google 
locations*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountListRecommendGoogleLocationCall), [*locations admins create*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationAdminCreateCall), [*locations admins delete*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationAdminDeleteCall), [*locations admins list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationAdminListCall), [*locations admins patch*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationAdminPatchCall), [*locations associate*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationAssociateCall), [*locations batch get*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationBatchGetCall), [*locations batch get reviews*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationBatchGetReviewCall), [*locations clear association*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationClearAssociationCall), [*locations create*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationCreateCall), [*locations delete*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationDeleteCall), [*locations fetch verification options*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationFetchVerificationOptionCall), [*locations find matches*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationFindMatchCall), [*locations followers get metadata*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationFollowerGetMetadataCall), [*locations 
get*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationGetCall), [*locations get google updated*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationGetGoogleUpdatedCall), [*locations list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationListCall), [*locations local posts create*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationLocalPostCreateCall), [*locations local posts delete*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationLocalPostDeleteCall), [*locations local posts get*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationLocalPostGetCall), [*locations local posts list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationLocalPostListCall), [*locations local posts patch*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationLocalPostPatchCall), [*locations local posts report insights*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationLocalPostReportInsightCall), [*locations media create*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationMediaCreateCall), [*locations media customers get*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationMediaCustomerGetCall), [*locations media customers list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationMediaCustomerListCall), [*locations media delete*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationMediaDeleteCall), [*locations media get*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationMediaGetCall), [*locations media 
list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationMediaListCall), [*locations media patch*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationMediaPatchCall), [*locations media start upload*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationMediaStartUploadCall), [*locations patch*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationPatchCall), [*locations questions answers delete*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationQuestionAnswerDeleteCall), [*locations questions answers list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationQuestionAnswerListCall), [*locations questions answers upsert*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationQuestionAnswerUpsertCall), [*locations questions create*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationQuestionCreateCall), [*locations questions delete*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationQuestionDeleteCall), [*locations questions list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationQuestionListCall), [*locations questions patch*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationQuestionPatchCall), [*locations report insights*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationReportInsightCall), [*locations reviews delete reply*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationReviewDeleteReplyCall), [*locations reviews get*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationReviewGetCall), [*locations reviews 
list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationReviewListCall), [*locations reviews update reply*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationReviewUpdateReplyCall), [*locations transfer*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationTransferCall), [*locations verifications complete*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationVerificationCompleteCall), [*locations verifications list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationVerificationListCall), [*locations verify*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountLocationVerifyCall), [*update*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountUpdateCall) and [*update notifications*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AccountUpdateNotificationCall) -* [attributes](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::Attribute) - * [*list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::AttributeListCall) -* [categories](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::Category) - * [*list*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::CategoryListCall) -* [chains](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::Chain) - * [*get*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::ChainGetCall) and [*search*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::ChainSearchCall) -* [google locations](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::GoogleLocation) - * [*report*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::GoogleLocationReportCall) and 
[*search*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::GoogleLocationSearchCall) -* [verification tokens](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::VerificationToken) - * [*generate*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/api::VerificationTokenGenerateCall) +* [accounts](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::Account) + * [*admins create*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountAdminCreateCall), [*admins delete*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountAdminDeleteCall), [*admins list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountAdminListCall), [*admins patch*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountAdminPatchCall), [*create*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountCreateCall), [*delete notifications*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountDeleteNotificationCall), [*generate account number*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountGenerateAccountNumberCall), [*get*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountGetCall), [*get notifications*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountGetNotificationCall), [*invitations accept*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountInvitationAcceptCall), [*invitations decline*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountInvitationDeclineCall), [*invitations list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountInvitationListCall), [*list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountListCall), [*list recommend google 
locations*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountListRecommendGoogleLocationCall), [*locations admins create*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationAdminCreateCall), [*locations admins delete*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationAdminDeleteCall), [*locations admins list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationAdminListCall), [*locations admins patch*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationAdminPatchCall), [*locations associate*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationAssociateCall), [*locations batch get*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationBatchGetCall), [*locations batch get reviews*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationBatchGetReviewCall), [*locations clear association*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationClearAssociationCall), [*locations create*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationCreateCall), [*locations delete*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationDeleteCall), [*locations fetch verification options*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationFetchVerificationOptionCall), [*locations find matches*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationFindMatchCall), [*locations followers get metadata*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationFollowerGetMetadataCall), [*locations get*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationGetCall), [*locations get google 
updated*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationGetGoogleUpdatedCall), [*locations list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationListCall), [*locations local posts create*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationLocalPostCreateCall), [*locations local posts delete*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationLocalPostDeleteCall), [*locations local posts get*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationLocalPostGetCall), [*locations local posts list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationLocalPostListCall), [*locations local posts patch*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationLocalPostPatchCall), [*locations local posts report insights*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationLocalPostReportInsightCall), [*locations media create*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationMediaCreateCall), [*locations media customers get*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationMediaCustomerGetCall), [*locations media customers list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationMediaCustomerListCall), [*locations media delete*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationMediaDeleteCall), [*locations media get*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationMediaGetCall), [*locations media list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationMediaListCall), [*locations media patch*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationMediaPatchCall), [*locations media start 
upload*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationMediaStartUploadCall), [*locations patch*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationPatchCall), [*locations questions answers delete*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationQuestionAnswerDeleteCall), [*locations questions answers list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationQuestionAnswerListCall), [*locations questions answers upsert*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationQuestionAnswerUpsertCall), [*locations questions create*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationQuestionCreateCall), [*locations questions delete*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationQuestionDeleteCall), [*locations questions list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationQuestionListCall), [*locations questions patch*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationQuestionPatchCall), [*locations report insights*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationReportInsightCall), [*locations reviews delete reply*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationReviewDeleteReplyCall), [*locations reviews get*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationReviewGetCall), [*locations reviews list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationReviewListCall), [*locations reviews update reply*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationReviewUpdateReplyCall), [*locations transfer*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationTransferCall), [*locations 
verifications complete*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationVerificationCompleteCall), [*locations verifications list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationVerificationListCall), [*locations verify*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountLocationVerifyCall), [*update*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountUpdateCall) and [*update notifications*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AccountUpdateNotificationCall) +* [attributes](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::Attribute) + * [*list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::AttributeListCall) +* [categories](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::Category) + * [*list*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::CategoryListCall) +* [chains](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::Chain) + * [*get*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::ChainGetCall) and [*search*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::ChainSearchCall) +* [google locations](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::GoogleLocation) + * [*report*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::GoogleLocationReportCall) and [*search*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::GoogleLocationSearchCall) +* [verification tokens](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::VerificationToken) + * [*generate*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/api::VerificationTokenGenerateCall) @@ -33,17 +33,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/MyBusiness)** +* **[Hub](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/MyBusiness)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::CallBuilder) -* **[Resources](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::CallBuilder) +* **[Resources](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::Part)** + * **[Parts](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -195,17 +195,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -215,29 +215,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::Delegate) to the -[Method Builder](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::Delegate) to the +[Method Builder](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::RequestValue) and -[decodable](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::RequestValue) and +[decodable](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mybusiness4/5.0.2-beta-1+0/google_mybusiness4/client::RequestValue) are moved +* [request values](https://docs.rs/google-mybusiness4/5.0.2+0/google_mybusiness4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/mybusiness4/src/api.rs b/gen/mybusiness4/src/api.rs index 08d0d481a8..905aaac08b 100644 --- a/gen/mybusiness4/src/api.rs +++ b/gen/mybusiness4/src/api.rs @@ -102,7 +102,7 @@ impl<'a, S> MyBusiness { MyBusiness { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://mybusiness.googleapis.com/".to_string(), _root_url: "https://mybusiness.googleapis.com/".to_string(), } @@ -128,7 +128,7 @@ impl<'a, S> MyBusiness { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mybusiness4/src/client.rs b/gen/mybusiness4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mybusiness4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mybusiness4/src/lib.rs b/gen/mybusiness4/src/lib.rs index 17309acd1e..b32514c833 100644 --- a/gen/mybusiness4/src/lib.rs +++ b/gen/mybusiness4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *My Business* crate version *5.0.2-beta-1+0*, where *0* is the exact revision of the *mybusiness:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *My Business* crate version *5.0.2+0*, where *0* is the exact revision of the *mybusiness:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *My Business* *v4* API can be found at the //! [official documentation site](https://developers.google.com/my-business/). diff --git a/gen/mybusinessaccountmanagement1-cli/Cargo.toml b/gen/mybusinessaccountmanagement1-cli/Cargo.toml index f7e29a7ed0..7d5e941c4a 100644 --- a/gen/mybusinessaccountmanagement1-cli/Cargo.toml +++ b/gen/mybusinessaccountmanagement1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mybusinessaccountmanagement1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Account Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessaccountmanagement1-cli" @@ -20,13 +20,13 @@ name = "mybusinessaccountmanagement1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mybusinessaccountmanagement1] path = "../mybusinessaccountmanagement1" -version = "4.0.1+20220305" +version = "5.0.2+20230124" + diff --git a/gen/mybusinessaccountmanagement1-cli/README.md b/gen/mybusinessaccountmanagement1-cli/README.md index 
5bb2816261..221e792b2e 100644 --- a/gen/mybusinessaccountmanagement1-cli/README.md +++ b/gen/mybusinessaccountmanagement1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *My Business Account Management* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *My Business Account Management* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash mybusinessaccountmanagement1 [options] diff --git a/gen/mybusinessaccountmanagement1-cli/mkdocs.yml b/gen/mybusinessaccountmanagement1-cli/mkdocs.yml index 8fb0a66ac3..049b037a79 100644 --- a/gen/mybusinessaccountmanagement1-cli/mkdocs.yml +++ b/gen/mybusinessaccountmanagement1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: My Business Account Management v4.0.1+20220305 +site_name: My Business Account Management v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-mybusinessaccountmanagement1-cli site_description: A complete library to interact with My Business Account Management (protocol v1) @@ -7,24 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessaccoun docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_admins-create.md', 'Accounts', 'Admins Create'] -- ['accounts_admins-delete.md', 'Accounts', 'Admins Delete'] -- ['accounts_admins-list.md', 'Accounts', 'Admins List'] -- ['accounts_admins-patch.md', 'Accounts', 'Admins Patch'] -- ['accounts_create.md', 'Accounts', 'Create'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_invitations-accept.md', 'Accounts', 'Invitations Accept'] -- ['accounts_invitations-decline.md', 'Accounts', 'Invitations Decline'] -- ['accounts_invitations-list.md', 'Accounts', 'Invitations List'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_patch.md', 'Accounts', 'Patch'] -- ['locations_admins-create.md', 'Locations', 'Admins 
Create'] -- ['locations_admins-delete.md', 'Locations', 'Admins Delete'] -- ['locations_admins-list.md', 'Locations', 'Admins List'] -- ['locations_admins-patch.md', 'Locations', 'Admins Patch'] -- ['locations_transfer.md', 'Locations', 'Transfer'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Admins Create': 'accounts_admins-create.md' + - 'Admins Delete': 'accounts_admins-delete.md' + - 'Admins List': 'accounts_admins-list.md' + - 'Admins Patch': 'accounts_admins-patch.md' + - 'Create': 'accounts_create.md' + - 'Get': 'accounts_get.md' + - 'Invitations Accept': 'accounts_invitations-accept.md' + - 'Invitations Decline': 'accounts_invitations-decline.md' + - 'Invitations List': 'accounts_invitations-list.md' + - 'List': 'accounts_list.md' + - 'Patch': 'accounts_patch.md' +- 'Locations': + - 'Admins Create': 'locations_admins-create.md' + - 'Admins Delete': 'locations_admins-delete.md' + - 'Admins List': 'locations_admins-list.md' + - 'Admins Patch': 'locations_admins-patch.md' + - 'Transfer': 'locations_transfer.md' theme: readthedocs diff --git a/gen/mybusinessaccountmanagement1-cli/src/client.rs b/gen/mybusinessaccountmanagement1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mybusinessaccountmanagement1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - 
Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mybusinessaccountmanagement1-cli/src/main.rs b/gen/mybusinessaccountmanagement1-cli/src/main.rs index 0b26d9f563..cfa208a383 100644 --- a/gen/mybusinessaccountmanagement1-cli/src/main.rs +++ b/gen/mybusinessaccountmanagement1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mybusinessaccountmanagement1::{api, Error, oauth2}; +use google_mybusinessaccountmanagement1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -74,12 +73,13 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "account" => Some(("account", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "admin" => Some(("admin", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pending-invitation" => Some(("pendingInvitation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "role" => Some(("role", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin", "name", "pending-invitation", "role"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account", "admin", "name", "pending-invitation", "role"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -257,12 +257,13 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "account" => Some(("account", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "admin" => Some(("admin", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pending-invitation" => Some(("pendingInvitation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "role" => Some(("role", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin", "name", "pending-invitation", "role"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account", "admin", "name", "pending-invitation", "role"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -277,7 +278,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); 
+ call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -703,7 +704,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -813,10 +814,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -885,12 +886,13 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "account" => Some(("account", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "admin" => Some(("admin", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pending-invitation" => Some(("pendingInvitation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "role" => Some(("role", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin", "name", "pending-invitation", "role"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account", "admin", "name", "pending-invitation", "role"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1068,12 +1070,13 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "account" => Some(("account", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "admin" => Some(("admin", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "pending-invitation" => Some(("pendingInvitation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "role" => Some(("role", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["admin", "name", "pending-invitation", "role"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account", "admin", "name", "pending-invitation", "role"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1088,7 +1091,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1762,8 +1765,8 @@ async fn main() { let mut app = App::new("mybusinessaccountmanagement1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") - .about("The My Business Account Management API provides an interface for managing access to a location on Google.") + .version("5.0.2+20230124") + .about("The My Business Account Management API provides an interface for managing access to a location on Google. Note - If you have a quota of 0 after enabling the API, please request for GBP API access.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mybusinessaccountmanagement1_cli") .arg(Arg::with_name("folder") .long("config-dir") diff --git a/gen/mybusinessaccountmanagement1/Cargo.toml b/gen/mybusinessaccountmanagement1/Cargo.toml index 095cb60e72..eb6f08277f 100644 --- a/gen/mybusinessaccountmanagement1/Cargo.toml +++ b/gen/mybusinessaccountmanagement1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-mybusinessaccountmanagement1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Account Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessaccountmanagement1" homepage = "https://developers.google.com/my-business/" -documentation = "https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124" license = "MIT" keywords = ["mybusinessaccountman", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mybusinessaccountmanagement1/README.md b/gen/mybusinessaccountmanagement1/README.md index 13eea1bbdc..23f3848e71 100644 --- 
a/gen/mybusinessaccountmanagement1/README.md +++ b/gen/mybusinessaccountmanagement1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-mybusinessaccountmanagement1` library allows access to all features of the *Google My Business Account Management* service. -This documentation was generated from *My Business Account Management* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessaccountmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *My Business Account Management* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessaccountmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *My Business Account Management* *v1* API can be found at the [official documentation site](https://developers.google.com/my-business/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/MyBusinessAccountManagement) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/MyBusinessAccountManagement) ... 
-* [accounts](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::Account) - * [*admins create*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountAdminCreateCall), [*admins delete*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountAdminDeleteCall), [*admins list*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountAdminListCall), [*admins patch*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountAdminPatchCall), [*create*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountCreateCall), [*get*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountGetCall), [*invitations accept*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountInvitationAcceptCall), [*invitations decline*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountInvitationDeclineCall), [*invitations list*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountInvitationListCall), [*list*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountListCall) and [*patch*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::AccountPatchCall) +* [accounts](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::Account) + * [*admins 
create*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountAdminCreateCall), [*admins delete*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountAdminDeleteCall), [*admins list*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountAdminListCall), [*admins patch*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountAdminPatchCall), [*create*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountCreateCall), [*get*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountGetCall), [*invitations accept*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountInvitationAcceptCall), [*invitations decline*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountInvitationDeclineCall), [*invitations list*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountInvitationListCall), [*list*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountListCall) and [*patch*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::AccountPatchCall) * locations - * [*admins create*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::LocationAdminCreateCall), [*admins delete*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::LocationAdminDeleteCall), [*admins 
list*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::LocationAdminListCall), [*admins patch*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::LocationAdminPatchCall) and [*transfer*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/api::LocationTransferCall) + * [*admins create*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::LocationAdminCreateCall), [*admins delete*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::LocationAdminDeleteCall), [*admins list*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::LocationAdminListCall), [*admins patch*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::LocationAdminPatchCall) and [*transfer*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/api::LocationTransferCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/MyBusinessAccountManagement)** +* **[Hub](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/MyBusinessAccountManagement)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::CallBuilder) -* **[Resources](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::CallBuilder) +* **[Resources](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::Part)** + * **[Parts](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::Delegate) to the -[Method Builder](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::Delegate) to the +[Method Builder](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::RequestValue) and -[decodable](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::RequestValue) and +[decodable](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2-beta-1+20230124/google_mybusinessaccountmanagement1/client::RequestValue) are moved +* [request values](https://docs.rs/google-mybusinessaccountmanagement1/5.0.2+20230124/google_mybusinessaccountmanagement1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/mybusinessaccountmanagement1/src/api.rs b/gen/mybusinessaccountmanagement1/src/api.rs index 6d944e9fd2..31ebb96a24 100644 --- a/gen/mybusinessaccountmanagement1/src/api.rs +++ b/gen/mybusinessaccountmanagement1/src/api.rs @@ -101,7 +101,7 @@ impl<'a, S> MyBusinessAccountManagement { MyBusinessAccountManagement { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://mybusinessaccountmanagement.googleapis.com/".to_string(), _root_url: "https://mybusinessaccountmanagement.googleapis.com/".to_string(), } @@ -115,7 +115,7 @@ impl<'a, S> MyBusinessAccountManagement { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mybusinessaccountmanagement1/src/client.rs b/gen/mybusinessaccountmanagement1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mybusinessaccountmanagement1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mybusinessaccountmanagement1/src/lib.rs b/gen/mybusinessaccountmanagement1/src/lib.rs index 2db741a5ea..f4ef07507e 100644 --- a/gen/mybusinessaccountmanagement1/src/lib.rs +++ b/gen/mybusinessaccountmanagement1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *My Business Account Management* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessaccountmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *My Business Account Management* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessaccountmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *My Business Account Management* *v1* API can be found at the //! [official documentation site](https://developers.google.com/my-business/). diff --git a/gen/mybusinessbusinesscalls1-cli/Cargo.toml b/gen/mybusinessbusinesscalls1-cli/Cargo.toml index d91f563228..4ed55c8e8b 100644 --- a/gen/mybusinessbusinesscalls1-cli/Cargo.toml +++ b/gen/mybusinessbusinesscalls1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mybusinessbusinesscalls1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Business Calls (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessbusinesscalls1-cli" @@ -20,13 +20,13 @@ name = "mybusinessbusinesscalls1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mybusinessbusinesscalls1] path = "../mybusinessbusinesscalls1" -version = "4.0.1+20220305" +version = "5.0.2+20230124" + diff --git 
a/gen/mybusinessbusinesscalls1-cli/README.md b/gen/mybusinessbusinesscalls1-cli/README.md index 9a8563aaab..d7250b20bd 100644 --- a/gen/mybusinessbusinesscalls1-cli/README.md +++ b/gen/mybusinessbusinesscalls1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *My Business Business Calls* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *My Business Business Calls* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash mybusinessbusinesscalls1 [options] diff --git a/gen/mybusinessbusinesscalls1-cli/mkdocs.yml b/gen/mybusinessbusinesscalls1-cli/mkdocs.yml index f640f51779..7df255ff6e 100644 --- a/gen/mybusinessbusinesscalls1-cli/mkdocs.yml +++ b/gen/mybusinessbusinesscalls1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: My Business Business Calls v4.0.1+20220305 +site_name: My Business Business Calls v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-mybusinessbusinesscalls1-cli site_description: A complete library to interact with My Business Business Calls (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessbusine docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['locations_businesscallsinsights-list.md', 'Locations', 'Businesscallsinsights List'] -- ['locations_get-businesscallssettings.md', 'Locations', 'Get Businesscallssettings'] -- ['locations_update-businesscallssettings.md', 'Locations', 'Update Businesscallssettings'] +nav: +- Home: 'index.md' +- 'Locations': + - 'Businesscallsinsights List': 'locations_businesscallsinsights-list.md' + - 'Get Businesscallssettings': 'locations_get-businesscallssettings.md' + - 'Update Businesscallssettings': 'locations_update-businesscallssettings.md' theme: readthedocs diff --git a/gen/mybusinessbusinesscalls1-cli/src/client.rs 
b/gen/mybusinessbusinesscalls1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mybusinessbusinesscalls1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mybusinessbusinesscalls1-cli/src/main.rs b/gen/mybusinessbusinesscalls1-cli/src/main.rs index 6c763f3847..0c1fec7f56 100644 --- a/gen/mybusinessbusinesscalls1-cli/src/main.rs +++ b/gen/mybusinessbusinesscalls1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mybusinessbusinesscalls1::{api, Error, oauth2}; +use google_mybusinessbusinesscalls1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -201,7 +200,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -421,8 +420,8 @@ async fn main() { let mut app = App::new("mybusinessbusinesscalls1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") - .about("The My Business Business Calls API manages business calls information of a location on Google.") + .version("5.0.2+20230124") + .about("The My Business Business Calls API manages business calls information of 
a location on Google and collect insights like the number of missed calls to their location. Additional information about Business calls can be found at https://support.google.com/business/answer/9688285?p=call_history. If the Google Business Profile links to a Google Ads account and call history is turned on, calls that last longer than a specific time, and that can be attributed to an ad interaction, will show in the linked Google Ads account under the \"Calls from Ads\" conversion. If smart bidding and call conversions are used in the optimization strategy, there could be a change in ad spend. Learn more about smart bidding. To view and perform actions on a location's calls, you need to be a `OWNER`, `CO_OWNER` or `MANAGER` of the location. Note - If you have a quota of 0 after enabling the API, please request for GBP API access.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mybusinessbusinesscalls1_cli") .arg(Arg::with_name("folder") .long("config-dir") diff --git a/gen/mybusinessbusinesscalls1/Cargo.toml b/gen/mybusinessbusinesscalls1/Cargo.toml index 43ee2bb546..7489ab071a 100644 --- a/gen/mybusinessbusinesscalls1/Cargo.toml +++ b/gen/mybusinessbusinesscalls1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-mybusinessbusinesscalls1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Business Calls (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessbusinesscalls1" homepage = "https://developers.google.com/my-business/" -documentation = "https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124" license = "MIT" keywords = ["mybusinessbusinessca", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mybusinessbusinesscalls1/README.md 
b/gen/mybusinessbusinesscalls1/README.md index e3aa18885e..6ef4b0f8ce 100644 --- a/gen/mybusinessbusinesscalls1/README.md +++ b/gen/mybusinessbusinesscalls1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-mybusinessbusinesscalls1` library allows access to all features of the *Google My Business Business Calls* service. -This documentation was generated from *My Business Business Calls* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessbusinesscalls:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *My Business Business Calls* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessbusinesscalls:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *My Business Business Calls* *v1* API can be found at the [official documentation site](https://developers.google.com/my-business/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/MyBusinessBusinessCalls) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/MyBusinessBusinessCalls) ... 
* locations - * [*businesscallsinsights list*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/api::LocationBusinesscallsinsightListCall), [*get businesscallssettings*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/api::LocationGetBusinesscallssettingCall) and [*update businesscallssettings*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/api::LocationUpdateBusinesscallssettingCall) + * [*businesscallsinsights list*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/api::LocationBusinesscallsinsightListCall), [*get businesscallssettings*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/api::LocationGetBusinesscallssettingCall) and [*update businesscallssettings*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/api::LocationUpdateBusinesscallssettingCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/MyBusinessBusinessCalls)** +* **[Hub](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/MyBusinessBusinessCalls)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::CallBuilder) +* **[Resources](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::Part)** + * **[Parts](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::Delegate) to the -[Method Builder](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::Delegate) to the +[Method Builder](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::RequestValue) and -[decodable](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::RequestValue) and +[decodable](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2-beta-1+20230124/google_mybusinessbusinesscalls1/client::RequestValue) are moved +* [request values](https://docs.rs/google-mybusinessbusinesscalls1/5.0.2+20230124/google_mybusinessbusinesscalls1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/mybusinessbusinesscalls1/src/api.rs b/gen/mybusinessbusinesscalls1/src/api.rs index d33b5c596e..bba6071d6f 100644 --- a/gen/mybusinessbusinesscalls1/src/api.rs +++ b/gen/mybusinessbusinesscalls1/src/api.rs @@ -104,7 +104,7 @@ impl<'a, S> MyBusinessBusinessCalls { MyBusinessBusinessCalls { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://mybusinessbusinesscalls.googleapis.com/".to_string(), _root_url: "https://mybusinessbusinesscalls.googleapis.com/".to_string(), } @@ -115,7 +115,7 @@ impl<'a, S> MyBusinessBusinessCalls { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mybusinessbusinesscalls1/src/client.rs b/gen/mybusinessbusinesscalls1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mybusinessbusinesscalls1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mybusinessbusinesscalls1/src/lib.rs b/gen/mybusinessbusinesscalls1/src/lib.rs index e98c7ddabb..73eabd81a2 100644 --- a/gen/mybusinessbusinesscalls1/src/lib.rs +++ b/gen/mybusinessbusinesscalls1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *My Business Business Calls* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessbusinesscalls:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *My Business Business Calls* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessbusinesscalls:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *My Business Business Calls* *v1* API can be found at the //! [official documentation site](https://developers.google.com/my-business/). diff --git a/gen/mybusinessbusinessinformation1-cli/Cargo.toml b/gen/mybusinessbusinessinformation1-cli/Cargo.toml index aed17854f5..a892994b4e 100644 --- a/gen/mybusinessbusinessinformation1-cli/Cargo.toml +++ b/gen/mybusinessbusinessinformation1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mybusinessbusinessinformation1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Business Information (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessbusinessinformation1-cli" @@ -20,13 +20,13 @@ name = "mybusinessbusinessinformation1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mybusinessbusinessinformation1] path = "../mybusinessbusinessinformation1" -version = "4.0.1+20220305" +version = "5.0.2+20230124" + diff --git 
a/gen/mybusinessbusinessinformation1-cli/README.md b/gen/mybusinessbusinessinformation1-cli/README.md index 598c065af8..88e85f67ed 100644 --- a/gen/mybusinessbusinessinformation1-cli/README.md +++ b/gen/mybusinessbusinessinformation1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *My Business Business Information* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *My Business Business Information* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash mybusinessbusinessinformation1 [options] diff --git a/gen/mybusinessbusinessinformation1-cli/mkdocs.yml b/gen/mybusinessbusinessinformation1-cli/mkdocs.yml index b65d8cdec7..92b29945da 100644 --- a/gen/mybusinessbusinessinformation1-cli/mkdocs.yml +++ b/gen/mybusinessbusinessinformation1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: My Business Business Information v4.0.1+20220305 +site_name: My Business Business Information v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-mybusinessbusinessinformation1-cli site_description: A complete library to interact with My Business Business Information (protocol v1) @@ -7,25 +7,31 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessbusine docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_locations-create.md', 'Accounts', 'Locations Create'] -- ['accounts_locations-list.md', 'Accounts', 'Locations List'] -- ['attributes_list.md', 'Attributes', 'List'] -- ['categories_batch-get.md', 'Categories', 'Batch Get'] -- ['categories_list.md', 'Categories', 'List'] -- ['chains_get.md', 'Chains', 'Get'] -- ['chains_search.md', 'Chains', 'Search'] -- ['google-locations_search.md', 'Google Locations', 'Search'] -- ['locations_associate.md', 'Locations', 'Associate'] -- ['locations_attributes-get-google-updated.md', 'Locations', 'Attributes 
Get Google Updated'] -- ['locations_clear-location-association.md', 'Locations', 'Clear Location Association'] -- ['locations_delete.md', 'Locations', 'Delete'] -- ['locations_get.md', 'Locations', 'Get'] -- ['locations_get-attributes.md', 'Locations', 'Get Attributes'] -- ['locations_get-google-updated.md', 'Locations', 'Get Google Updated'] -- ['locations_patch.md', 'Locations', 'Patch'] -- ['locations_update-attributes.md', 'Locations', 'Update Attributes'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Locations Create': 'accounts_locations-create.md' + - 'Locations List': 'accounts_locations-list.md' +- 'Attributes': + - 'List': 'attributes_list.md' +- 'Categories': + - 'Batch Get': 'categories_batch-get.md' + - 'List': 'categories_list.md' +- 'Chains': + - 'Get': 'chains_get.md' + - 'Search': 'chains_search.md' +- 'Google Locations': + - 'Search': 'google-locations_search.md' +- 'Locations': + - 'Associate': 'locations_associate.md' + - 'Attributes Get Google Updated': 'locations_attributes-get-google-updated.md' + - 'Clear Location Association': 'locations_clear-location-association.md' + - 'Delete': 'locations_delete.md' + - 'Get': 'locations_get.md' + - 'Get Attributes': 'locations_get-attributes.md' + - 'Get Google Updated': 'locations_get-google-updated.md' + - 'Patch': 'locations_patch.md' + - 'Update Attributes': 'locations_update-attributes.md' theme: readthedocs diff --git a/gen/mybusinessbusinessinformation1-cli/src/client.rs b/gen/mybusinessbusinessinformation1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mybusinessbusinessinformation1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use 
std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mybusinessbusinessinformation1-cli/src/main.rs b/gen/mybusinessbusinessinformation1-cli/src/main.rs index e143a26b97..acb6d31980 100644 --- a/gen/mybusinessbusinessinformation1-cli/src/main.rs +++ b/gen/mybusinessbusinessinformation1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mybusinessbusinessinformation1::{api, Error, oauth2}; +use google_mybusinessbusinessinformation1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -139,7 +138,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -195,13 +194,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = 
call.order_by(value.unwrap_or("")); @@ -260,7 +259,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-all" => { - call = call.show_all(arg_from_str(value.unwrap_or("false"), err, "show-all", "boolean")); + call = call.show_all( value.map(|v| arg_from_str(v, err, "show-all", "boolean")).unwrap_or(false)); }, "region-code" => { call = call.region_code(value.unwrap_or("")); @@ -272,7 +271,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -402,7 +401,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -510,7 +509,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "chain-name" => { call = call.chain_name(value.unwrap_or("")); @@ -959,7 +958,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1061,7 +1060,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| 
arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1195,10 +1194,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1284,7 +1283,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "attribute-mask" => { - call = call.attribute_mask(value.unwrap_or("")); + call = call.attribute_mask( value.map(|v| arg_from_str(v, err, "attribute-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1909,8 +1908,8 @@ async fn main() { let mut app = App::new("mybusinessbusinessinformation1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") - .about("The My Business Business Information API provides an interface for managing business information on Google.") + .version("5.0.2+20230124") + .about("The My Business Business Information API provides an interface for managing business information. 
Note - If you have a quota of 0 after enabling the API, please request for GBP API access.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mybusinessbusinessinformation1_cli") .arg(Arg::with_name("folder") .long("config-dir") diff --git a/gen/mybusinessbusinessinformation1/Cargo.toml b/gen/mybusinessbusinessinformation1/Cargo.toml index 25c2538a3f..cfa54393e7 100644 --- a/gen/mybusinessbusinessinformation1/Cargo.toml +++ b/gen/mybusinessbusinessinformation1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-mybusinessbusinessinformation1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Business Information (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessbusinessinformation1" homepage = "https://developers.google.com/my-business/" -documentation = "https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124" license = "MIT" keywords = ["mybusinessbusinessin", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mybusinessbusinessinformation1/README.md b/gen/mybusinessbusinessinformation1/README.md index aba9d65a7b..a2a330911b 100644 --- a/gen/mybusinessbusinessinformation1/README.md +++ b/gen/mybusinessbusinessinformation1/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! --> The `google-mybusinessbusinessinformation1` library allows access to all features of the *Google My Business Business Information* service. -This documentation was generated from *My Business Business Information* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessbusinessinformation:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *My Business Business Information* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessbusinessinformation:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *My Business Business Information* *v1* API can be found at the [official documentation site](https://developers.google.com/my-business/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/MyBusinessBusinessInformation) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/MyBusinessBusinessInformation) ... * accounts - * [*locations create*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::AccountLocationCreateCall) and [*locations list*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::AccountLocationListCall) -* [attributes](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::Attribute) - * [*list*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::AttributeListCall) -* [categories](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::Category) - * [*batch get*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::CategoryBatchGetCall) and [*list*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::CategoryListCall) -* 
[chains](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::Chain) - * [*get*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::ChainGetCall) and [*search*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::ChainSearchCall) -* [google locations](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::GoogleLocation) - * [*search*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::GoogleLocationSearchCall) -* [locations](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::Location) - * [*associate*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationAssociateCall), [*attributes get google updated*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationAttributeGetGoogleUpdatedCall), [*clear location association*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationClearLocationAssociationCall), [*delete*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationDeleteCall), [*get*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationGetCall), [*get attributes*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationGetAttributeCall), [*get google 
updated*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationGetGoogleUpdatedCall), [*patch*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationPatchCall) and [*update attributes*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/api::LocationUpdateAttributeCall) + * [*locations create*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::AccountLocationCreateCall) and [*locations list*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::AccountLocationListCall) +* [attributes](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::Attribute) + * [*list*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::AttributeListCall) +* [categories](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::Category) + * [*batch get*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::CategoryBatchGetCall) and [*list*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::CategoryListCall) +* [chains](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::Chain) + * [*get*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::ChainGetCall) and [*search*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::ChainSearchCall) +* [google 
locations](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::GoogleLocation) + * [*search*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::GoogleLocationSearchCall) +* [locations](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::Location) + * [*associate*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationAssociateCall), [*attributes get google updated*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationAttributeGetGoogleUpdatedCall), [*clear location association*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationClearLocationAssociationCall), [*delete*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationDeleteCall), [*get*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationGetCall), [*get attributes*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationGetAttributeCall), [*get google updated*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationGetGoogleUpdatedCall), [*patch*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationPatchCall) and [*update attributes*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/api::LocationUpdateAttributeCall) @@ -33,17 +33,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the 
following primary items: -* **[Hub](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/MyBusinessBusinessInformation)** +* **[Hub](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/MyBusinessBusinessInformation)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::CallBuilder) -* **[Resources](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::CallBuilder) +* **[Resources](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::Part)** + * **[Parts](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -145,17 +145,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -165,29 +165,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::Delegate) to the -[Method Builder](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::Delegate) to the +[Method Builder](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::RequestValue) and -[decodable](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::RequestValue) and +[decodable](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2-beta-1+20230124/google_mybusinessbusinessinformation1/client::RequestValue) are moved +* [request values](https://docs.rs/google-mybusinessbusinessinformation1/5.0.2+20230124/google_mybusinessbusinessinformation1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/mybusinessbusinessinformation1/src/api.rs b/gen/mybusinessbusinessinformation1/src/api.rs index 89f116b6c5..59bd8d3566 100644 --- a/gen/mybusinessbusinessinformation1/src/api.rs +++ b/gen/mybusinessbusinessinformation1/src/api.rs @@ -105,7 +105,7 @@ impl<'a, S> MyBusinessBusinessInformation { MyBusinessBusinessInformation { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://mybusinessbusinessinformation.googleapis.com/".to_string(), _root_url: "https://mybusinessbusinessinformation.googleapis.com/".to_string(), } @@ -131,7 +131,7 @@ impl<'a, S> MyBusinessBusinessInformation { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mybusinessbusinessinformation1/src/client.rs b/gen/mybusinessbusinessinformation1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mybusinessbusinessinformation1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// 
Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. 
The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. 
- fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. 
- /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. 
- MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." 
- ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. -pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. 
-#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mybusinessbusinessinformation1/src/lib.rs b/gen/mybusinessbusinessinformation1/src/lib.rs index 7739ac6270..a7a5c5b3ab 100644 --- a/gen/mybusinessbusinessinformation1/src/lib.rs +++ b/gen/mybusinessbusinessinformation1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *My Business Business Information* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessbusinessinformation:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *My Business Business Information* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessbusinessinformation:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *My Business Business Information* *v1* API can be found at the //! [official documentation site](https://developers.google.com/my-business/). diff --git a/gen/mybusinesslodging1-cli/Cargo.toml b/gen/mybusinesslodging1-cli/Cargo.toml index cf6e319990..490ae034f1 100644 --- a/gen/mybusinesslodging1-cli/Cargo.toml +++ b/gen/mybusinesslodging1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mybusinesslodging1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Lodging (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinesslodging1-cli" @@ -20,13 +20,13 @@ name = "mybusinesslodging1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mybusinesslodging1] path = "../mybusinesslodging1" -version = "4.0.1+20220305" +version = "5.0.2+20230124" + diff --git a/gen/mybusinesslodging1-cli/README.md b/gen/mybusinesslodging1-cli/README.md index 
8b70791eea..722abd2372 100644 --- a/gen/mybusinesslodging1-cli/README.md +++ b/gen/mybusinesslodging1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *My Business Lodging* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *My Business Lodging* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash mybusinesslodging1 [options] diff --git a/gen/mybusinesslodging1-cli/mkdocs.yml b/gen/mybusinesslodging1-cli/mkdocs.yml index 5c81286e51..562a56efb6 100644 --- a/gen/mybusinesslodging1-cli/mkdocs.yml +++ b/gen/mybusinesslodging1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: My Business Lodging v4.0.1+20220305 +site_name: My Business Lodging v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-mybusinesslodging1-cli site_description: A complete library to interact with My Business Lodging (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinesslodgin docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['locations_get-lodging.md', 'Locations', 'Get Lodging'] -- ['locations_lodging-get-google-updated.md', 'Locations', 'Lodging Get Google Updated'] -- ['locations_update-lodging.md', 'Locations', 'Update Lodging'] +nav: +- Home: 'index.md' +- 'Locations': + - 'Get Lodging': 'locations_get-lodging.md' + - 'Lodging Get Google Updated': 'locations_lodging-get-google-updated.md' + - 'Update Lodging': 'locations_update-lodging.md' theme: readthedocs diff --git a/gen/mybusinesslodging1-cli/src/client.rs b/gen/mybusinesslodging1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mybusinesslodging1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, 
ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mybusinesslodging1-cli/src/main.rs b/gen/mybusinesslodging1-cli/src/main.rs index 1107772d7f..78e6acf6bd 100644 --- a/gen/mybusinesslodging1-cli/src/main.rs +++ b/gen/mybusinesslodging1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mybusinesslodging1::{api, Error, oauth2}; +use google_mybusinesslodging1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,7 +57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -111,7 +110,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -532,6 +531,8 @@ where "families.kids-activities-exception" => Some(("families.kidsActivitiesException", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "families.kids-club" => Some(("families.kidsClub", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "families.kids-club-exception" => Some(("families.kidsClubException", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "families.kids-friendly" => Some(("families.kidsFriendly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "families.kids-friendly-exception" => Some(("families.kidsFriendlyException", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "food-and-drink.bar" => Some(("foodAndDrink.bar", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "food-and-drink.bar-exception" => Some(("foodAndDrink.barException", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "food-and-drink.breakfast-available" => Some(("foodAndDrink.breakfastAvailable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1009,7 +1010,7 @@ where "wellness.weight-machine" => Some(("wellness.weightMachine", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "wellness.weight-machine-exception" => Some(("wellness.weightMachineException", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["accessibility", "activities", "ada-compliant-unit", "ada-compliant-unit-exception", "adult-pool", "adult-pool-exception", "air-conditioning", "air-conditioning-exception", "airport-shuttle", "airport-shuttle-exception", "all-inclusive-available", "all-inclusive-available-exception", "all-inclusive-only", "all-inclusive-only-exception", "all-units", "babysitting", "babysitting-exception", "baggage-storage", "baggage-storage-exception", "balcony", "balcony-exception", "bar", "bar-exception", "bathtub", "bathtub-exception", "beach-access", "beach-access-exception", "beach-front", "beach-front-exception", "beach-view", "beach-view-exception", "beds-count", "beds-count-exception", "bicycle-rental", "bicycle-rental-exception", "bidet", "bidet-exception", "boutique-stores", "boutique-stores-exception", "breakfast-available", "breakfast-available-exception", "breakfast-buffet", "breakfast-buffet-exception", 
"breeam-certification", "breeam-certification-exception", "buffet", "buffet-exception", "built-year", "built-year-exception", "bungalow-or-villa", "bungalow-or-villa-exception", "bunk-beds-count", "bunk-beds-count-exception", "business", "business-center", "business-center-exception", "car-rental-on-property", "car-rental-on-property-exception", "carbon-free-energy-sources", "carbon-free-energy-sources-exception", "cash", "cash-exception", "casino", "casino-exception", "cats-allowed", "cats-allowed-exception", "checkin-time", "checkin-time-exception", "checkout-time", "checkout-time-exception", "cheque", "cheque-exception", "city-view", "city-view-exception", "coffee-maker", "coffee-maker-exception", "commercial-grade-disinfectant-cleaning", "commercial-grade-disinfectant-cleaning-exception", "common-areas-enhanced-cleaning", "common-areas-enhanced-cleaning-exception", "common-areas-offer-sanitizing-items", "common-areas-offer-sanitizing-items-exception", "common-areas-physical-distancing-arranged", "common-areas-physical-distancing-arranged-exception", "common-living-area", "compostable-food-containers-and-cutlery", "compostable-food-containers-and-cutlery-exception", "composts-excess-food", "composts-excess-food-exception", "concierge", "concierge-exception", "connecting-unit-available", "connecting-unit-available-exception", "connectivity", "contactless-checkin-checkout", "contactless-checkin-checkout-exception", "convenience-store", "convenience-store-exception", "cookware", "cookware-exception", "credit-card", "credit-card-exception", "cribs-count", "cribs-count-exception", "currency-exchange", "currency-exchange-exception", "daily-housekeeping", "daily-housekeeping-exception", "debit-card", "debit-card-exception", "digital-guest-room-keys", "digital-guest-room-keys-exception", "dining-areas-additional-sanitation", "dining-areas-additional-sanitation-exception", "dinner-buffet", "dinner-buffet-exception", "dishwasher", "dishwasher-exception", 
"disposable-flatware", "disposable-flatware-exception", "doctor-on-call", "doctor-on-call-exception", "dogs-allowed", "dogs-allowed-exception", "donates-excess-food", "donates-excess-food-exception", "double-beds-count", "double-beds-count-exception", "dryer", "dryer-exception", "eating", "eco-friendly-toiletries", "eco-friendly-toiletries-exception", "electric-car-charging-stations", "electric-car-charging-stations-exception", "electronic-room-key", "electronic-room-key-exception", "elevator", "elevator-exception", "elliptical-machine", "elliptical-machine-exception", "employees-trained-cleaning-procedures", "employees-trained-cleaning-procedures-exception", "employees-trained-thorough-hand-washing", "employees-trained-thorough-hand-washing-exception", "employees-wear-protective-equipment", "employees-wear-protective-equipment-exception", "energy-conservation-program", "energy-conservation-program-exception", "energy-efficiency", "energy-efficient-heating-and-cooling-systems", "energy-efficient-heating-and-cooling-systems-exception", "energy-efficient-lighting", "energy-efficient-lighting-exception", "energy-saving-thermostats", "energy-saving-thermostats-exception", "enhanced-cleaning", "executive-floor", "executive-floor-exception", "face-mask-required", "face-mask-required-exception", "families", "feather-pillows", "feather-pillows-exception", "features", "fireplace", "fireplace-exception", "fitness-center", "fitness-center-exception", "floors-count", "floors-count-exception", "food-and-drink", "food-preparation-and-serving-additional-safety", "food-preparation-and-serving-additional-safety-exception", "food-waste-reduction-program", "food-waste-reduction-program-exception", "free-airport-shuttle", "free-airport-shuttle-exception", "free-bicycle-rental", "free-bicycle-rental-exception", "free-breakfast", "free-breakfast-exception", "free-fitness-center", "free-fitness-center-exception", "free-parking", "free-parking-exception", "free-private-car-service", 
"free-private-car-service-exception", "free-self-parking", "free-self-parking-exception", "free-valet-parking", "free-valet-parking-exception", "free-watercraft-rental", "free-watercraft-rental-exception", "free-weights", "free-weights-exception", "free-wifi", "free-wifi-exception", "front-desk", "front-desk-exception", "full-service-laundry", "full-service-laundry-exception", "game-room", "game-room-exception", "garden-view", "garden-view-exception", "gift-shop", "gift-shop-exception", "golf", "golf-exception", "green-building-design", "green-building-design-exception", "guest-room-hygiene-kits-available", "guest-room-hygiene-kits-available-exception", "guest-rooms-enhanced-cleaning", "guest-rooms-enhanced-cleaning-exception", "hairdryer", "hairdryer-exception", "health-and-safety", "hearing-accessible-doorbell", "hearing-accessible-doorbell-exception", "hearing-accessible-fire-alarm", "hearing-accessible-fire-alarm-exception", "hearing-accessible-unit", "hearing-accessible-unit-exception", "heating", "heating-exception", "horseback-riding", "horseback-riding-exception", "hot-tub", "hot-tub-exception", "hours", "housekeeping", "housekeeping-available", "housekeeping-available-exception", "housekeeping-scheduled-request-only", "housekeeping-scheduled-request-only-exception", "hypoallergenic-bedding", "hypoallergenic-bedding-exception", "increased-food-safety", "independent-organization-audits-energy-use", "independent-organization-audits-energy-use-exception", "independent-organization-audits-water-use", "independent-organization-audits-water-use-exception", "individual-packaged-meals", "individual-packaged-meals-exception", "indoor-grill", "indoor-grill-exception", "indoor-pool", "indoor-pool-exception", "indoor-pools-count", "indoor-pools-count-exception", "inunit-safe", "inunit-safe-exception", "inunit-wifi-available", "inunit-wifi-available-exception", "ironing-equipment", "ironing-equipment-exception", "kettle", "kettle-exception", "kids-activities", 
"kids-activities-exception", "kids-club", "kids-club-exception", "kids-stay-free", "kids-stay-free-exception", "king-beds-count", "king-beds-count-exception", "kitchen-available", "kitchen-available-exception", "lake-view", "lake-view-exception", "landmark-view", "landmark-view-exception", "last-renovated-year", "last-renovated-year-exception", "layout", "lazy-river", "lazy-river-exception", "leed-certification", "leed-certification-exception", "lifeguard", "lifeguard-exception", "linen-reuse-program", "linen-reuse-program-exception", "living-area-sq-meters", "living-area-sq-meters-exception", "local-shuttle", "local-shuttle-exception", "locally-sourced-food-and-beverages", "locally-sourced-food-and-beverages-exception", "loft", "loft-exception", "massage", "massage-exception", "max-adult-occupants-count", "max-adult-occupants-count-exception", "max-child-age", "max-child-age-exception", "max-child-occupants-count", "max-child-occupants-count-exception", "max-kids-stay-free-count", "max-kids-stay-free-count-exception", "max-occupants-count", "max-occupants-count-exception", "meeting-rooms", "meeting-rooms-count", "meeting-rooms-count-exception", "meeting-rooms-exception", "memory-foam-pillows", "memory-foam-pillows-exception", "metadata", "microwave", "microwave-exception", "minibar", "minibar-exception", "minimized-contact", "minutes", "mobile-nfc", "mobile-nfc-exception", "mobility-accessible", "mobility-accessible-bathtub", "mobility-accessible-bathtub-exception", "mobility-accessible-elevator", "mobility-accessible-elevator-exception", "mobility-accessible-exception", "mobility-accessible-parking", "mobility-accessible-parking-exception", "mobility-accessible-pool", "mobility-accessible-pool-exception", "mobility-accessible-shower", "mobility-accessible-shower-exception", "mobility-accessible-toilet", "mobility-accessible-toilet-exception", "mobility-accessible-unit", "mobility-accessible-unit-exception", "name", "nanos", "nightclub", "nightclub-exception", 
"no-high-touch-items-common-areas", "no-high-touch-items-common-areas-exception", "no-high-touch-items-guest-rooms", "no-high-touch-items-guest-rooms-exception", "no-single-use-plastic-straws", "no-single-use-plastic-straws-exception", "no-single-use-plastic-water-bottles", "no-single-use-plastic-water-bottles-exception", "no-styrofoam-food-containers", "no-styrofoam-food-containers-exception", "non-smoking", "non-smoking-exception", "ocean-view", "ocean-view-exception", "organic-cage-free-eggs", "organic-cage-free-eggs-exception", "organic-food-and-beverages", "organic-food-and-beverages-exception", "other-beds-count", "other-beds-count-exception", "outdoor-grill", "outdoor-grill-exception", "outdoor-pool", "outdoor-pool-exception", "outdoor-pools-count", "outdoor-pools-count-exception", "oven", "oven-exception", "parking", "parking-available", "parking-available-exception", "patio", "patio-exception", "pay-per-view-movies", "pay-per-view-movies-exception", "payment-options", "personal-protection", "pets", "pets-allowed", "pets-allowed-exception", "pets-allowed-free", "pets-allowed-free-exception", "physical-distancing", "physical-distancing-required", "physical-distancing-required-exception", "plastic-keycards-disinfected", "plastic-keycards-disinfected-exception", "policies", "pool", "pool-exception", "pool-view", "pool-view-exception", "pools", "pools-count", "pools-count-exception", "private-bathroom", "private-bathroom-exception", "private-beach", "private-beach-exception", "private-car-service", "private-car-service-exception", "private-home", "private-home-exception", "property", "protective-equipment-available", "protective-equipment-available-exception", "public-area-wifi-available", "public-area-wifi-available-exception", "public-internet-terminal", "public-internet-terminal-exception", "queen-beds-count", "queen-beds-count-exception", "recycling-program", "recycling-program-exception", "refillable-toiletry-containers", 
"refillable-toiletry-containers-exception", "refrigerator", "refrigerator-exception", "responsible-purchasing-policy", "responsible-purchasing-policy-exception", "responsibly-sources-seafood", "responsibly-sources-seafood-exception", "restaurant", "restaurant-exception", "restaurants-count", "restaurants-count-exception", "roll-away-beds-count", "roll-away-beds-count-exception", "room-bookings-buffer", "room-bookings-buffer-exception", "room-service", "room-service-exception", "rooms-count", "rooms-count-exception", "safely-disposes-batteries", "safely-disposes-batteries-exception", "safely-disposes-electronics", "safely-disposes-electronics-exception", "safely-disposes-lightbulbs", "safely-disposes-lightbulbs-exception", "safely-handles-hazardous-substances", "safely-handles-hazardous-substances-exception", "safety-dividers", "safety-dividers-exception", "salon", "salon-exception", "sauna", "sauna-exception", "scuba", "scuba-exception", "seconds", "self-parking-available", "self-parking-available-exception", "self-service-laundry", "self-service-laundry-exception", "services", "shared-areas-limited-occupancy", "shared-areas-limited-occupancy-exception", "shower", "shower-exception", "single-or-twin-beds-count", "single-or-twin-beds-count-exception", "single-use-food-menus", "single-use-food-menus-exception", "sink", "sink-exception", "sleeping", "smoke-free-property", "smoke-free-property-exception", "snackbar", "snackbar-exception", "snorkeling", "snorkeling-exception", "soap-donation-program", "soap-donation-program-exception", "social-hour", "social-hour-exception", "sofa-beds-count", "sofa-beds-count-exception", "some-units", "spa", "spa-exception", "stairs", "stairs-exception", "stove", "stove-exception", "suite", "suite-exception", "sustainability", "sustainability-certifications", "sustainable-sourcing", "synthetic-pillows", "synthetic-pillows-exception", "table-service", "table-service-exception", "tea-station", "tea-station-exception", "tennis", 
"tennis-exception", "tier", "tier-exception", "toaster", "toaster-exception", "toilet", "toilet-exception", "toiletry-donation-program", "toiletry-donation-program-exception", "total-living-areas", "towel-reuse-program", "towel-reuse-program-exception", "transfer", "transfer-exception", "transportation", "treadmill", "treadmill-exception", "turndown-service", "turndown-service-exception", "tv", "tv-casting", "tv-casting-exception", "tv-exception", "tv-streaming", "tv-streaming-exception", "twenty-four-hour-front-desk", "twenty-four-hour-front-desk-exception", "twenty-four-hour-room-service", "twenty-four-hour-room-service-exception", "universal-power-adapters", "universal-power-adapters-exception", "update-time", "valet-parking-available", "valet-parking-available-exception", "valley-view", "valley-view-exception", "vegan-meals", "vegan-meals-exception", "vegetarian-meals", "vegetarian-meals-exception", "vending-machine", "vending-machine-exception", "views", "wading-pool", "wading-pool-exception", "wake-up-calls", "wake-up-calls-exception", "washer", "washer-exception", "waste-reduction", "water-bottle-filling-stations", "water-bottle-filling-stations-exception", "water-conservation", "water-park", "water-park-exception", "water-saving-showers", "water-saving-showers-exception", "water-saving-sinks", "water-saving-sinks-exception", "water-saving-toilets", "water-saving-toilets-exception", "water-skiing", "water-skiing-exception", "watercraft-rental", "watercraft-rental-exception", "waterslide", "waterslide-exception", "wave-pool", "wave-pool-exception", "weight-machine", "weight-machine-exception", "wellness", "wellness-areas-have-private-spaces", "wellness-areas-have-private-spaces-exception", "wifi-available", "wifi-available-exception"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["accessibility", "activities", "ada-compliant-unit", "ada-compliant-unit-exception", "adult-pool", "adult-pool-exception", "air-conditioning", 
"air-conditioning-exception", "airport-shuttle", "airport-shuttle-exception", "all-inclusive-available", "all-inclusive-available-exception", "all-inclusive-only", "all-inclusive-only-exception", "all-units", "babysitting", "babysitting-exception", "baggage-storage", "baggage-storage-exception", "balcony", "balcony-exception", "bar", "bar-exception", "bathtub", "bathtub-exception", "beach-access", "beach-access-exception", "beach-front", "beach-front-exception", "beach-view", "beach-view-exception", "beds-count", "beds-count-exception", "bicycle-rental", "bicycle-rental-exception", "bidet", "bidet-exception", "boutique-stores", "boutique-stores-exception", "breakfast-available", "breakfast-available-exception", "breakfast-buffet", "breakfast-buffet-exception", "breeam-certification", "breeam-certification-exception", "buffet", "buffet-exception", "built-year", "built-year-exception", "bungalow-or-villa", "bungalow-or-villa-exception", "bunk-beds-count", "bunk-beds-count-exception", "business", "business-center", "business-center-exception", "car-rental-on-property", "car-rental-on-property-exception", "carbon-free-energy-sources", "carbon-free-energy-sources-exception", "cash", "cash-exception", "casino", "casino-exception", "cats-allowed", "cats-allowed-exception", "checkin-time", "checkin-time-exception", "checkout-time", "checkout-time-exception", "cheque", "cheque-exception", "city-view", "city-view-exception", "coffee-maker", "coffee-maker-exception", "commercial-grade-disinfectant-cleaning", "commercial-grade-disinfectant-cleaning-exception", "common-areas-enhanced-cleaning", "common-areas-enhanced-cleaning-exception", "common-areas-offer-sanitizing-items", "common-areas-offer-sanitizing-items-exception", "common-areas-physical-distancing-arranged", "common-areas-physical-distancing-arranged-exception", "common-living-area", "compostable-food-containers-and-cutlery", "compostable-food-containers-and-cutlery-exception", "composts-excess-food", 
"composts-excess-food-exception", "concierge", "concierge-exception", "connecting-unit-available", "connecting-unit-available-exception", "connectivity", "contactless-checkin-checkout", "contactless-checkin-checkout-exception", "convenience-store", "convenience-store-exception", "cookware", "cookware-exception", "credit-card", "credit-card-exception", "cribs-count", "cribs-count-exception", "currency-exchange", "currency-exchange-exception", "daily-housekeeping", "daily-housekeeping-exception", "debit-card", "debit-card-exception", "digital-guest-room-keys", "digital-guest-room-keys-exception", "dining-areas-additional-sanitation", "dining-areas-additional-sanitation-exception", "dinner-buffet", "dinner-buffet-exception", "dishwasher", "dishwasher-exception", "disposable-flatware", "disposable-flatware-exception", "doctor-on-call", "doctor-on-call-exception", "dogs-allowed", "dogs-allowed-exception", "donates-excess-food", "donates-excess-food-exception", "double-beds-count", "double-beds-count-exception", "dryer", "dryer-exception", "eating", "eco-friendly-toiletries", "eco-friendly-toiletries-exception", "electric-car-charging-stations", "electric-car-charging-stations-exception", "electronic-room-key", "electronic-room-key-exception", "elevator", "elevator-exception", "elliptical-machine", "elliptical-machine-exception", "employees-trained-cleaning-procedures", "employees-trained-cleaning-procedures-exception", "employees-trained-thorough-hand-washing", "employees-trained-thorough-hand-washing-exception", "employees-wear-protective-equipment", "employees-wear-protective-equipment-exception", "energy-conservation-program", "energy-conservation-program-exception", "energy-efficiency", "energy-efficient-heating-and-cooling-systems", "energy-efficient-heating-and-cooling-systems-exception", "energy-efficient-lighting", "energy-efficient-lighting-exception", "energy-saving-thermostats", "energy-saving-thermostats-exception", "enhanced-cleaning", "executive-floor", 
"executive-floor-exception", "face-mask-required", "face-mask-required-exception", "families", "feather-pillows", "feather-pillows-exception", "features", "fireplace", "fireplace-exception", "fitness-center", "fitness-center-exception", "floors-count", "floors-count-exception", "food-and-drink", "food-preparation-and-serving-additional-safety", "food-preparation-and-serving-additional-safety-exception", "food-waste-reduction-program", "food-waste-reduction-program-exception", "free-airport-shuttle", "free-airport-shuttle-exception", "free-bicycle-rental", "free-bicycle-rental-exception", "free-breakfast", "free-breakfast-exception", "free-fitness-center", "free-fitness-center-exception", "free-parking", "free-parking-exception", "free-private-car-service", "free-private-car-service-exception", "free-self-parking", "free-self-parking-exception", "free-valet-parking", "free-valet-parking-exception", "free-watercraft-rental", "free-watercraft-rental-exception", "free-weights", "free-weights-exception", "free-wifi", "free-wifi-exception", "front-desk", "front-desk-exception", "full-service-laundry", "full-service-laundry-exception", "game-room", "game-room-exception", "garden-view", "garden-view-exception", "gift-shop", "gift-shop-exception", "golf", "golf-exception", "green-building-design", "green-building-design-exception", "guest-room-hygiene-kits-available", "guest-room-hygiene-kits-available-exception", "guest-rooms-enhanced-cleaning", "guest-rooms-enhanced-cleaning-exception", "hairdryer", "hairdryer-exception", "health-and-safety", "hearing-accessible-doorbell", "hearing-accessible-doorbell-exception", "hearing-accessible-fire-alarm", "hearing-accessible-fire-alarm-exception", "hearing-accessible-unit", "hearing-accessible-unit-exception", "heating", "heating-exception", "horseback-riding", "horseback-riding-exception", "hot-tub", "hot-tub-exception", "hours", "housekeeping", "housekeeping-available", "housekeeping-available-exception", 
"housekeeping-scheduled-request-only", "housekeeping-scheduled-request-only-exception", "hypoallergenic-bedding", "hypoallergenic-bedding-exception", "increased-food-safety", "independent-organization-audits-energy-use", "independent-organization-audits-energy-use-exception", "independent-organization-audits-water-use", "independent-organization-audits-water-use-exception", "individual-packaged-meals", "individual-packaged-meals-exception", "indoor-grill", "indoor-grill-exception", "indoor-pool", "indoor-pool-exception", "indoor-pools-count", "indoor-pools-count-exception", "inunit-safe", "inunit-safe-exception", "inunit-wifi-available", "inunit-wifi-available-exception", "ironing-equipment", "ironing-equipment-exception", "kettle", "kettle-exception", "kids-activities", "kids-activities-exception", "kids-club", "kids-club-exception", "kids-friendly", "kids-friendly-exception", "kids-stay-free", "kids-stay-free-exception", "king-beds-count", "king-beds-count-exception", "kitchen-available", "kitchen-available-exception", "lake-view", "lake-view-exception", "landmark-view", "landmark-view-exception", "last-renovated-year", "last-renovated-year-exception", "layout", "lazy-river", "lazy-river-exception", "leed-certification", "leed-certification-exception", "lifeguard", "lifeguard-exception", "linen-reuse-program", "linen-reuse-program-exception", "living-area-sq-meters", "living-area-sq-meters-exception", "local-shuttle", "local-shuttle-exception", "locally-sourced-food-and-beverages", "locally-sourced-food-and-beverages-exception", "loft", "loft-exception", "massage", "massage-exception", "max-adult-occupants-count", "max-adult-occupants-count-exception", "max-child-age", "max-child-age-exception", "max-child-occupants-count", "max-child-occupants-count-exception", "max-kids-stay-free-count", "max-kids-stay-free-count-exception", "max-occupants-count", "max-occupants-count-exception", "meeting-rooms", "meeting-rooms-count", "meeting-rooms-count-exception", 
"meeting-rooms-exception", "memory-foam-pillows", "memory-foam-pillows-exception", "metadata", "microwave", "microwave-exception", "minibar", "minibar-exception", "minimized-contact", "minutes", "mobile-nfc", "mobile-nfc-exception", "mobility-accessible", "mobility-accessible-bathtub", "mobility-accessible-bathtub-exception", "mobility-accessible-elevator", "mobility-accessible-elevator-exception", "mobility-accessible-exception", "mobility-accessible-parking", "mobility-accessible-parking-exception", "mobility-accessible-pool", "mobility-accessible-pool-exception", "mobility-accessible-shower", "mobility-accessible-shower-exception", "mobility-accessible-toilet", "mobility-accessible-toilet-exception", "mobility-accessible-unit", "mobility-accessible-unit-exception", "name", "nanos", "nightclub", "nightclub-exception", "no-high-touch-items-common-areas", "no-high-touch-items-common-areas-exception", "no-high-touch-items-guest-rooms", "no-high-touch-items-guest-rooms-exception", "no-single-use-plastic-straws", "no-single-use-plastic-straws-exception", "no-single-use-plastic-water-bottles", "no-single-use-plastic-water-bottles-exception", "no-styrofoam-food-containers", "no-styrofoam-food-containers-exception", "non-smoking", "non-smoking-exception", "ocean-view", "ocean-view-exception", "organic-cage-free-eggs", "organic-cage-free-eggs-exception", "organic-food-and-beverages", "organic-food-and-beverages-exception", "other-beds-count", "other-beds-count-exception", "outdoor-grill", "outdoor-grill-exception", "outdoor-pool", "outdoor-pool-exception", "outdoor-pools-count", "outdoor-pools-count-exception", "oven", "oven-exception", "parking", "parking-available", "parking-available-exception", "patio", "patio-exception", "pay-per-view-movies", "pay-per-view-movies-exception", "payment-options", "personal-protection", "pets", "pets-allowed", "pets-allowed-exception", "pets-allowed-free", "pets-allowed-free-exception", "physical-distancing", 
"physical-distancing-required", "physical-distancing-required-exception", "plastic-keycards-disinfected", "plastic-keycards-disinfected-exception", "policies", "pool", "pool-exception", "pool-view", "pool-view-exception", "pools", "pools-count", "pools-count-exception", "private-bathroom", "private-bathroom-exception", "private-beach", "private-beach-exception", "private-car-service", "private-car-service-exception", "private-home", "private-home-exception", "property", "protective-equipment-available", "protective-equipment-available-exception", "public-area-wifi-available", "public-area-wifi-available-exception", "public-internet-terminal", "public-internet-terminal-exception", "queen-beds-count", "queen-beds-count-exception", "recycling-program", "recycling-program-exception", "refillable-toiletry-containers", "refillable-toiletry-containers-exception", "refrigerator", "refrigerator-exception", "responsible-purchasing-policy", "responsible-purchasing-policy-exception", "responsibly-sources-seafood", "responsibly-sources-seafood-exception", "restaurant", "restaurant-exception", "restaurants-count", "restaurants-count-exception", "roll-away-beds-count", "roll-away-beds-count-exception", "room-bookings-buffer", "room-bookings-buffer-exception", "room-service", "room-service-exception", "rooms-count", "rooms-count-exception", "safely-disposes-batteries", "safely-disposes-batteries-exception", "safely-disposes-electronics", "safely-disposes-electronics-exception", "safely-disposes-lightbulbs", "safely-disposes-lightbulbs-exception", "safely-handles-hazardous-substances", "safely-handles-hazardous-substances-exception", "safety-dividers", "safety-dividers-exception", "salon", "salon-exception", "sauna", "sauna-exception", "scuba", "scuba-exception", "seconds", "self-parking-available", "self-parking-available-exception", "self-service-laundry", "self-service-laundry-exception", "services", "shared-areas-limited-occupancy", "shared-areas-limited-occupancy-exception", 
"shower", "shower-exception", "single-or-twin-beds-count", "single-or-twin-beds-count-exception", "single-use-food-menus", "single-use-food-menus-exception", "sink", "sink-exception", "sleeping", "smoke-free-property", "smoke-free-property-exception", "snackbar", "snackbar-exception", "snorkeling", "snorkeling-exception", "soap-donation-program", "soap-donation-program-exception", "social-hour", "social-hour-exception", "sofa-beds-count", "sofa-beds-count-exception", "some-units", "spa", "spa-exception", "stairs", "stairs-exception", "stove", "stove-exception", "suite", "suite-exception", "sustainability", "sustainability-certifications", "sustainable-sourcing", "synthetic-pillows", "synthetic-pillows-exception", "table-service", "table-service-exception", "tea-station", "tea-station-exception", "tennis", "tennis-exception", "tier", "tier-exception", "toaster", "toaster-exception", "toilet", "toilet-exception", "toiletry-donation-program", "toiletry-donation-program-exception", "total-living-areas", "towel-reuse-program", "towel-reuse-program-exception", "transfer", "transfer-exception", "transportation", "treadmill", "treadmill-exception", "turndown-service", "turndown-service-exception", "tv", "tv-casting", "tv-casting-exception", "tv-exception", "tv-streaming", "tv-streaming-exception", "twenty-four-hour-front-desk", "twenty-four-hour-front-desk-exception", "twenty-four-hour-room-service", "twenty-four-hour-room-service-exception", "universal-power-adapters", "universal-power-adapters-exception", "update-time", "valet-parking-available", "valet-parking-available-exception", "valley-view", "valley-view-exception", "vegan-meals", "vegan-meals-exception", "vegetarian-meals", "vegetarian-meals-exception", "vending-machine", "vending-machine-exception", "views", "wading-pool", "wading-pool-exception", "wake-up-calls", "wake-up-calls-exception", "washer", "washer-exception", "waste-reduction", "water-bottle-filling-stations", "water-bottle-filling-stations-exception", 
"water-conservation", "water-park", "water-park-exception", "water-saving-showers", "water-saving-showers-exception", "water-saving-sinks", "water-saving-sinks-exception", "water-saving-toilets", "water-saving-toilets-exception", "water-skiing", "water-skiing-exception", "watercraft-rental", "watercraft-rental-exception", "waterslide", "waterslide-exception", "wave-pool", "wave-pool-exception", "weight-machine", "weight-machine-exception", "wellness", "wellness-areas-have-private-spaces", "wellness-areas-have-private-spaces-exception", "wifi-available", "wifi-available-exception"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1024,7 +1025,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1244,8 +1245,8 @@ async fn main() { let mut app = App::new("mybusinesslodging1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") - .about("The My Business Lodging API enables managing lodging business information on Google.") + .version("5.0.2+20230124") + .about("The My Business Lodging API enables managing lodging business information on Google. 
Note - If you have a quota of 0 after enabling the API, please request for GBP API access.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mybusinesslodging1_cli") .arg(Arg::with_name("folder") .long("config-dir") diff --git a/gen/mybusinesslodging1/Cargo.toml b/gen/mybusinesslodging1/Cargo.toml index 4488571d0a..fe75510867 100644 --- a/gen/mybusinesslodging1/Cargo.toml +++ b/gen/mybusinesslodging1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-mybusinesslodging1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Lodging (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinesslodging1" homepage = "https://developers.google.com/my-business/" -documentation = "https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-mybusinesslodging1/5.0.2+20230124" license = "MIT" keywords = ["mybusinesslodging", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mybusinesslodging1/README.md b/gen/mybusinesslodging1/README.md index 2ad9781dbb..eadecd1bc7 100644 --- a/gen/mybusinesslodging1/README.md +++ b/gen/mybusinesslodging1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-mybusinesslodging1` library allows access to all features of the *Google My Business Lodging* service. -This documentation was generated from *My Business Lodging* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinesslodging:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *My Business Lodging* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinesslodging:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *My Business Lodging* *v1* API can be found at the [official documentation site](https://developers.google.com/my-business/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/MyBusinessLodging) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/MyBusinessLodging) ... * locations - * [*get lodging*](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/api::LocationGetLodgingCall), [*lodging get google updated*](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/api::LocationLodgingGetGoogleUpdatedCall) and [*update lodging*](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/api::LocationUpdateLodgingCall) + * [*get lodging*](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/api::LocationGetLodgingCall), [*lodging get google updated*](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/api::LocationLodgingGetGoogleUpdatedCall) and [*update lodging*](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/api::LocationUpdateLodgingCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/MyBusinessLodging)** +* **[Hub](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/MyBusinessLodging)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::CallBuilder) -* **[Resources](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::CallBuilder) +* **[Resources](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::Part)** + * **[Parts](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::Delegate) to the -[Method Builder](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::Delegate) to the +[Method Builder](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::RequestValue) and -[decodable](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::RequestValue) and +[decodable](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mybusinesslodging1/5.0.2-beta-1+20230124/google_mybusinesslodging1/client::RequestValue) are moved +* [request values](https://docs.rs/google-mybusinesslodging1/5.0.2+20230124/google_mybusinesslodging1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/mybusinesslodging1/src/api.rs b/gen/mybusinesslodging1/src/api.rs index a91d673eee..95e8c8c984 100644 --- a/gen/mybusinesslodging1/src/api.rs +++ b/gen/mybusinesslodging1/src/api.rs @@ -104,7 +104,7 @@ impl<'a, S> MyBusinessLodging { MyBusinessLodging { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://mybusinesslodging.googleapis.com/".to_string(), _root_url: "https://mybusinesslodging.googleapis.com/".to_string(), } @@ -115,7 +115,7 @@ impl<'a, S> MyBusinessLodging { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mybusinesslodging1/src/client.rs b/gen/mybusinesslodging1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mybusinesslodging1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mybusinesslodging1/src/lib.rs b/gen/mybusinesslodging1/src/lib.rs index 39d6568a02..480c03f0d4 100644 --- a/gen/mybusinesslodging1/src/lib.rs +++ b/gen/mybusinesslodging1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *My Business Lodging* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinesslodging:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *My Business Lodging* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinesslodging:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *My Business Lodging* *v1* API can be found at the //! [official documentation site](https://developers.google.com/my-business/). diff --git a/gen/mybusinessnotifications1-cli/Cargo.toml b/gen/mybusinessnotifications1-cli/Cargo.toml index c6743d71f9..05c0ccdb77 100644 --- a/gen/mybusinessnotifications1-cli/Cargo.toml +++ b/gen/mybusinessnotifications1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mybusinessnotifications1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Notification Settings (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessnotifications1-cli" @@ -20,13 +20,13 @@ name = "mybusinessnotifications1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mybusinessnotifications1] path = "../mybusinessnotifications1" -version = "4.0.1+20220305" +version = "5.0.2+20230124" + diff --git a/gen/mybusinessnotifications1-cli/README.md 
b/gen/mybusinessnotifications1-cli/README.md index ed03835456..b8397142ac 100644 --- a/gen/mybusinessnotifications1-cli/README.md +++ b/gen/mybusinessnotifications1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *My Business Notification Settings* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *My Business Notification Settings* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash mybusinessnotifications1 [options] diff --git a/gen/mybusinessnotifications1-cli/mkdocs.yml b/gen/mybusinessnotifications1-cli/mkdocs.yml index 95a426d537..ddf5014fd9 100644 --- a/gen/mybusinessnotifications1-cli/mkdocs.yml +++ b/gen/mybusinessnotifications1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: My Business Notification Settings v4.0.1+20220305 +site_name: My Business Notification Settings v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-mybusinessnotifications1-cli site_description: A complete library to interact with My Business Notification Settings (protocol v1) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessnotifi docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_get-notification-setting.md', 'Accounts', 'Get Notification Setting'] -- ['accounts_update-notification-setting.md', 'Accounts', 'Update Notification Setting'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Get Notification Setting': 'accounts_get-notification-setting.md' + - 'Update Notification Setting': 'accounts_update-notification-setting.md' theme: readthedocs diff --git a/gen/mybusinessnotifications1-cli/src/client.rs b/gen/mybusinessnotifications1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mybusinessnotifications1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 
'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - 
-arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mybusinessnotifications1-cli/src/main.rs b/gen/mybusinessnotifications1-cli/src/main.rs index 857ef4f9ce..9cc7be6cff 100644 --- a/gen/mybusinessnotifications1-cli/src/main.rs +++ b/gen/mybusinessnotifications1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mybusinessnotifications1::{api, Error, oauth2}; +use google_mybusinessnotifications1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -142,7 +141,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -337,8 +336,8 @@ async fn main() { let mut app = App::new("mybusinessnotifications1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") - .about("The My Business Notification Settings API enables managing notification settings for business accounts.") + .version("5.0.2+20230124") + .about("The My Business Notification Settings API enables managing notification settings for business accounts. 
Note - If you have a quota of 0 after enabling the API, please request for GBP API access.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mybusinessnotifications1_cli") .arg(Arg::with_name("folder") .long("config-dir") diff --git a/gen/mybusinessnotifications1/Cargo.toml b/gen/mybusinessnotifications1/Cargo.toml index 78526e9c10..ebe258c74c 100644 --- a/gen/mybusinessnotifications1/Cargo.toml +++ b/gen/mybusinessnotifications1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-mybusinessnotifications1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Notification Settings (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessnotifications1" homepage = "https://developers.google.com/my-business/" -documentation = "https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124" license = "MIT" keywords = ["mybusinessnotificati", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mybusinessnotifications1/README.md b/gen/mybusinessnotifications1/README.md index 3f6ee2edad..273c46be25 100644 --- a/gen/mybusinessnotifications1/README.md +++ b/gen/mybusinessnotifications1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-mybusinessnotifications1` library allows access to all features of the *Google My Business Notification Settings* service. -This documentation was generated from *My Business Notification Settings* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessnotifications:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *My Business Notification Settings* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessnotifications:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *My Business Notification Settings* *v1* API can be found at the [official documentation site](https://developers.google.com/my-business/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/MyBusinessNotificationSettings) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/MyBusinessNotificationSettings) ... * accounts - * [*get notification setting*](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/api::AccountGetNotificationSettingCall) and [*update notification setting*](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/api::AccountUpdateNotificationSettingCall) + * [*get notification setting*](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/api::AccountGetNotificationSettingCall) and [*update notification setting*](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/api::AccountUpdateNotificationSettingCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/MyBusinessNotificationSettings)** +* **[Hub](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/MyBusinessNotificationSettings)** * 
a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::CallBuilder) -* **[Resources](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::CallBuilder) +* **[Resources](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::Part)** + * **[Parts](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::Delegate) to the -[Method Builder](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::Delegate) to the +[Method Builder](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::RequestValue) and -[decodable](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::RequestValue) and +[decodable](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mybusinessnotifications1/5.0.2-beta-1+20230124/google_mybusinessnotifications1/client::RequestValue) are moved +* [request values](https://docs.rs/google-mybusinessnotifications1/5.0.2+20230124/google_mybusinessnotifications1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/mybusinessnotifications1/src/api.rs b/gen/mybusinessnotifications1/src/api.rs index 300307d296..c7535c70f4 100644 --- a/gen/mybusinessnotifications1/src/api.rs +++ b/gen/mybusinessnotifications1/src/api.rs @@ -104,7 +104,7 @@ impl<'a, S> MyBusinessNotificationSettings { MyBusinessNotificationSettings { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://mybusinessnotifications.googleapis.com/".to_string(), _root_url: "https://mybusinessnotifications.googleapis.com/".to_string(), } @@ -115,7 +115,7 @@ impl<'a, S> MyBusinessNotificationSettings { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mybusinessnotifications1/src/client.rs b/gen/mybusinessnotifications1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mybusinessnotifications1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mybusinessnotifications1/src/lib.rs b/gen/mybusinessnotifications1/src/lib.rs index b86acdf4bc..0dd842040c 100644 --- a/gen/mybusinessnotifications1/src/lib.rs +++ b/gen/mybusinessnotifications1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *My Business Notification Settings* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessnotifications:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *My Business Notification Settings* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessnotifications:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *My Business Notification Settings* *v1* API can be found at the //! [official documentation site](https://developers.google.com/my-business/). diff --git a/gen/mybusinessplaceactions1-cli/Cargo.toml b/gen/mybusinessplaceactions1-cli/Cargo.toml index 84e676f2fa..58cd77770d 100644 --- a/gen/mybusinessplaceactions1-cli/Cargo.toml +++ b/gen/mybusinessplaceactions1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mybusinessplaceactions1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Place Actions (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessplaceactions1-cli" @@ -20,13 +20,13 @@ name = "mybusinessplaceactions1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mybusinessplaceactions1] path = "../mybusinessplaceactions1" -version = "4.0.1+20220305" +version = "5.0.2+20230124" + diff --git a/gen/mybusinessplaceactions1-cli/README.md 
b/gen/mybusinessplaceactions1-cli/README.md index 362b59cb8c..eb5cea1ff5 100644 --- a/gen/mybusinessplaceactions1-cli/README.md +++ b/gen/mybusinessplaceactions1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *My Business Place Actions* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *My Business Place Actions* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash mybusinessplaceactions1 [options] diff --git a/gen/mybusinessplaceactions1-cli/mkdocs.yml b/gen/mybusinessplaceactions1-cli/mkdocs.yml index 0da55fd0ff..258634dd4e 100644 --- a/gen/mybusinessplaceactions1-cli/mkdocs.yml +++ b/gen/mybusinessplaceactions1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: My Business Place Actions v4.0.1+20220305 +site_name: My Business Place Actions v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-mybusinessplaceactions1-cli site_description: A complete library to interact with My Business Place Actions (protocol v1) @@ -7,14 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessplacea docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['locations_place-action-links-create.md', 'Locations', 'Place Action Links Create'] -- ['locations_place-action-links-delete.md', 'Locations', 'Place Action Links Delete'] -- ['locations_place-action-links-get.md', 'Locations', 'Place Action Links Get'] -- ['locations_place-action-links-list.md', 'Locations', 'Place Action Links List'] -- ['locations_place-action-links-patch.md', 'Locations', 'Place Action Links Patch'] -- ['place-action-type-metadata_list.md', 'Place Action Type Metadata', 'List'] +nav: +- Home: 'index.md' +- 'Locations': + - 'Place Action Links Create': 'locations_place-action-links-create.md' + - 'Place Action Links Delete': 'locations_place-action-links-delete.md' + - 'Place 
Action Links Get': 'locations_place-action-links-get.md' + - 'Place Action Links List': 'locations_place-action-links-list.md' + - 'Place Action Links Patch': 'locations_place-action-links-patch.md' +- 'Place Action Type Metadata': + - 'List': 'place-action-type-metadata_list.md' theme: readthedocs diff --git a/gen/mybusinessplaceactions1-cli/src/client.rs b/gen/mybusinessplaceactions1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mybusinessplaceactions1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mybusinessplaceactions1-cli/src/main.rs b/gen/mybusinessplaceactions1-cli/src/main.rs index 7ce5e49b5e..e995d42c21 100644 --- a/gen/mybusinessplaceactions1-cli/src/main.rs +++ b/gen/mybusinessplaceactions1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mybusinessplaceactions1::{api, Error, oauth2}; +use google_mybusinessplaceactions1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -248,7 +247,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -344,7 +343,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -400,7 +399,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "language-code" => { call = call.language_code(value.unwrap_or("")); @@ -712,8 
+711,8 @@ async fn main() { let mut app = App::new("mybusinessplaceactions1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") - .about("The My Business Place Actions API provides an interface for managing place action links of a location on Google.") + .version("5.0.2+20230124") + .about("The My Business Place Actions API provides an interface for managing place action links of a location on Google. Note - If you have a quota of 0 after enabling the API, please request for GBP API access.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mybusinessplaceactions1_cli") .arg(Arg::with_name("folder") .long("config-dir") diff --git a/gen/mybusinessplaceactions1/Cargo.toml b/gen/mybusinessplaceactions1/Cargo.toml index 491137532f..dc68c47e6f 100644 --- a/gen/mybusinessplaceactions1/Cargo.toml +++ b/gen/mybusinessplaceactions1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-mybusinessplaceactions1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Place Actions (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessplaceactions1" homepage = "https://developers.google.com/my-business/" -documentation = "https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124" license = "MIT" keywords = ["mybusinessplaceactio", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mybusinessplaceactions1/README.md b/gen/mybusinessplaceactions1/README.md index f41e26e979..cf734c1dd8 100644 --- a/gen/mybusinessplaceactions1/README.md +++ b/gen/mybusinessplaceactions1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-mybusinessplaceactions1` library allows access to all features of the *Google My Business Place Actions* service. 
-This documentation was generated from *My Business Place Actions* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessplaceactions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *My Business Place Actions* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessplaceactions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *My Business Place Actions* *v1* API can be found at the [official documentation site](https://developers.google.com/my-business/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/MyBusinessPlaceActions) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/MyBusinessPlaceActions) ... 
* locations - * [*place action links create*](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkCreateCall), [*place action links delete*](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkDeleteCall), [*place action links get*](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkGetCall), [*place action links list*](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkListCall) and [*place action links patch*](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkPatchCall) -* [place action type metadata](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/api::PlaceActionTypeMetadata) - * [*list*](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/api::PlaceActionTypeMetadataListCall) + * [*place action links create*](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkCreateCall), [*place action links delete*](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkDeleteCall), [*place action links get*](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkGetCall), [*place action links list*](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkListCall) and [*place action links patch*](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/api::LocationPlaceActionLinkPatchCall) +* [place 
action type metadata](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/api::PlaceActionTypeMetadata) + * [*list*](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/api::PlaceActionTypeMetadataListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/MyBusinessPlaceActions)** +* **[Hub](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/MyBusinessPlaceActions)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::CallBuilder) -* **[Resources](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::CallBuilder) +* **[Resources](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::Part)** + * 
**[Parts](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::Delegate) to the -[Method Builder](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::Delegate) to the +[Method Builder](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::RequestValue) and -[decodable](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::RequestValue) and +[decodable](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mybusinessplaceactions1/5.0.2-beta-1+20230124/google_mybusinessplaceactions1/client::RequestValue) are moved +* [request values](https://docs.rs/google-mybusinessplaceactions1/5.0.2+20230124/google_mybusinessplaceactions1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/mybusinessplaceactions1/src/api.rs b/gen/mybusinessplaceactions1/src/api.rs index 738fa638c2..f87519f690 100644 --- a/gen/mybusinessplaceactions1/src/api.rs +++ b/gen/mybusinessplaceactions1/src/api.rs @@ -104,7 +104,7 @@ impl<'a, S> MyBusinessPlaceActions { MyBusinessPlaceActions { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://mybusinessplaceactions.googleapis.com/".to_string(), _root_url: "https://mybusinessplaceactions.googleapis.com/".to_string(), } @@ -118,7 +118,7 @@ impl<'a, S> MyBusinessPlaceActions { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mybusinessplaceactions1/src/client.rs b/gen/mybusinessplaceactions1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mybusinessplaceactions1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mybusinessplaceactions1/src/lib.rs b/gen/mybusinessplaceactions1/src/lib.rs index 1d2d96fff5..faba01ebc1 100644 --- a/gen/mybusinessplaceactions1/src/lib.rs +++ b/gen/mybusinessplaceactions1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *My Business Place Actions* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessplaceactions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *My Business Place Actions* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessplaceactions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *My Business Place Actions* *v1* API can be found at the //! [official documentation site](https://developers.google.com/my-business/). diff --git a/gen/mybusinessverifications1-cli/Cargo.toml b/gen/mybusinessverifications1-cli/Cargo.toml index 8998931d4c..0babc954a8 100644 --- a/gen/mybusinessverifications1-cli/Cargo.toml +++ b/gen/mybusinessverifications1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-mybusinessverifications1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Verifications (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessverifications1-cli" @@ -20,13 +20,13 @@ name = "mybusinessverifications1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-mybusinessverifications1] path = "../mybusinessverifications1" -version = "4.0.1+20220305" +version = "5.0.2+20230124" + diff --git a/gen/mybusinessverifications1-cli/README.md 
b/gen/mybusinessverifications1-cli/README.md index 2358f27f3b..2b3b963b7f 100644 --- a/gen/mybusinessverifications1-cli/README.md +++ b/gen/mybusinessverifications1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *My Business Verifications* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *My Business Verifications* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash mybusinessverifications1 [options] diff --git a/gen/mybusinessverifications1-cli/mkdocs.yml b/gen/mybusinessverifications1-cli/mkdocs.yml index 1fdf5241c5..08be339961 100644 --- a/gen/mybusinessverifications1-cli/mkdocs.yml +++ b/gen/mybusinessverifications1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: My Business Verifications v4.0.1+20220305 +site_name: My Business Verifications v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-mybusinessverifications1-cli site_description: A complete library to interact with My Business Verifications (protocol v1) @@ -7,14 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessverifi docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['locations_fetch-verification-options.md', 'Locations', 'Fetch Verification Options'] -- ['locations_get-voice-of-merchant-state.md', 'Locations', 'Get Voice Of Merchant State'] -- ['locations_verifications-complete.md', 'Locations', 'Verifications Complete'] -- ['locations_verifications-list.md', 'Locations', 'Verifications List'] -- ['locations_verify.md', 'Locations', 'Verify'] -- ['verification-tokens_generate.md', 'Verification Tokens', 'Generate'] +nav: +- Home: 'index.md' +- 'Locations': + - 'Fetch Verification Options': 'locations_fetch-verification-options.md' + - 'Get Voice Of Merchant State': 'locations_get-voice-of-merchant-state.md' + - 'Verifications Complete': 
'locations_verifications-complete.md' + - 'Verifications List': 'locations_verifications-list.md' + - 'Verify': 'locations_verify.md' +- 'Verification Tokens': + - 'Generate': 'verification-tokens_generate.md' theme: readthedocs diff --git a/gen/mybusinessverifications1-cli/src/client.rs b/gen/mybusinessverifications1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/mybusinessverifications1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/mybusinessverifications1-cli/src/main.rs b/gen/mybusinessverifications1-cli/src/main.rs index 35bc30c748..cd729bb973 100644 --- a/gen/mybusinessverifications1-cli/src/main.rs +++ b/gen/mybusinessverifications1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_mybusinessverifications1::{api, Error, oauth2}; +use google_mybusinessverifications1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -285,7 +284,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -797,7 +796,7 @@ async fn main() { let mut app = App::new("mybusinessverifications1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230124") .about("The My Business Verifications API provides an interface for taking verifications related actions for locations.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_mybusinessverifications1_cli") .arg(Arg::with_name("folder") diff --git a/gen/mybusinessverifications1/Cargo.toml b/gen/mybusinessverifications1/Cargo.toml index 18a0a795f2..f4bf239290 100644 --- a/gen/mybusinessverifications1/Cargo.toml +++ b/gen/mybusinessverifications1/Cargo.toml @@ -4,12 +4,12 @@ [package] 
name = "google-mybusinessverifications1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with My Business Verifications (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/mybusinessverifications1" homepage = "https://developers.google.com/my-business/" -documentation = "https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-mybusinessverifications1/5.0.2+20230124" license = "MIT" keywords = ["mybusinessverificati", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/mybusinessverifications1/README.md b/gen/mybusinessverifications1/README.md index a9c67eb73b..f51ace10d5 100644 --- a/gen/mybusinessverifications1/README.md +++ b/gen/mybusinessverifications1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-mybusinessverifications1` library allows access to all features of the *Google My Business Verifications* service. -This documentation was generated from *My Business Verifications* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessverifications:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *My Business Verifications* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessverifications:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *My Business Verifications* *v1* API can be found at the [official documentation site](https://developers.google.com/my-business/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/MyBusinessVerifications) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/MyBusinessVerifications) ... -* [locations](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/api::Location) - * [*fetch verification options*](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/api::LocationFetchVerificationOptionCall), [*get voice of merchant state*](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/api::LocationGetVoiceOfMerchantStateCall), [*verifications complete*](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/api::LocationVerificationCompleteCall), [*verifications list*](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/api::LocationVerificationListCall) and [*verify*](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/api::LocationVerifyCall) -* [verification tokens](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/api::VerificationToken) - * [*generate*](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/api::VerificationTokenGenerateCall) +* [locations](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/api::Location) + * [*fetch verification options*](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/api::LocationFetchVerificationOptionCall), [*get voice of merchant state*](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/api::LocationGetVoiceOfMerchantStateCall), [*verifications 
complete*](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/api::LocationVerificationCompleteCall), [*verifications list*](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/api::LocationVerificationListCall) and [*verify*](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/api::LocationVerifyCall) +* [verification tokens](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/api::VerificationToken) + * [*generate*](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/api::VerificationTokenGenerateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/MyBusinessVerifications)** +* **[Hub](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/MyBusinessVerifications)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::CallBuilder) -* **[Resources](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::CallBuilder) +* **[Resources](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::Part)** + * **[Parts](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::Delegate) to the -[Method Builder](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::Delegate) to the +[Method Builder](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::RequestValue) and -[decodable](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::RequestValue) and +[decodable](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-mybusinessverifications1/5.0.2-beta-1+20230124/google_mybusinessverifications1/client::RequestValue) are moved +* [request values](https://docs.rs/google-mybusinessverifications1/5.0.2+20230124/google_mybusinessverifications1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/mybusinessverifications1/src/api.rs b/gen/mybusinessverifications1/src/api.rs index 98a5c6840b..edb60f92f8 100644 --- a/gen/mybusinessverifications1/src/api.rs +++ b/gen/mybusinessverifications1/src/api.rs @@ -99,7 +99,7 @@ impl<'a, S> MyBusinessVerifications { MyBusinessVerifications { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://mybusinessverifications.googleapis.com/".to_string(), _root_url: "https://mybusinessverifications.googleapis.com/".to_string(), } @@ -113,7 +113,7 @@ impl<'a, S> MyBusinessVerifications { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/mybusinessverifications1/src/client.rs b/gen/mybusinessverifications1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/mybusinessverifications1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after 
the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. 
The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. 
- fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. 
- /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. 
- MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." 
- ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. -pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. 
-#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/mybusinessverifications1/src/lib.rs b/gen/mybusinessverifications1/src/lib.rs index c46936fef4..1a13542e92 100644 --- a/gen/mybusinessverifications1/src/lib.rs +++ b/gen/mybusinessverifications1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *My Business Verifications* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *mybusinessverifications:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *My Business Verifications* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *mybusinessverifications:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *My Business Verifications* *v1* API can be found at the //! [official documentation site](https://developers.google.com/my-business/). diff --git a/gen/networkconnectivity1-cli/Cargo.toml b/gen/networkconnectivity1-cli/Cargo.toml index 7472e88e97..b88c9eb3c3 100644 --- a/gen/networkconnectivity1-cli/Cargo.toml +++ b/gen/networkconnectivity1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-networkconnectivity1-cli" -version = "4.0.1+20220210" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with networkconnectivity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networkconnectivity1-cli" @@ -20,13 +20,13 @@ name = "networkconnectivity1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-networkconnectivity1] path = "../networkconnectivity1" -version = "4.0.1+20220210" +version = "5.0.2+20230105" + diff --git a/gen/networkconnectivity1-cli/README.md b/gen/networkconnectivity1-cli/README.md index 
922d2d4879..ac7ca2e406 100644 --- a/gen/networkconnectivity1-cli/README.md +++ b/gen/networkconnectivity1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *networkconnectivity* API at revision *20220210*. The CLI is at version *4.0.1*. +This documentation was generated from the *networkconnectivity* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash networkconnectivity1 [options] @@ -42,11 +42,25 @@ networkconnectivity1 [options] locations-global-policy-based-routes-get-iam-policy [-p ]... [-o ] locations-global-policy-based-routes-set-iam-policy (-r )... [-p ]... [-o ] locations-global-policy-based-routes-test-iam-permissions (-r )... [-p ]... [-o ] + locations-internal-ranges-create (-r )... [-p ]... [-o ] + locations-internal-ranges-delete [-p ]... [-o ] + locations-internal-ranges-get [-p ]... [-o ] + locations-internal-ranges-list [-p ]... [-o ] + locations-internal-ranges-patch (-r )... [-p ]... [-o ] locations-list [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... [-o ] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] + locations-service-classes-get-iam-policy [-p ]... [-o ] + locations-service-classes-set-iam-policy (-r )... [-p ]... [-o ] + locations-service-classes-test-iam-permissions (-r )... [-p ]... [-o ] + locations-service-connection-maps-get-iam-policy [-p ]... [-o ] + locations-service-connection-maps-set-iam-policy (-r )... [-p ]... [-o ] + locations-service-connection-maps-test-iam-permissions (-r )... [-p ]... [-o ] + locations-service-connection-policies-get-iam-policy [-p ]... [-o ] + locations-service-connection-policies-set-iam-policy (-r )... [-p ]... [-o ] + locations-service-connection-policies-test-iam-permissions (-r )... [-p ]... [-o ] locations-spokes-create (-r )... [-p ]... [-o ] locations-spokes-delete [-p ]... 
[-o ] locations-spokes-get [-p ]... [-o ] diff --git a/gen/networkconnectivity1-cli/mkdocs.yml b/gen/networkconnectivity1-cli/mkdocs.yml index 58aae9302d..0f2ead31d5 100644 --- a/gen/networkconnectivity1-cli/mkdocs.yml +++ b/gen/networkconnectivity1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: networkconnectivity v4.0.1+20220210 +site_name: networkconnectivity v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-networkconnectivity1-cli site_description: A complete library to interact with networkconnectivity (protocol v1) @@ -7,33 +7,48 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/networkconnectiv docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-global-hubs-create.md', 'Projects', 'Locations Global Hubs Create'] -- ['projects_locations-global-hubs-delete.md', 'Projects', 'Locations Global Hubs Delete'] -- ['projects_locations-global-hubs-get.md', 'Projects', 'Locations Global Hubs Get'] -- ['projects_locations-global-hubs-get-iam-policy.md', 'Projects', 'Locations Global Hubs Get Iam Policy'] -- ['projects_locations-global-hubs-list.md', 'Projects', 'Locations Global Hubs List'] -- ['projects_locations-global-hubs-patch.md', 'Projects', 'Locations Global Hubs Patch'] -- ['projects_locations-global-hubs-set-iam-policy.md', 'Projects', 'Locations Global Hubs Set Iam Policy'] -- ['projects_locations-global-hubs-test-iam-permissions.md', 'Projects', 'Locations Global Hubs Test Iam Permissions'] -- ['projects_locations-global-policy-based-routes-get-iam-policy.md', 'Projects', 'Locations Global Policy Based Routes Get Iam Policy'] -- ['projects_locations-global-policy-based-routes-set-iam-policy.md', 'Projects', 'Locations Global Policy Based Routes Set Iam Policy'] -- ['projects_locations-global-policy-based-routes-test-iam-permissions.md', 'Projects', 'Locations Global Policy Based Routes Test Iam Permissions'] -- 
['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-spokes-create.md', 'Projects', 'Locations Spokes Create'] -- ['projects_locations-spokes-delete.md', 'Projects', 'Locations Spokes Delete'] -- ['projects_locations-spokes-get.md', 'Projects', 'Locations Spokes Get'] -- ['projects_locations-spokes-get-iam-policy.md', 'Projects', 'Locations Spokes Get Iam Policy'] -- ['projects_locations-spokes-list.md', 'Projects', 'Locations Spokes List'] -- ['projects_locations-spokes-patch.md', 'Projects', 'Locations Spokes Patch'] -- ['projects_locations-spokes-set-iam-policy.md', 'Projects', 'Locations Spokes Set Iam Policy'] -- ['projects_locations-spokes-test-iam-permissions.md', 'Projects', 'Locations Spokes Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Global Hubs Create': 'projects_locations-global-hubs-create.md' + - 'Locations Global Hubs Delete': 'projects_locations-global-hubs-delete.md' + - 'Locations Global Hubs Get': 'projects_locations-global-hubs-get.md' + - 'Locations Global Hubs Get Iam Policy': 'projects_locations-global-hubs-get-iam-policy.md' + - 'Locations Global Hubs List': 'projects_locations-global-hubs-list.md' + - 'Locations Global Hubs Patch': 'projects_locations-global-hubs-patch.md' + - 'Locations Global Hubs Set Iam Policy': 'projects_locations-global-hubs-set-iam-policy.md' + - 'Locations Global Hubs Test Iam Permissions': 'projects_locations-global-hubs-test-iam-permissions.md' + - 'Locations Global Policy Based Routes Get Iam Policy': 
'projects_locations-global-policy-based-routes-get-iam-policy.md' + - 'Locations Global Policy Based Routes Set Iam Policy': 'projects_locations-global-policy-based-routes-set-iam-policy.md' + - 'Locations Global Policy Based Routes Test Iam Permissions': 'projects_locations-global-policy-based-routes-test-iam-permissions.md' + - 'Locations Internal Ranges Create': 'projects_locations-internal-ranges-create.md' + - 'Locations Internal Ranges Delete': 'projects_locations-internal-ranges-delete.md' + - 'Locations Internal Ranges Get': 'projects_locations-internal-ranges-get.md' + - 'Locations Internal Ranges List': 'projects_locations-internal-ranges-list.md' + - 'Locations Internal Ranges Patch': 'projects_locations-internal-ranges-patch.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Service Classes Get Iam Policy': 'projects_locations-service-classes-get-iam-policy.md' + - 'Locations Service Classes Set Iam Policy': 'projects_locations-service-classes-set-iam-policy.md' + - 'Locations Service Classes Test Iam Permissions': 'projects_locations-service-classes-test-iam-permissions.md' + - 'Locations Service Connection Maps Get Iam Policy': 'projects_locations-service-connection-maps-get-iam-policy.md' + - 'Locations Service Connection Maps Set Iam Policy': 'projects_locations-service-connection-maps-set-iam-policy.md' + - 'Locations Service Connection Maps Test Iam Permissions': 'projects_locations-service-connection-maps-test-iam-permissions.md' + - 'Locations Service Connection Policies Get Iam Policy': 'projects_locations-service-connection-policies-get-iam-policy.md' + - 'Locations Service Connection Policies Set Iam Policy': 
'projects_locations-service-connection-policies-set-iam-policy.md' + - 'Locations Service Connection Policies Test Iam Permissions': 'projects_locations-service-connection-policies-test-iam-permissions.md' + - 'Locations Spokes Create': 'projects_locations-spokes-create.md' + - 'Locations Spokes Delete': 'projects_locations-spokes-delete.md' + - 'Locations Spokes Get': 'projects_locations-spokes-get.md' + - 'Locations Spokes Get Iam Policy': 'projects_locations-spokes-get-iam-policy.md' + - 'Locations Spokes List': 'projects_locations-spokes-list.md' + - 'Locations Spokes Patch': 'projects_locations-spokes-patch.md' + - 'Locations Spokes Set Iam Policy': 'projects_locations-spokes-set-iam-policy.md' + - 'Locations Spokes Test Iam Permissions': 'projects_locations-spokes-test-iam-permissions.md' theme: readthedocs diff --git a/gen/networkconnectivity1-cli/src/client.rs b/gen/networkconnectivity1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/networkconnectivity1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/networkconnectivity1-cli/src/main.rs b/gen/networkconnectivity1-cli/src/main.rs index 5efdc4ea35..0aef3d7c18 100644 --- a/gen/networkconnectivity1-cli/src/main.rs +++ b/gen/networkconnectivity1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_networkconnectivity1::{api, Error, oauth2}; +use google_networkconnectivity1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -316,7 +315,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -375,7 +374,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -476,7 +475,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -707,7 +706,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -928,6 +927,387 @@ where } } + async fn _projects_locations_internal_ranges_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ip-cidr-range" => Some(("ipCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "overlaps" => Some(("overlaps", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "peering" => Some(("peering", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "prefix-length" => Some(("prefixLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "target-cidr-range" => Some(("targetCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "usage" => Some(("usage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "users" => Some(("users", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "ip-cidr-range", "labels", "name", "network", "overlaps", "peering", "prefix-length", "target-cidr-range", "update-time", "usage", "users"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InternalRange = json::value::from_value(object).unwrap(); + 
let mut call = self.hub.projects().locations_internal_ranges_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + "internal-range-id" => { + call = call.internal_range_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["internal-range-id", "request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_internal_ranges_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_internal_ranges_delete(opt.value_of("name").unwrap_or("")); 
+ for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_internal_ranges_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_internal_ranges_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == 
*param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_internal_ranges_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_internal_ranges_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + 
found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_internal_ranges_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: 
Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ip-cidr-range" => Some(("ipCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "overlaps" => Some(("overlaps", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "peering" => Some(("peering", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "prefix-length" => Some(("prefixLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "target-cidr-range" => Some(("targetCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "usage" => Some(("usage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "users" => Some(("users", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "ip-cidr-range", "labels", "name", "network", "overlaps", "peering", "prefix-length", "target-cidr-range", "update-time", "usage", "users"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let 
mut request: api::InternalRange = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_internal_ranges_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), 
DoitError> { let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or("")); @@ -938,7 +1318,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1188,7 +1568,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1240,6 +1620,690 @@ where } } + async fn _projects_locations_service_classes_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_service_classes_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_classes_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_classes_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_classes_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_classes_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_connection_maps_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_service_connection_maps_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + 
json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_connection_maps_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_connection_maps_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_connection_maps_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if 
value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_connection_maps_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_connection_policies_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_service_connection_policies_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_connection_policies_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_connection_policies_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + 
CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_connection_policies_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_connection_policies_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_spokes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1269,15 +2333,18 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "linked-interconnect-attachments.site-to-site-data-transfer" => Some(("linkedInterconnectAttachments.siteToSiteDataTransfer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "linked-interconnect-attachments.uris" => Some(("linkedInterconnectAttachments.uris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "linked-interconnect-attachments.vpc-network" => Some(("linkedInterconnectAttachments.vpcNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "linked-router-appliance-instances.site-to-site-data-transfer" => Some(("linkedRouterApplianceInstances.siteToSiteDataTransfer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "linked-router-appliance-instances.vpc-network" => Some(("linkedRouterApplianceInstances.vpcNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "linked-vpn-tunnels.site-to-site-data-transfer" => Some(("linkedVpnTunnels.siteToSiteDataTransfer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "linked-vpn-tunnels.uris" => Some(("linkedVpnTunnels.uris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "linked-vpn-tunnels.vpc-network" => Some(("linkedVpnTunnels.vpcNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "unique-id" => Some(("uniqueId", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "hub", "labels", "linked-interconnect-attachments", "linked-router-appliance-instances", "linked-vpn-tunnels", "name", "site-to-site-data-transfer", "state", "unique-id", "update-time", "uris"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "hub", "labels", "linked-interconnect-attachments", "linked-router-appliance-instances", "linked-vpn-tunnels", "name", "site-to-site-data-transfer", "state", "unique-id", "update-time", "uris", "vpc-network"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1459,7 +2526,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1518,7 +2585,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1602,15 +2669,18 @@ where "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "linked-interconnect-attachments.site-to-site-data-transfer" => Some(("linkedInterconnectAttachments.siteToSiteDataTransfer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"linked-interconnect-attachments.uris" => Some(("linkedInterconnectAttachments.uris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "linked-interconnect-attachments.vpc-network" => Some(("linkedInterconnectAttachments.vpcNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "linked-router-appliance-instances.site-to-site-data-transfer" => Some(("linkedRouterApplianceInstances.siteToSiteDataTransfer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "linked-router-appliance-instances.vpc-network" => Some(("linkedRouterApplianceInstances.vpcNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "linked-vpn-tunnels.site-to-site-data-transfer" => Some(("linkedVpnTunnels.siteToSiteDataTransfer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "linked-vpn-tunnels.uris" => Some(("linkedVpnTunnels.uris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "linked-vpn-tunnels.vpc-network" => Some(("linkedVpnTunnels.vpcNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "unique-id" => Some(("uniqueId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "hub", "labels", "linked-interconnect-attachments", "linked-router-appliance-instances", "linked-vpn-tunnels", "name", "site-to-site-data-transfer", "state", "unique-id", "update-time", "uris"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "hub", "labels", "linked-interconnect-attachments", "linked-router-appliance-instances", 
"linked-vpn-tunnels", "name", "site-to-site-data-transfer", "state", "unique-id", "update-time", "uris", "vpc-network"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1625,7 +2695,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -1892,6 +2962,21 @@ where ("locations-global-policy-based-routes-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_global_policy_based_routes_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-internal-ranges-create", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_create(opt, dry_run, &mut err).await; + }, + ("locations-internal-ranges-delete", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_delete(opt, dry_run, &mut err).await; + }, + ("locations-internal-ranges-get", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_get(opt, dry_run, &mut err).await; + }, + ("locations-internal-ranges-list", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_list(opt, dry_run, &mut err).await; + }, + ("locations-internal-ranges-patch", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_patch(opt, dry_run, &mut err).await; + }, ("locations-list", Some(opt)) => { call_result = self._projects_locations_list(opt, dry_run, &mut err).await; }, @@ -1907,6 +2992,33 @@ where ("locations-operations-list", Some(opt)) => { call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; }, + ("locations-service-classes-get-iam-policy", Some(opt)) => { + call_result = 
self._projects_locations_service_classes_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-service-classes-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_service_classes_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-service-classes-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_service_classes_test_iam_permissions(opt, dry_run, &mut err).await; + }, + ("locations-service-connection-maps-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_service_connection_maps_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-service-connection-maps-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_service_connection_maps_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-service-connection-maps-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_service_connection_maps_test_iam_permissions(opt, dry_run, &mut err).await; + }, + ("locations-service-connection-policies-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_service_connection_policies_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-service-connection-policies-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_service_connection_policies_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-service-connection-policies-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_service_connection_policies_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-spokes-create", Some(opt)) => { call_result = self._projects_locations_spokes_create(opt, dry_run, &mut err).await; }, @@ -2010,7 +3122,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-global-hubs-create', 'locations-global-hubs-delete', 'locations-global-hubs-get', 
'locations-global-hubs-get-iam-policy', 'locations-global-hubs-list', 'locations-global-hubs-patch', 'locations-global-hubs-set-iam-policy', 'locations-global-hubs-test-iam-permissions', 'locations-global-policy-based-routes-get-iam-policy', 'locations-global-policy-based-routes-set-iam-policy', 'locations-global-policy-based-routes-test-iam-permissions', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-spokes-create', 'locations-spokes-delete', 'locations-spokes-get', 'locations-spokes-get-iam-policy', 'locations-spokes-list', 'locations-spokes-patch', 'locations-spokes-set-iam-policy' and 'locations-spokes-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-get', 'locations-global-hubs-create', 'locations-global-hubs-delete', 'locations-global-hubs-get', 'locations-global-hubs-get-iam-policy', 'locations-global-hubs-list', 'locations-global-hubs-patch', 'locations-global-hubs-set-iam-policy', 'locations-global-hubs-test-iam-permissions', 'locations-global-policy-based-routes-get-iam-policy', 'locations-global-policy-based-routes-set-iam-policy', 'locations-global-policy-based-routes-test-iam-permissions', 'locations-internal-ranges-create', 'locations-internal-ranges-delete', 'locations-internal-ranges-get', 'locations-internal-ranges-list', 'locations-internal-ranges-patch', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-service-classes-get-iam-policy', 'locations-service-classes-set-iam-policy', 'locations-service-classes-test-iam-permissions', 'locations-service-connection-maps-get-iam-policy', 'locations-service-connection-maps-set-iam-policy', 'locations-service-connection-maps-test-iam-permissions', 'locations-service-connection-policies-get-iam-policy', 'locations-service-connection-policies-set-iam-policy', 
'locations-service-connection-policies-test-iam-permissions', 'locations-spokes-create', 'locations-spokes-delete', 'locations-spokes-get', 'locations-spokes-get-iam-policy', 'locations-spokes-list', 'locations-spokes-patch', 'locations-spokes-set-iam-policy' and 'locations-spokes-test-iam-permissions'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-get", @@ -2034,7 +3146,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-create", - Some(r##"Creates a new hub in the specified project."##), + Some(r##"Creates a new Network Connectivity Center hub in the specified project."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-global-hubs-create", vec![ (Some(r##"parent"##), @@ -2062,7 +3174,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-delete", - Some(r##"Deletes the specified hub."##), + Some(r##"Deletes a Network Connectivity Center hub."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-global-hubs-delete", vec![ (Some(r##"name"##), @@ -2084,7 +3196,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-get", - Some(r##"Gets details about the specified hub."##), + Some(r##"Gets details about a Network Connectivity Center hub."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-global-hubs-get", vec![ (Some(r##"name"##), @@ -2111,7 +3223,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2128,7 +3240,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-list", - Some(r##"Lists hubs in a given project."##), + Some(r##"Lists the Network Connectivity Center hubs associated with a given project."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-global-hubs-list", vec![ (Some(r##"parent"##), @@ -2150,7 +3262,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-patch", - Some(r##"Updates the description and/or labels of the specified hub."##), + Some(r##"Updates the description and/or labels of a Network Connectivity Center hub."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-global-hubs-patch", vec![ (Some(r##"name"##), @@ -2183,7 +3295,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2211,7 +3323,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2239,7 +3351,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2261,7 +3373,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2289,7 +3401,129 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-create", + Some(r##"Creates a new internal range in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-internal-ranges-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent resource's name of the internal range."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-delete", + Some(r##"Deletes a single internal range."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-internal-ranges-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the internal range to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-get", + Some(r##"Gets details of a single internal range."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-internal-ranges-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the InternalRange to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-list", + Some(r##"Lists internal ranges in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-internal-ranges-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource's name."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-patch", + Some(r##"Updates the parameters of a single internal range."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-internal-ranges-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Immutable. The name of an internal range. Format: projects/{project}/locations/{location}/internalRanges/{internal_range} See: https://google.aip.dev/122#fields-representing-resource-names"##), Some(true), Some(false)), @@ -2421,6 +3655,240 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-classes-get-iam-policy", + Some(r##"Gets the access control policy for a resource. 
Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-classes-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-classes-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-classes-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-classes-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-classes-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-connection-maps-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-connection-maps-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-connection-maps-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. 
Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-connection-maps-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-connection-maps-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-connection-maps-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-connection-policies-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-connection-policies-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-connection-policies-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. 
Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-connection-policies-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-connection-policies-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-service-connection-policies-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2428,7 +3896,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-create", - Some(r##"Creates a spoke in the specified project and location."##), + Some(r##"Creates a Network Connectivity Center spoke."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-spokes-create", vec![ (Some(r##"parent"##), @@ -2456,7 +3924,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-delete", - Some(r##"Deletes the specified spoke."##), + Some(r##"Deletes a Network Connectivity Center spoke."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-spokes-delete", vec![ (Some(r##"name"##), @@ -2478,7 +3946,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-get", - Some(r##"Gets details about the specified spoke."##), + Some(r##"Gets details about a Network Connectivity Center spoke."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-spokes-get", vec![ (Some(r##"name"##), @@ -2505,7 +3973,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2522,7 +3990,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-list", - Some(r##"Lists the spokes in the specified project and location."##), + Some(r##"Lists the Network Connectivity Center spokes in a specified project and location."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-spokes-list", vec![ (Some(r##"parent"##), @@ -2544,7 +4012,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-patch", - Some(r##"Updates the parameters of the specified spoke."##), + Some(r##"Updates the parameters of a Network Connectivity Center spoke."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli/projects_locations-spokes-patch", vec![ (Some(r##"name"##), @@ -2577,7 +4045,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2605,7 +4073,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2633,8 +4101,8 @@ async fn main() { let mut app = App::new("networkconnectivity1") .author("Sebastian Thiel ") - .version("4.0.1+20220210") - .about("The Network Connectivity API provides access to Network Connectivity Center.") + .version("5.0.2+20230105") + .about("This API enables connectivity with and between Google Cloud resources.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_networkconnectivity1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/networkconnectivity1/Cargo.toml b/gen/networkconnectivity1/Cargo.toml index e5c9465812..a7fe70db4b 100644 --- a/gen/networkconnectivity1/Cargo.toml +++ b/gen/networkconnectivity1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-networkconnectivity1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with networkconnectivity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networkconnectivity1" homepage = "https://cloud.google.com/network-connectivity/docs/reference/networkconnectivity/rest" -documentation = "https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-networkconnectivity1/5.0.2+20230105" license = "MIT" keywords = ["networkconnectivity", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/networkconnectivity1/README.md b/gen/networkconnectivity1/README.md index 763c9684db..b402ee925e 100644 --- a/gen/networkconnectivity1/README.md +++ b/gen/networkconnectivity1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-networkconnectivity1` library allows access to all features of the *Google networkconnectivity* service. 
-This documentation was generated from *networkconnectivity* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *networkconnectivity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *networkconnectivity* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *networkconnectivity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *networkconnectivity* *v1* API can be found at the [official documentation site](https://cloud.google.com/network-connectivity/docs/reference/networkconnectivity/rest). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/Networkconnectivity) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/Networkconnectivity) ... 
* projects - * [*locations get*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGetCall), [*locations global hubs create*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubCreateCall), [*locations global hubs delete*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubDeleteCall), [*locations global hubs get*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubGetCall), [*locations global hubs get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubGetIamPolicyCall), [*locations global hubs list*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubListCall), [*locations global hubs patch*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubPatchCall), [*locations global hubs set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubSetIamPolicyCall), [*locations global hubs test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubTestIamPermissionCall), [*locations global policy based routes get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalPolicyBasedRouteGetIamPolicyCall), [*locations global policy based routes set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalPolicyBasedRouteSetIamPolicyCall), [*locations global policy 
based routes test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationGlobalPolicyBasedRouteTestIamPermissionCall), [*locations internal ranges create*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangeCreateCall), [*locations internal ranges delete*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangeDeleteCall), [*locations internal ranges get*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangeGetCall), [*locations internal ranges list*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangeListCall), [*locations internal ranges patch*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangePatchCall), [*locations list*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationOperationListCall), [*locations service classes get iam 
policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceClassGetIamPolicyCall), [*locations service classes set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceClassSetIamPolicyCall), [*locations service classes test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceClassTestIamPermissionCall), [*locations service connection maps get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionMapGetIamPolicyCall), [*locations service connection maps set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionMapSetIamPolicyCall), [*locations service connection maps test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionMapTestIamPermissionCall), [*locations service connection policies get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionPolicyGetIamPolicyCall), [*locations service connection policies set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionPolicySetIamPolicyCall), [*locations service connection policies test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionPolicyTestIamPermissionCall), [*locations spokes create*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationSpokeCreateCall), [*locations 
spokes delete*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationSpokeDeleteCall), [*locations spokes get*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationSpokeGetCall), [*locations spokes get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationSpokeGetIamPolicyCall), [*locations spokes list*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationSpokeListCall), [*locations spokes patch*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationSpokePatchCall), [*locations spokes set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationSpokeSetIamPolicyCall) and [*locations spokes test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/api::ProjectLocationSpokeTestIamPermissionCall) + * [*locations get*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGetCall), [*locations global hubs create*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubCreateCall), [*locations global hubs delete*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubDeleteCall), [*locations global hubs get*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubGetCall), [*locations global hubs get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubGetIamPolicyCall), [*locations global hubs 
list*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubListCall), [*locations global hubs patch*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubPatchCall), [*locations global hubs set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubSetIamPolicyCall), [*locations global hubs test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalHubTestIamPermissionCall), [*locations global policy based routes get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalPolicyBasedRouteGetIamPolicyCall), [*locations global policy based routes set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalPolicyBasedRouteSetIamPolicyCall), [*locations global policy based routes test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationGlobalPolicyBasedRouteTestIamPermissionCall), [*locations internal ranges create*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangeCreateCall), [*locations internal ranges delete*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangeDeleteCall), [*locations internal ranges get*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangeGetCall), [*locations internal ranges list*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangeListCall), [*locations internal ranges 
patch*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationInternalRangePatchCall), [*locations list*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationOperationListCall), [*locations service classes get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceClassGetIamPolicyCall), [*locations service classes set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceClassSetIamPolicyCall), [*locations service classes test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceClassTestIamPermissionCall), [*locations service connection maps get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionMapGetIamPolicyCall), [*locations service connection maps set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionMapSetIamPolicyCall), [*locations service connection maps test iam 
permissions*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionMapTestIamPermissionCall), [*locations service connection policies get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionPolicyGetIamPolicyCall), [*locations service connection policies set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionPolicySetIamPolicyCall), [*locations service connection policies test iam permissions*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationServiceConnectionPolicyTestIamPermissionCall), [*locations spokes create*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationSpokeCreateCall), [*locations spokes delete*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationSpokeDeleteCall), [*locations spokes get*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationSpokeGetCall), [*locations spokes get iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationSpokeGetIamPolicyCall), [*locations spokes list*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationSpokeListCall), [*locations spokes patch*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationSpokePatchCall), [*locations spokes set iam policy*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationSpokeSetIamPolicyCall) and [*locations spokes test iam 
permissions*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/api::ProjectLocationSpokeTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/Networkconnectivity)** +* **[Hub](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/Networkconnectivity)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::CallBuilder) -* **[Resources](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::CallBuilder) +* **[Resources](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::Part)** + * **[Parts](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::Delegate) to the -[Method Builder](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::Delegate) to the +[Method Builder](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::RequestValue) and -[decodable](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::RequestValue) and +[decodable](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-networkconnectivity1/5.0.2-beta-1+20230105/google_networkconnectivity1/client::RequestValue) are moved +* [request values](https://docs.rs/google-networkconnectivity1/5.0.2+20230105/google_networkconnectivity1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/networkconnectivity1/src/api.rs b/gen/networkconnectivity1/src/api.rs index eab2e633b2..77431dfde8 100644 --- a/gen/networkconnectivity1/src/api.rs +++ b/gen/networkconnectivity1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> Networkconnectivity { Networkconnectivity { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://networkconnectivity.googleapis.com/".to_string(), _root_url: "https://networkconnectivity.googleapis.com/".to_string(), } @@ -131,7 +131,7 @@ impl<'a, S> Networkconnectivity { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/networkconnectivity1/src/client.rs b/gen/networkconnectivity1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/networkconnectivity1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/networkconnectivity1/src/lib.rs b/gen/networkconnectivity1/src/lib.rs index e942a441b2..ad81f169ec 100644 --- a/gen/networkconnectivity1/src/lib.rs +++ b/gen/networkconnectivity1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *networkconnectivity* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *networkconnectivity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *networkconnectivity* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *networkconnectivity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *networkconnectivity* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/network-connectivity/docs/reference/networkconnectivity/rest). diff --git a/gen/networkconnectivity1_alpha1-cli/Cargo.toml b/gen/networkconnectivity1_alpha1-cli/Cargo.toml index 2148cb2fb7..9251ec6b42 100644 --- a/gen/networkconnectivity1_alpha1-cli/Cargo.toml +++ b/gen/networkconnectivity1_alpha1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-networkconnectivity1_alpha1-cli" -version = "4.0.1+20220210" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with networkconnectivity (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networkconnectivity1_alpha1-cli" @@ -20,13 +20,13 @@ name = "networkconnectivity1-alpha1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-networkconnectivity1_alpha1] path = "../networkconnectivity1_alpha1" -version = "4.0.1+20220210" +version = "5.0.2+20230105" + diff --git 
a/gen/networkconnectivity1_alpha1-cli/README.md b/gen/networkconnectivity1_alpha1-cli/README.md index 07dbd2dd66..6ca777f2d9 100644 --- a/gen/networkconnectivity1_alpha1-cli/README.md +++ b/gen/networkconnectivity1_alpha1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *networkconnectivity* API at revision *20220210*. The CLI is at version *4.0.1*. +This documentation was generated from the *networkconnectivity* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash networkconnectivity1-alpha1 [options] @@ -39,7 +39,12 @@ networkconnectivity1-alpha1 [options] locations-global-hubs-patch (-r )... [-p ]... [-o ] locations-global-hubs-set-iam-policy (-r )... [-p ]... [-o ] locations-global-hubs-test-iam-permissions (-r )... [-p ]... [-o ] + locations-internal-ranges-create (-r )... [-p ]... [-o ] + locations-internal-ranges-delete [-p ]... [-o ] + locations-internal-ranges-get [-p ]... [-o ] locations-internal-ranges-get-iam-policy [-p ]... [-o ] + locations-internal-ranges-list [-p ]... [-o ] + locations-internal-ranges-patch (-r )... [-p ]... [-o ] locations-internal-ranges-set-iam-policy (-r )... [-p ]... [-o ] locations-internal-ranges-test-iam-permissions (-r )... [-p ]... [-o ] locations-list [-p ]... 
[-o ] diff --git a/gen/networkconnectivity1_alpha1-cli/mkdocs.yml b/gen/networkconnectivity1_alpha1-cli/mkdocs.yml index 3a727f3198..1cd35d0445 100644 --- a/gen/networkconnectivity1_alpha1-cli/mkdocs.yml +++ b/gen/networkconnectivity1_alpha1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: networkconnectivity v4.0.1+20220210 +site_name: networkconnectivity v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-networkconnectivity1_alpha1-cli site_description: A complete library to interact with networkconnectivity (protocol v1alpha1) @@ -7,33 +7,39 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/networkconnectiv docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-global-hubs-create.md', 'Projects', 'Locations Global Hubs Create'] -- ['projects_locations-global-hubs-delete.md', 'Projects', 'Locations Global Hubs Delete'] -- ['projects_locations-global-hubs-get.md', 'Projects', 'Locations Global Hubs Get'] -- ['projects_locations-global-hubs-get-iam-policy.md', 'Projects', 'Locations Global Hubs Get Iam Policy'] -- ['projects_locations-global-hubs-list.md', 'Projects', 'Locations Global Hubs List'] -- ['projects_locations-global-hubs-patch.md', 'Projects', 'Locations Global Hubs Patch'] -- ['projects_locations-global-hubs-set-iam-policy.md', 'Projects', 'Locations Global Hubs Set Iam Policy'] -- ['projects_locations-global-hubs-test-iam-permissions.md', 'Projects', 'Locations Global Hubs Test Iam Permissions'] -- ['projects_locations-internal-ranges-get-iam-policy.md', 'Projects', 'Locations Internal Ranges Get Iam Policy'] -- ['projects_locations-internal-ranges-set-iam-policy.md', 'Projects', 'Locations Internal Ranges Set Iam Policy'] -- ['projects_locations-internal-ranges-test-iam-permissions.md', 'Projects', 'Locations Internal Ranges Test Iam Permissions'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- 
['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-spokes-create.md', 'Projects', 'Locations Spokes Create'] -- ['projects_locations-spokes-delete.md', 'Projects', 'Locations Spokes Delete'] -- ['projects_locations-spokes-get.md', 'Projects', 'Locations Spokes Get'] -- ['projects_locations-spokes-get-iam-policy.md', 'Projects', 'Locations Spokes Get Iam Policy'] -- ['projects_locations-spokes-list.md', 'Projects', 'Locations Spokes List'] -- ['projects_locations-spokes-patch.md', 'Projects', 'Locations Spokes Patch'] -- ['projects_locations-spokes-set-iam-policy.md', 'Projects', 'Locations Spokes Set Iam Policy'] -- ['projects_locations-spokes-test-iam-permissions.md', 'Projects', 'Locations Spokes Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Global Hubs Create': 'projects_locations-global-hubs-create.md' + - 'Locations Global Hubs Delete': 'projects_locations-global-hubs-delete.md' + - 'Locations Global Hubs Get': 'projects_locations-global-hubs-get.md' + - 'Locations Global Hubs Get Iam Policy': 'projects_locations-global-hubs-get-iam-policy.md' + - 'Locations Global Hubs List': 'projects_locations-global-hubs-list.md' + - 'Locations Global Hubs Patch': 'projects_locations-global-hubs-patch.md' + - 'Locations Global Hubs Set Iam Policy': 'projects_locations-global-hubs-set-iam-policy.md' + - 'Locations Global Hubs Test Iam Permissions': 'projects_locations-global-hubs-test-iam-permissions.md' + - 'Locations Internal Ranges Create': 'projects_locations-internal-ranges-create.md' + - 'Locations Internal Ranges Delete': 'projects_locations-internal-ranges-delete.md' + - 
'Locations Internal Ranges Get': 'projects_locations-internal-ranges-get.md' + - 'Locations Internal Ranges Get Iam Policy': 'projects_locations-internal-ranges-get-iam-policy.md' + - 'Locations Internal Ranges List': 'projects_locations-internal-ranges-list.md' + - 'Locations Internal Ranges Patch': 'projects_locations-internal-ranges-patch.md' + - 'Locations Internal Ranges Set Iam Policy': 'projects_locations-internal-ranges-set-iam-policy.md' + - 'Locations Internal Ranges Test Iam Permissions': 'projects_locations-internal-ranges-test-iam-permissions.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Spokes Create': 'projects_locations-spokes-create.md' + - 'Locations Spokes Delete': 'projects_locations-spokes-delete.md' + - 'Locations Spokes Get': 'projects_locations-spokes-get.md' + - 'Locations Spokes Get Iam Policy': 'projects_locations-spokes-get-iam-policy.md' + - 'Locations Spokes List': 'projects_locations-spokes-list.md' + - 'Locations Spokes Patch': 'projects_locations-spokes-patch.md' + - 'Locations Spokes Set Iam Policy': 'projects_locations-spokes-set-iam-policy.md' + - 'Locations Spokes Test Iam Permissions': 'projects_locations-spokes-test-iam-permissions.md' theme: readthedocs diff --git a/gen/networkconnectivity1_alpha1-cli/src/client.rs b/gen/networkconnectivity1_alpha1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/networkconnectivity1_alpha1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use 
serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/networkconnectivity1_alpha1-cli/src/main.rs b/gen/networkconnectivity1_alpha1-cli/src/main.rs index 72650818cf..03bf804ed4 100644 --- a/gen/networkconnectivity1_alpha1-cli/src/main.rs +++ b/gen/networkconnectivity1_alpha1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_networkconnectivity1_alpha1::{api, Error, oauth2}; +use google_networkconnectivity1_alpha1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -317,7 +316,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -376,7 +375,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -478,7 +477,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, 
err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -702,6 +701,218 @@ where } } + async fn _projects_locations_internal_ranges_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ip-cidr-range" => Some(("ipCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "overlaps" => Some(("overlaps", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "peering" => Some(("peering", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "prefix-length" => Some(("prefixLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "target-cidr-range" => Some(("targetCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "usage" => Some(("usage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "users" => Some(("users", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "ip-cidr-range", "labels", "name", "network", "overlaps", "peering", "prefix-length", "target-cidr-range", "update-time", "usage", "users"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InternalRange = json::value::from_value(object).unwrap(); + 
let mut call = self.hub.projects().locations_internal_ranges_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + "internal-range-id" => { + call = call.internal_range_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["internal-range-id", "request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_internal_ranges_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_internal_ranges_delete(opt.value_of("name").unwrap_or("")); 
+ for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_internal_ranges_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_internal_ranges_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == 
*param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_internal_ranges_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_internal_ranges_get_iam_policy(opt.value_of("resource").unwrap_or("")); @@ -709,7 +920,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -758,6 +969,175 @@ where } } + async fn _projects_locations_internal_ranges_list(&self, opt: 
&ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_internal_ranges_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } 
+ + async fn _projects_locations_internal_ranges_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ip-cidr-range" => Some(("ipCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "overlaps" => Some(("overlaps", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "peering" => Some(("peering", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "prefix-length" => Some(("prefixLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "target-cidr-range" => Some(("targetCidrRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"usage" => Some(("usage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "users" => Some(("users", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "ip-cidr-range", "labels", "name", "network", "overlaps", "peering", "prefix-length", "target-cidr-range", "update-time", "usage", "users"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::InternalRange = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_internal_ranges_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = 
call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_internal_ranges_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -940,7 +1320,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1190,7 +1570,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1458,7 +1838,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1517,7 +1897,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1621,7 +2001,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -1879,9 +2259,24 @@ where ("locations-global-hubs-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_global_hubs_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-internal-ranges-create", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_create(opt, dry_run, &mut err).await; + }, + ("locations-internal-ranges-delete", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_delete(opt, dry_run, &mut err).await; + }, + ("locations-internal-ranges-get", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_get(opt, dry_run, &mut err).await; + }, ("locations-internal-ranges-get-iam-policy", Some(opt)) => { call_result = self._projects_locations_internal_ranges_get_iam_policy(opt, dry_run, &mut err).await; }, + ("locations-internal-ranges-list", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_list(opt, dry_run, &mut err).await; + }, + ("locations-internal-ranges-patch", Some(opt)) => { + call_result = self._projects_locations_internal_ranges_patch(opt, dry_run, &mut err).await; + }, ("locations-internal-ranges-set-iam-policy", Some(opt)) => { call_result = self._projects_locations_internal_ranges_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -2006,7 +2401,7 @@ where async fn 
main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-global-hubs-create', 'locations-global-hubs-delete', 'locations-global-hubs-get', 'locations-global-hubs-get-iam-policy', 'locations-global-hubs-list', 'locations-global-hubs-patch', 'locations-global-hubs-set-iam-policy', 'locations-global-hubs-test-iam-permissions', 'locations-internal-ranges-get-iam-policy', 'locations-internal-ranges-set-iam-policy', 'locations-internal-ranges-test-iam-permissions', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-spokes-create', 'locations-spokes-delete', 'locations-spokes-get', 'locations-spokes-get-iam-policy', 'locations-spokes-list', 'locations-spokes-patch', 'locations-spokes-set-iam-policy' and 'locations-spokes-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-get', 'locations-global-hubs-create', 'locations-global-hubs-delete', 'locations-global-hubs-get', 'locations-global-hubs-get-iam-policy', 'locations-global-hubs-list', 'locations-global-hubs-patch', 'locations-global-hubs-set-iam-policy', 'locations-global-hubs-test-iam-permissions', 'locations-internal-ranges-create', 'locations-internal-ranges-delete', 'locations-internal-ranges-get', 'locations-internal-ranges-get-iam-policy', 'locations-internal-ranges-list', 'locations-internal-ranges-patch', 'locations-internal-ranges-set-iam-policy', 'locations-internal-ranges-test-iam-permissions', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-spokes-create', 'locations-spokes-delete', 'locations-spokes-get', 'locations-spokes-get-iam-policy', 'locations-spokes-list', 'locations-spokes-patch', 'locations-spokes-set-iam-policy' and 'locations-spokes-test-iam-permissions'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details 
at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-get", @@ -2030,7 +2425,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-create", - Some(r##"Creates a new Hub in a given project and location."##), + Some(r##"Creates a new Network Connectivity Center hub in the specified project."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-global-hubs-create", vec![ (Some(r##"parent"##), @@ -2058,7 +2453,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-delete", - Some(r##"Deletes a single Hub."##), + Some(r##"Deletes a Network Connectivity Center hub."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-global-hubs-delete", vec![ (Some(r##"name"##), @@ -2080,7 +2475,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-get", - Some(r##"Gets details of a single Hub."##), + Some(r##"Gets details about a Network Connectivity Center hub."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-global-hubs-get", vec![ (Some(r##"name"##), @@ -2107,7 +2502,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2124,7 +2519,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-list", - Some(r##"Lists Hubs in a given project and location."##), + Some(r##"Lists the Network Connectivity Center hubs associated with a given project."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-global-hubs-list", vec![ (Some(r##"parent"##), @@ -2146,7 +2541,7 @@ async fn main() { Some(false)), ]), ("locations-global-hubs-patch", - Some(r##"Updates the parameters of a single Hub."##), + Some(r##"Updates the description and/or labels of a Network Connectivity Center hub."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-global-hubs-patch", vec![ (Some(r##"name"##), @@ -2179,7 +2574,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2207,7 +2602,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2223,6 +2618,78 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-create", + Some(r##"Creates a new internal range in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-internal-ranges-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource's name of the InternalRange."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-delete", + Some(r##"Deletes a single internal range."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-internal-ranges-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The name of the InternalRange to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-get", + Some(r##"Gets details of a single internal range."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-internal-ranges-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the InternalRange to get."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2235,7 +2702,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2245,6 +2712,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-list", + Some(r##"Lists internal ranges in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-internal-ranges-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent resource's name."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-internal-ranges-patch", + Some(r##"Updates the parameters of a single internal range."##), + "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-internal-ranges-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Immutable. The name of an internal range. Format: projects/{project}/locations/{location}/internalRanges/{internal_range} See: https://google.aip.dev/122#fields-representing-resource-names"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2257,7 +2774,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2285,7 +2802,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2424,7 +2941,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-create", - Some(r##"Creates a new Spoke in a given project and location."##), + Some(r##"Creates a Network Connectivity Center spoke."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-spokes-create", vec![ (Some(r##"parent"##), @@ -2452,7 +2969,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-delete", - Some(r##"Deletes a single Spoke."##), + Some(r##"Deletes a Network Connectivity Center spoke."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-spokes-delete", vec![ (Some(r##"name"##), @@ -2474,7 +2991,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-get", - Some(r##"Gets details of a single Spoke."##), + Some(r##"Gets details about a Network Connectivity Center spoke."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-spokes-get", vec![ (Some(r##"name"##), @@ -2501,7 +3018,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2518,7 +3035,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-list", - Some(r##"Lists Spokes in a given project and location."##), + Some(r##"Lists the Network Connectivity Center spokes in a specified project and location."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-spokes-list", vec![ (Some(r##"parent"##), @@ -2540,7 +3057,7 @@ async fn main() { Some(false)), ]), ("locations-spokes-patch", - Some(r##"Updates the parameters of a single Spoke."##), + Some(r##"Updates the parameters of a Network Connectivity Center spoke."##), "Details at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli/projects_locations-spokes-patch", vec![ (Some(r##"name"##), @@ -2573,7 +3090,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2601,7 +3118,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2629,8 +3146,8 @@ async fn main() { let mut app = App::new("networkconnectivity1-alpha1") .author("Sebastian Thiel ") - .version("4.0.1+20220210") - .about("The Network Connectivity API provides access to Network Connectivity Center.") + .version("5.0.2+20230105") + .about("This API enables connectivity with and between Google Cloud resources.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_networkconnectivity1_alpha1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/networkconnectivity1_alpha1/Cargo.toml b/gen/networkconnectivity1_alpha1/Cargo.toml index f0fb6cbe93..e7015d94a5 100644 --- a/gen/networkconnectivity1_alpha1/Cargo.toml +++ b/gen/networkconnectivity1_alpha1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-networkconnectivity1_alpha1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with networkconnectivity (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networkconnectivity1_alpha1" homepage = "https://cloud.google.com/network-connectivity/docs/reference/networkconnectivity/rest" -documentation = "https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105" license = "MIT" keywords = ["networkconnectivity", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/networkconnectivity1_alpha1/README.md b/gen/networkconnectivity1_alpha1/README.md index 845d94524a..76d48b7d82 100644 --- a/gen/networkconnectivity1_alpha1/README.md +++ b/gen/networkconnectivity1_alpha1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-networkconnectivity1_alpha1` library allows access to all features of the *Google networkconnectivity* service. -This documentation was generated from *networkconnectivity* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *networkconnectivity:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *networkconnectivity* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *networkconnectivity:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *networkconnectivity* *v1_alpha1* API can be found at the [official documentation site](https://cloud.google.com/network-connectivity/docs/reference/networkconnectivity/rest). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/Networkconnectivity) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/Networkconnectivity) ... 
* projects - * [*locations get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGetCall), [*locations global hubs create*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubCreateCall), [*locations global hubs delete*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubDeleteCall), [*locations global hubs get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubGetCall), [*locations global hubs get iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubGetIamPolicyCall), [*locations global hubs list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubListCall), [*locations global hubs patch*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubPatchCall), [*locations global hubs set iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubSetIamPolicyCall), [*locations global hubs test iam permissions*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubTestIamPermissionCall), [*locations internal ranges create*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeCreateCall), [*locations internal ranges 
delete*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeDeleteCall), [*locations internal ranges get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeGetCall), [*locations internal ranges get iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeGetIamPolicyCall), [*locations internal ranges list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeListCall), [*locations internal ranges patch*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangePatchCall), [*locations internal ranges set iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeSetIamPolicyCall), [*locations internal ranges test iam permissions*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeTestIamPermissionCall), [*locations list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationOperationDeleteCall), [*locations operations 
get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationOperationListCall), [*locations spokes create*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeCreateCall), [*locations spokes delete*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeDeleteCall), [*locations spokes get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeGetCall), [*locations spokes get iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeGetIamPolicyCall), [*locations spokes list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeListCall), [*locations spokes patch*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokePatchCall), [*locations spokes set iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeSetIamPolicyCall) and [*locations spokes test iam permissions*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeTestIamPermissionCall) + * [*locations get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGetCall), [*locations global hubs 
create*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubCreateCall), [*locations global hubs delete*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubDeleteCall), [*locations global hubs get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubGetCall), [*locations global hubs get iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubGetIamPolicyCall), [*locations global hubs list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubListCall), [*locations global hubs patch*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubPatchCall), [*locations global hubs set iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubSetIamPolicyCall), [*locations global hubs test iam permissions*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationGlobalHubTestIamPermissionCall), [*locations internal ranges create*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeCreateCall), [*locations internal ranges delete*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeDeleteCall), [*locations internal ranges get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeGetCall), [*locations internal ranges 
get iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeGetIamPolicyCall), [*locations internal ranges list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeListCall), [*locations internal ranges patch*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangePatchCall), [*locations internal ranges set iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeSetIamPolicyCall), [*locations internal ranges test iam permissions*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationInternalRangeTestIamPermissionCall), [*locations list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationOperationListCall), [*locations spokes create*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeCreateCall), [*locations spokes 
delete*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeDeleteCall), [*locations spokes get*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeGetCall), [*locations spokes get iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeGetIamPolicyCall), [*locations spokes list*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeListCall), [*locations spokes patch*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokePatchCall), [*locations spokes set iam policy*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeSetIamPolicyCall) and [*locations spokes test iam permissions*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/api::ProjectLocationSpokeTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/Networkconnectivity)** +* **[Hub](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/Networkconnectivity)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::CallBuilder) -* **[Resources](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::CallBuilder) +* **[Resources](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::Part)** + * **[Parts](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -135,17 +135,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -155,29 +155,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::Delegate) to the -[Method Builder](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::Delegate) to the +[Method Builder](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::RequestValue) and -[decodable](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::RequestValue) and +[decodable](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2-beta-1+20230105/google_networkconnectivity1_alpha1/client::RequestValue) are moved +* [request values](https://docs.rs/google-networkconnectivity1_alpha1/5.0.2+20230105/google_networkconnectivity1_alpha1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/networkconnectivity1_alpha1/src/api.rs b/gen/networkconnectivity1_alpha1/src/api.rs index 29662f4cf1..d12058cc14 100644 --- a/gen/networkconnectivity1_alpha1/src/api.rs +++ b/gen/networkconnectivity1_alpha1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> Networkconnectivity { Networkconnectivity { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://networkconnectivity.googleapis.com/".to_string(), _root_url: "https://networkconnectivity.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> Networkconnectivity { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/networkconnectivity1_alpha1/src/client.rs b/gen/networkconnectivity1_alpha1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/networkconnectivity1_alpha1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/networkconnectivity1_alpha1/src/lib.rs b/gen/networkconnectivity1_alpha1/src/lib.rs index 99d9121cbb..d751ac8133 100644 --- a/gen/networkconnectivity1_alpha1/src/lib.rs +++ b/gen/networkconnectivity1_alpha1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *networkconnectivity* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *networkconnectivity:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *networkconnectivity* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *networkconnectivity:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *networkconnectivity* *v1_alpha1* API can be found at the //! [official documentation site](https://cloud.google.com/network-connectivity/docs/reference/networkconnectivity/rest). diff --git a/gen/networkmanagement1-cli/Cargo.toml b/gen/networkmanagement1-cli/Cargo.toml index 246b9e08bc..4eb5c54d5c 100644 --- a/gen/networkmanagement1-cli/Cargo.toml +++ b/gen/networkmanagement1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-networkmanagement1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Network Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networkmanagement1-cli" @@ -20,13 +20,13 @@ name = "networkmanagement1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-networkmanagement1] path = "../networkmanagement1" -version = "4.0.1+20220223" +version = "5.0.2+20230105" + diff --git a/gen/networkmanagement1-cli/README.md b/gen/networkmanagement1-cli/README.md index 
a68fa63614..dfa19d1bf3 100644 --- a/gen/networkmanagement1-cli/README.md +++ b/gen/networkmanagement1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Network Management* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *Network Management* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash networkmanagement1 [options] diff --git a/gen/networkmanagement1-cli/mkdocs.yml b/gen/networkmanagement1-cli/mkdocs.yml index 666a3aefa1..5c8123a7f3 100644 --- a/gen/networkmanagement1-cli/mkdocs.yml +++ b/gen/networkmanagement1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Network Management v4.0.1+20220223 +site_name: Network Management v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-networkmanagement1-cli site_description: A complete library to interact with Network Management (protocol v1) @@ -7,23 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/networkmanagemen docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-global-connectivity-tests-create.md', 'Projects', 'Locations Global Connectivity Tests Create'] -- ['projects_locations-global-connectivity-tests-delete.md', 'Projects', 'Locations Global Connectivity Tests Delete'] -- ['projects_locations-global-connectivity-tests-get.md', 'Projects', 'Locations Global Connectivity Tests Get'] -- ['projects_locations-global-connectivity-tests-get-iam-policy.md', 'Projects', 'Locations Global Connectivity Tests Get Iam Policy'] -- ['projects_locations-global-connectivity-tests-list.md', 'Projects', 'Locations Global Connectivity Tests List'] -- ['projects_locations-global-connectivity-tests-patch.md', 'Projects', 'Locations Global Connectivity Tests Patch'] -- 
['projects_locations-global-connectivity-tests-rerun.md', 'Projects', 'Locations Global Connectivity Tests Rerun'] -- ['projects_locations-global-connectivity-tests-set-iam-policy.md', 'Projects', 'Locations Global Connectivity Tests Set Iam Policy'] -- ['projects_locations-global-connectivity-tests-test-iam-permissions.md', 'Projects', 'Locations Global Connectivity Tests Test Iam Permissions'] -- ['projects_locations-global-operations-cancel.md', 'Projects', 'Locations Global Operations Cancel'] -- ['projects_locations-global-operations-delete.md', 'Projects', 'Locations Global Operations Delete'] -- ['projects_locations-global-operations-get.md', 'Projects', 'Locations Global Operations Get'] -- ['projects_locations-global-operations-list.md', 'Projects', 'Locations Global Operations List'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Global Connectivity Tests Create': 'projects_locations-global-connectivity-tests-create.md' + - 'Locations Global Connectivity Tests Delete': 'projects_locations-global-connectivity-tests-delete.md' + - 'Locations Global Connectivity Tests Get': 'projects_locations-global-connectivity-tests-get.md' + - 'Locations Global Connectivity Tests Get Iam Policy': 'projects_locations-global-connectivity-tests-get-iam-policy.md' + - 'Locations Global Connectivity Tests List': 'projects_locations-global-connectivity-tests-list.md' + - 'Locations Global Connectivity Tests Patch': 'projects_locations-global-connectivity-tests-patch.md' + - 'Locations Global Connectivity Tests Rerun': 'projects_locations-global-connectivity-tests-rerun.md' + - 'Locations Global Connectivity Tests Set Iam Policy': 'projects_locations-global-connectivity-tests-set-iam-policy.md' + - 'Locations Global Connectivity Tests Test Iam Permissions': 'projects_locations-global-connectivity-tests-test-iam-permissions.md' + - 'Locations Global 
Operations Cancel': 'projects_locations-global-operations-cancel.md' + - 'Locations Global Operations Delete': 'projects_locations-global-operations-delete.md' + - 'Locations Global Operations Get': 'projects_locations-global-operations-get.md' + - 'Locations Global Operations List': 'projects_locations-global-operations-list.md' + - 'Locations List': 'projects_locations-list.md' theme: readthedocs diff --git a/gen/networkmanagement1-cli/src/client.rs b/gen/networkmanagement1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/networkmanagement1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/networkmanagement1-cli/src/main.rs b/gen/networkmanagement1-cli/src/main.rs index 86a80db2d3..18cc6edffa 100644 --- a/gen/networkmanagement1-cli/src/main.rs +++ b/gen/networkmanagement1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_networkmanagement1::{api, Error, oauth2}; +use google_networkmanagement1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -128,6 +127,9 @@ where match &temp_cursor.to_string()[..] { "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.app-engine-version.uri" => Some(("destination.appEngineVersion.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.cloud-function.uri" => Some(("destination.cloudFunction.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.cloud-run-revision.uri" => Some(("destination.cloudRunRevision.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.cloud-sql-instance" => Some(("destination.cloudSqlInstance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.gke-master-cluster" => Some(("destination.gkeMasterCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.instance" => 
Some(("destination.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -145,6 +147,9 @@ where "reachability-details.result" => Some(("reachabilityDetails.result", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "reachability-details.verify-time" => Some(("reachabilityDetails.verifyTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "related-projects" => Some(("relatedProjects", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "source.app-engine-version.uri" => Some(("source.appEngineVersion.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source.cloud-function.uri" => Some(("source.cloudFunction.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source.cloud-run-revision.uri" => Some(("source.cloudRunRevision.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source.cloud-sql-instance" => Some(("source.cloudSqlInstance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source.gke-master-cluster" => Some(("source.gkeMasterCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source.instance" => Some(("source.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -155,7 +160,7 @@ where "source.project-id" => Some(("source.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-sql-instance", "code", "create-time", "description", "destination", "display-name", "error", "gke-master-cluster", "instance", "ip-address", "labels", "message", "name", "network", "network-type", "port", "project-id", "protocol", "reachability-details", "related-projects", "result", "source", "update-time", "verify-time"]); + let suggestion = 
FieldCursor::did_you_mean(key, &vec!["app-engine-version", "cloud-function", "cloud-run-revision", "cloud-sql-instance", "code", "create-time", "description", "destination", "display-name", "error", "gke-master-cluster", "instance", "ip-address", "labels", "message", "name", "network", "network-type", "port", "project-id", "protocol", "reachability-details", "related-projects", "result", "source", "update-time", "uri", "verify-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -330,7 +335,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -389,7 +394,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -469,6 +474,9 @@ where match &temp_cursor.to_string()[..] 
{ "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.app-engine-version.uri" => Some(("destination.appEngineVersion.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.cloud-function.uri" => Some(("destination.cloudFunction.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "destination.cloud-run-revision.uri" => Some(("destination.cloudRunRevision.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.cloud-sql-instance" => Some(("destination.cloudSqlInstance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.gke-master-cluster" => Some(("destination.gkeMasterCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "destination.instance" => Some(("destination.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -486,6 +494,9 @@ where "reachability-details.result" => Some(("reachabilityDetails.result", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "reachability-details.verify-time" => Some(("reachabilityDetails.verifyTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "related-projects" => Some(("relatedProjects", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "source.app-engine-version.uri" => Some(("source.appEngineVersion.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source.cloud-function.uri" => Some(("source.cloudFunction.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source.cloud-run-revision.uri" => Some(("source.cloudRunRevision.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source.cloud-sql-instance" => Some(("source.cloudSqlInstance", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source.gke-master-cluster" => Some(("source.gkeMasterCluster", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source.instance" => Some(("source.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -496,7 +507,7 @@ where "source.project-id" => Some(("source.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["cloud-sql-instance", "code", "create-time", "description", "destination", "display-name", "error", "gke-master-cluster", "instance", "ip-address", "labels", "message", "name", "network", "network-type", "port", "project-id", "protocol", "reachability-details", "related-projects", "result", "source", "update-time", "verify-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["app-engine-version", "cloud-function", "cloud-run-revision", "cloud-sql-instance", "code", "create-time", "description", "destination", "display-name", "error", "gke-master-cluster", "instance", "ip-address", "labels", "message", "name", "network", "network-type", "port", "project-id", "protocol", "reachability-details", "related-projects", "result", "source", "update-time", "uri", "verify-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -511,7 +522,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1014,7 +1025,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1076,7 +1087,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1360,7 +1371,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1460,7 +1471,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1488,7 +1499,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1632,7 +1643,7 @@ async fn main() { let mut app = App::new("networkmanagement1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20230105") .about("The Network Management API provides a collection of network performance monitoring and diagnostic capabilities.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_networkmanagement1_cli") .arg(Arg::with_name("url") diff --git a/gen/networkmanagement1/Cargo.toml b/gen/networkmanagement1/Cargo.toml index 3f9a6dca86..31e1af8162 100644 --- a/gen/networkmanagement1/Cargo.toml +++ b/gen/networkmanagement1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-networkmanagement1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Network Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networkmanagement1" homepage = "https://cloud.google.com/" -documentation = "https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-networkmanagement1/5.0.2+20230105" license = "MIT" keywords = ["networkmanagement", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/networkmanagement1/README.md b/gen/networkmanagement1/README.md index 69a3b1a8de..fd1ed3c835 100644 --- a/gen/networkmanagement1/README.md +++ b/gen/networkmanagement1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-networkmanagement1` library allows access to all features of the *Google Network Management* service. 
-This documentation was generated from *Network Management* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *networkmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Network Management* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *networkmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Network Management* *v1* API can be found at the [official documentation site](https://cloud.google.com/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/NetworkManagement) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/NetworkManagement) ... * projects - * [*locations get*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGetCall), [*locations global connectivity tests create*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestCreateCall), [*locations global connectivity tests delete*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestDeleteCall), [*locations global connectivity tests get*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestGetCall), [*locations global connectivity tests get iam policy*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestGetIamPolicyCall), [*locations global connectivity tests 
list*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestListCall), [*locations global connectivity tests patch*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestPatchCall), [*locations global connectivity tests rerun*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestRerunCall), [*locations global connectivity tests set iam policy*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestSetIamPolicyCall), [*locations global connectivity tests test iam permissions*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestTestIamPermissionCall), [*locations global operations cancel*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalOperationCancelCall), [*locations global operations delete*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalOperationDeleteCall), [*locations global operations get*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalOperationGetCall), [*locations global operations list*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationGlobalOperationListCall) and [*locations list*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/api::ProjectLocationListCall) + * [*locations get*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGetCall), [*locations global connectivity tests 
create*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestCreateCall), [*locations global connectivity tests delete*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestDeleteCall), [*locations global connectivity tests get*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestGetCall), [*locations global connectivity tests get iam policy*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestGetIamPolicyCall), [*locations global connectivity tests list*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestListCall), [*locations global connectivity tests patch*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestPatchCall), [*locations global connectivity tests rerun*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestRerunCall), [*locations global connectivity tests set iam policy*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestSetIamPolicyCall), [*locations global connectivity tests test iam permissions*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalConnectivityTestTestIamPermissionCall), [*locations global operations cancel*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalOperationCancelCall), [*locations global operations 
delete*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalOperationDeleteCall), [*locations global operations get*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalOperationGetCall), [*locations global operations list*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationGlobalOperationListCall) and [*locations list*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/api::ProjectLocationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/NetworkManagement)** +* **[Hub](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/NetworkManagement)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::CallBuilder) -* **[Resources](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::CallBuilder) +* **[Resources](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::Resource)** * primary types that you can apply *Activities* to * a 
collection of properties and *Parts* - * **[Parts](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::Part)** + * **[Parts](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::Delegate) to the -[Method Builder](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::Delegate) to the +[Method Builder](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::RequestValue) and -[decodable](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::RequestValue) and +[decodable](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-networkmanagement1/5.0.2-beta-1+20230105/google_networkmanagement1/client::RequestValue) are moved +* [request values](https://docs.rs/google-networkmanagement1/5.0.2+20230105/google_networkmanagement1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/networkmanagement1/src/api.rs b/gen/networkmanagement1/src/api.rs index 8082d61860..e28ff2efeb 100644 --- a/gen/networkmanagement1/src/api.rs +++ b/gen/networkmanagement1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> NetworkManagement { NetworkManagement { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://networkmanagement.googleapis.com/".to_string(), _root_url: "https://networkmanagement.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> NetworkManagement { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/networkmanagement1/src/client.rs b/gen/networkmanagement1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/networkmanagement1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/networkmanagement1/src/lib.rs b/gen/networkmanagement1/src/lib.rs index b3158150d4..99931a99fb 100644 --- a/gen/networkmanagement1/src/lib.rs +++ b/gen/networkmanagement1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Network Management* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *networkmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Network Management* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *networkmanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Network Management* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/). diff --git a/gen/networksecurity1-cli/Cargo.toml b/gen/networksecurity1-cli/Cargo.toml index 525f5c3571..627339d84a 100644 --- a/gen/networksecurity1-cli/Cargo.toml +++ b/gen/networksecurity1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-networksecurity1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with NetworkSecurity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networksecurity1-cli" @@ -20,13 +20,13 @@ name = "networksecurity1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-networksecurity1] path = "../networksecurity1" -version = "4.0.1+20220223" +version = "5.0.2+20230106" + diff --git a/gen/networksecurity1-cli/README.md b/gen/networksecurity1-cli/README.md index 10dfaabc89..a049662529 100644 --- a/gen/networksecurity1-cli/README.md +++ b/gen/networksecurity1-cli/README.md @@ 
-25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *NetworkSecurity* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *NetworkSecurity* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash networksecurity1 [options] diff --git a/gen/networksecurity1-cli/mkdocs.yml b/gen/networksecurity1-cli/mkdocs.yml index a8810d9a6d..8ff74a2d4a 100644 --- a/gen/networksecurity1-cli/mkdocs.yml +++ b/gen/networksecurity1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: NetworkSecurity v4.0.1+20220223 +site_name: NetworkSecurity v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-networksecurity1-cli site_description: A complete library to interact with NetworkSecurity (protocol v1) @@ -7,38 +7,39 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/networksecurity1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-authorization-policies-create.md', 'Projects', 'Locations Authorization Policies Create'] -- ['projects_locations-authorization-policies-delete.md', 'Projects', 'Locations Authorization Policies Delete'] -- ['projects_locations-authorization-policies-get.md', 'Projects', 'Locations Authorization Policies Get'] -- ['projects_locations-authorization-policies-get-iam-policy.md', 'Projects', 'Locations Authorization Policies Get Iam Policy'] -- ['projects_locations-authorization-policies-list.md', 'Projects', 'Locations Authorization Policies List'] -- ['projects_locations-authorization-policies-patch.md', 'Projects', 'Locations Authorization Policies Patch'] -- ['projects_locations-authorization-policies-set-iam-policy.md', 'Projects', 'Locations Authorization Policies Set Iam Policy'] -- ['projects_locations-authorization-policies-test-iam-permissions.md', 'Projects', 'Locations Authorization Policies Test Iam Permissions'] -- 
['projects_locations-client-tls-policies-create.md', 'Projects', 'Locations Client Tls Policies Create'] -- ['projects_locations-client-tls-policies-delete.md', 'Projects', 'Locations Client Tls Policies Delete'] -- ['projects_locations-client-tls-policies-get.md', 'Projects', 'Locations Client Tls Policies Get'] -- ['projects_locations-client-tls-policies-get-iam-policy.md', 'Projects', 'Locations Client Tls Policies Get Iam Policy'] -- ['projects_locations-client-tls-policies-list.md', 'Projects', 'Locations Client Tls Policies List'] -- ['projects_locations-client-tls-policies-patch.md', 'Projects', 'Locations Client Tls Policies Patch'] -- ['projects_locations-client-tls-policies-set-iam-policy.md', 'Projects', 'Locations Client Tls Policies Set Iam Policy'] -- ['projects_locations-client-tls-policies-test-iam-permissions.md', 'Projects', 'Locations Client Tls Policies Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-server-tls-policies-create.md', 'Projects', 'Locations Server Tls Policies Create'] -- ['projects_locations-server-tls-policies-delete.md', 'Projects', 'Locations Server Tls Policies Delete'] -- ['projects_locations-server-tls-policies-get.md', 'Projects', 'Locations Server Tls Policies Get'] -- ['projects_locations-server-tls-policies-get-iam-policy.md', 'Projects', 'Locations Server Tls Policies Get Iam Policy'] -- ['projects_locations-server-tls-policies-list.md', 'Projects', 'Locations Server Tls Policies List'] -- ['projects_locations-server-tls-policies-patch.md', 'Projects', 
'Locations Server Tls Policies Patch'] -- ['projects_locations-server-tls-policies-set-iam-policy.md', 'Projects', 'Locations Server Tls Policies Set Iam Policy'] -- ['projects_locations-server-tls-policies-test-iam-permissions.md', 'Projects', 'Locations Server Tls Policies Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Authorization Policies Create': 'projects_locations-authorization-policies-create.md' + - 'Locations Authorization Policies Delete': 'projects_locations-authorization-policies-delete.md' + - 'Locations Authorization Policies Get': 'projects_locations-authorization-policies-get.md' + - 'Locations Authorization Policies Get Iam Policy': 'projects_locations-authorization-policies-get-iam-policy.md' + - 'Locations Authorization Policies List': 'projects_locations-authorization-policies-list.md' + - 'Locations Authorization Policies Patch': 'projects_locations-authorization-policies-patch.md' + - 'Locations Authorization Policies Set Iam Policy': 'projects_locations-authorization-policies-set-iam-policy.md' + - 'Locations Authorization Policies Test Iam Permissions': 'projects_locations-authorization-policies-test-iam-permissions.md' + - 'Locations Client Tls Policies Create': 'projects_locations-client-tls-policies-create.md' + - 'Locations Client Tls Policies Delete': 'projects_locations-client-tls-policies-delete.md' + - 'Locations Client Tls Policies Get': 'projects_locations-client-tls-policies-get.md' + - 'Locations Client Tls Policies Get Iam Policy': 'projects_locations-client-tls-policies-get-iam-policy.md' + - 'Locations Client Tls Policies List': 'projects_locations-client-tls-policies-list.md' + - 'Locations Client Tls Policies Patch': 'projects_locations-client-tls-policies-patch.md' + - 'Locations Client Tls Policies Set Iam Policy': 'projects_locations-client-tls-policies-set-iam-policy.md' + - 'Locations Client Tls Policies Test Iam Permissions': 
'projects_locations-client-tls-policies-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Server Tls Policies Create': 'projects_locations-server-tls-policies-create.md' + - 'Locations Server Tls Policies Delete': 'projects_locations-server-tls-policies-delete.md' + - 'Locations Server Tls Policies Get': 'projects_locations-server-tls-policies-get.md' + - 'Locations Server Tls Policies Get Iam Policy': 'projects_locations-server-tls-policies-get-iam-policy.md' + - 'Locations Server Tls Policies List': 'projects_locations-server-tls-policies-list.md' + - 'Locations Server Tls Policies Patch': 'projects_locations-server-tls-policies-patch.md' + - 'Locations Server Tls Policies Set Iam Policy': 'projects_locations-server-tls-policies-set-iam-policy.md' + - 'Locations Server Tls Policies Test Iam Permissions': 'projects_locations-server-tls-policies-test-iam-permissions.md' theme: readthedocs diff --git a/gen/networksecurity1-cli/src/client.rs b/gen/networksecurity1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/networksecurity1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const 
FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/networksecurity1-cli/src/main.rs b/gen/networksecurity1-cli/src/main.rs index b60e2c3c7f..0dca336b50 100644 --- a/gen/networksecurity1-cli/src/main.rs +++ b/gen/networksecurity1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_networksecurity1::{api, Error, oauth2}; +use google_networksecurity1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -256,7 +255,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -315,7 +314,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -409,7 +408,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut 
found = false; @@ -837,7 +836,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -896,7 +895,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -992,7 +991,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1275,7 +1274,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1525,7 +1524,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1784,7 +1783,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), 
err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1843,7 +1842,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1939,7 +1938,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2415,7 +2414,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2487,7 +2486,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2515,7 +2514,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2615,7 +2614,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2687,7 +2686,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2715,7 +2714,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2953,7 +2952,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3025,7 +3024,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3053,7 +3052,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3081,7 +3080,7 @@ async fn main() { let mut app = App::new("networksecurity1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20230106") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_networksecurity1_cli") .arg(Arg::with_name("url") diff --git a/gen/networksecurity1/Cargo.toml b/gen/networksecurity1/Cargo.toml index 0917d0b20a..bfa579d0f0 100644 --- a/gen/networksecurity1/Cargo.toml +++ b/gen/networksecurity1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-networksecurity1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with NetworkSecurity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networksecurity1" homepage = "https://cloud.google.com/networking" -documentation = 
"https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-networksecurity1/5.0.2+20230106" license = "MIT" keywords = ["networksecurity", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/networksecurity1/README.md b/gen/networksecurity1/README.md index 5908d5cd25..cd46d44eae 100644 --- a/gen/networksecurity1/README.md +++ b/gen/networksecurity1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-networksecurity1` library allows access to all features of the *Google NetworkSecurity* service. -This documentation was generated from *NetworkSecurity* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *networksecurity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *NetworkSecurity* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *networksecurity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *NetworkSecurity* *v1* API can be found at the [official documentation site](https://cloud.google.com/networking). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/NetworkSecurity) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/NetworkSecurity) ... 
* projects - * [*locations authorization policies create*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyCreateCall), [*locations authorization policies delete*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyDeleteCall), [*locations authorization policies get*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyGetCall), [*locations authorization policies get iam policy*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyGetIamPolicyCall), [*locations authorization policies list*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyListCall), [*locations authorization policies patch*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyPatchCall), [*locations authorization policies set iam policy*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicySetIamPolicyCall), [*locations authorization policies test iam permissions*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyTestIamPermissionCall), [*locations client tls policies create*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyCreateCall), [*locations client tls policies delete*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyDeleteCall), [*locations client tls policies 
get*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyGetCall), [*locations client tls policies get iam policy*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyGetIamPolicyCall), [*locations client tls policies list*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyListCall), [*locations client tls policies patch*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyPatchCall), [*locations client tls policies set iam policy*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicySetIamPolicyCall), [*locations client tls policies test iam permissions*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyTestIamPermissionCall), [*locations get*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationOperationListCall), [*locations server tls policies 
create*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyCreateCall), [*locations server tls policies delete*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyDeleteCall), [*locations server tls policies get*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyGetCall), [*locations server tls policies get iam policy*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyGetIamPolicyCall), [*locations server tls policies list*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyListCall), [*locations server tls policies patch*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyPatchCall), [*locations server tls policies set iam policy*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicySetIamPolicyCall) and [*locations server tls policies test iam permissions*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyTestIamPermissionCall) + * [*locations authorization policies create*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyCreateCall), [*locations authorization policies delete*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyDeleteCall), [*locations authorization policies get*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyGetCall), [*locations authorization policies get iam 
policy*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyGetIamPolicyCall), [*locations authorization policies list*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyListCall), [*locations authorization policies patch*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyPatchCall), [*locations authorization policies set iam policy*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicySetIamPolicyCall), [*locations authorization policies test iam permissions*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationAuthorizationPolicyTestIamPermissionCall), [*locations client tls policies create*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyCreateCall), [*locations client tls policies delete*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyDeleteCall), [*locations client tls policies get*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyGetCall), [*locations client tls policies get iam policy*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyGetIamPolicyCall), [*locations client tls policies list*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyListCall), [*locations client tls policies patch*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyPatchCall), [*locations client tls policies set iam 
policy*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicySetIamPolicyCall), [*locations client tls policies test iam permissions*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationClientTlsPolicyTestIamPermissionCall), [*locations get*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationOperationListCall), [*locations server tls policies create*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyCreateCall), [*locations server tls policies delete*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyDeleteCall), [*locations server tls policies get*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyGetCall), [*locations server tls policies get iam policy*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyGetIamPolicyCall), [*locations server tls policies 
list*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyListCall), [*locations server tls policies patch*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyPatchCall), [*locations server tls policies set iam policy*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicySetIamPolicyCall) and [*locations server tls policies test iam permissions*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/api::ProjectLocationServerTlsPolicyTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/NetworkSecurity)** +* **[Hub](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/NetworkSecurity)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::CallBuilder) -* **[Resources](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::CallBuilder) +* **[Resources](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::Resource)** * primary types that 
you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::Part)** + * **[Parts](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::Delegate) to the -[Method Builder](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::Delegate) to the +[Method Builder](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::RequestValue) and -[decodable](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::RequestValue) and +[decodable](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-networksecurity1/5.0.2-beta-1+20230106/google_networksecurity1/client::RequestValue) are moved +* [request values](https://docs.rs/google-networksecurity1/5.0.2+20230106/google_networksecurity1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/networksecurity1/src/api.rs b/gen/networksecurity1/src/api.rs index 7fa87c66d4..25e593cf9d 100644 --- a/gen/networksecurity1/src/api.rs +++ b/gen/networksecurity1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> NetworkSecurity { NetworkSecurity { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://networksecurity.googleapis.com/".to_string(), _root_url: "https://networksecurity.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> NetworkSecurity { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/networksecurity1/src/client.rs b/gen/networksecurity1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/networksecurity1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/networksecurity1/src/lib.rs b/gen/networksecurity1/src/lib.rs index a75dc9d401..458be7a28d 100644 --- a/gen/networksecurity1/src/lib.rs +++ b/gen/networksecurity1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *NetworkSecurity* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *networksecurity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *NetworkSecurity* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *networksecurity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *NetworkSecurity* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/networking). diff --git a/gen/networkservices1-cli/Cargo.toml b/gen/networkservices1-cli/Cargo.toml index fce84bda7b..352759e35e 100644 --- a/gen/networkservices1-cli/Cargo.toml +++ b/gen/networkservices1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-networkservices1-cli" -version = "4.0.1+20220222" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with NetworkServices (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networkservices1-cli" @@ -20,13 +20,13 @@ name = "networkservices1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-networkservices1] path = "../networkservices1" -version = "4.0.1+20220222" +version = "5.0.2+20230105" + diff --git a/gen/networkservices1-cli/README.md b/gen/networkservices1-cli/README.md index 81e378f9f9..255f5ac4ef 100644 --- a/gen/networkservices1-cli/README.md +++ b/gen/networkservices1-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *NetworkServices* API at revision *20220222*. The CLI is at version *4.0.1*. +This documentation was generated from the *NetworkServices* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash networkservices1 [options] @@ -47,12 +47,55 @@ networkservices1 [options] locations-endpoint-policies-patch (-r )... [-p ]... [-o ] locations-endpoint-policies-set-iam-policy (-r )... [-p ]... [-o ] locations-endpoint-policies-test-iam-permissions (-r )... [-p ]... [-o ] + locations-gateways-create (-r )... [-p ]... [-o ] + locations-gateways-delete [-p ]... [-o ] + locations-gateways-get [-p ]... [-o ] + locations-gateways-get-iam-policy [-p ]... [-o ] + locations-gateways-list [-p ]... [-o ] + locations-gateways-patch (-r )... [-p ]... [-o ] + locations-gateways-set-iam-policy (-r )... [-p ]... [-o ] + locations-gateways-test-iam-permissions (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] + locations-grpc-routes-create (-r )... [-p ]... [-o ] + locations-grpc-routes-delete [-p ]... [-o ] + locations-grpc-routes-get [-p ]... [-o ] + locations-grpc-routes-list [-p ]... [-o ] + locations-grpc-routes-patch (-r )... [-p ]... [-o ] + locations-http-routes-create (-r )... [-p ]... [-o ] + locations-http-routes-delete [-p ]... [-o ] + locations-http-routes-get [-p ]... [-o ] + locations-http-routes-list [-p ]... [-o ] + locations-http-routes-patch (-r )... [-p ]... [-o ] locations-list [-p ]... [-o ] + locations-meshes-create (-r )... [-p ]... [-o ] + locations-meshes-delete [-p ]... [-o ] + locations-meshes-get [-p ]... [-o ] + locations-meshes-get-iam-policy [-p ]... [-o ] + locations-meshes-list [-p ]... [-o ] + locations-meshes-patch (-r )... [-p ]... [-o ] + locations-meshes-set-iam-policy (-r )... [-p ]... [-o ] + locations-meshes-test-iam-permissions (-r )... [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... 
[-o ] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] + locations-service-bindings-create (-r )... [-p ]... [-o ] + locations-service-bindings-delete [-p ]... [-o ] + locations-service-bindings-get [-p ]... [-o ] + locations-service-bindings-get-iam-policy [-p ]... [-o ] + locations-service-bindings-list [-p ]... [-o ] + locations-service-bindings-set-iam-policy (-r )... [-p ]... [-o ] + locations-service-bindings-test-iam-permissions (-r )... [-p ]... [-o ] + locations-tcp-routes-create (-r )... [-p ]... [-o ] + locations-tcp-routes-delete [-p ]... [-o ] + locations-tcp-routes-get [-p ]... [-o ] + locations-tcp-routes-list [-p ]... [-o ] + locations-tcp-routes-patch (-r )... [-p ]... [-o ] + locations-tls-routes-create (-r )... [-p ]... [-o ] + locations-tls-routes-delete [-p ]... [-o ] + locations-tls-routes-get [-p ]... [-o ] + locations-tls-routes-list [-p ]... [-o ] + locations-tls-routes-patch (-r )... [-p ]... 
[-o ] networkservices1 --help Configuration: diff --git a/gen/networkservices1-cli/mkdocs.yml b/gen/networkservices1-cli/mkdocs.yml index 782f52a551..ed594ea24c 100644 --- a/gen/networkservices1-cli/mkdocs.yml +++ b/gen/networkservices1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: NetworkServices v4.0.1+20220222 +site_name: NetworkServices v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-networkservices1-cli site_description: A complete library to interact with NetworkServices (protocol v1) @@ -7,31 +7,75 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/networkservices1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-edge-cache-keysets-get-iam-policy.md', 'Projects', 'Locations Edge Cache Keysets Get Iam Policy'] -- ['projects_locations-edge-cache-keysets-set-iam-policy.md', 'Projects', 'Locations Edge Cache Keysets Set Iam Policy'] -- ['projects_locations-edge-cache-keysets-test-iam-permissions.md', 'Projects', 'Locations Edge Cache Keysets Test Iam Permissions'] -- ['projects_locations-edge-cache-origins-get-iam-policy.md', 'Projects', 'Locations Edge Cache Origins Get Iam Policy'] -- ['projects_locations-edge-cache-origins-set-iam-policy.md', 'Projects', 'Locations Edge Cache Origins Set Iam Policy'] -- ['projects_locations-edge-cache-origins-test-iam-permissions.md', 'Projects', 'Locations Edge Cache Origins Test Iam Permissions'] -- ['projects_locations-edge-cache-services-get-iam-policy.md', 'Projects', 'Locations Edge Cache Services Get Iam Policy'] -- ['projects_locations-edge-cache-services-set-iam-policy.md', 'Projects', 'Locations Edge Cache Services Set Iam Policy'] -- ['projects_locations-edge-cache-services-test-iam-permissions.md', 'Projects', 'Locations Edge Cache Services Test Iam Permissions'] -- ['projects_locations-endpoint-policies-create.md', 'Projects', 'Locations Endpoint Policies Create'] -- ['projects_locations-endpoint-policies-delete.md', 'Projects', 
'Locations Endpoint Policies Delete'] -- ['projects_locations-endpoint-policies-get.md', 'Projects', 'Locations Endpoint Policies Get'] -- ['projects_locations-endpoint-policies-get-iam-policy.md', 'Projects', 'Locations Endpoint Policies Get Iam Policy'] -- ['projects_locations-endpoint-policies-list.md', 'Projects', 'Locations Endpoint Policies List'] -- ['projects_locations-endpoint-policies-patch.md', 'Projects', 'Locations Endpoint Policies Patch'] -- ['projects_locations-endpoint-policies-set-iam-policy.md', 'Projects', 'Locations Endpoint Policies Set Iam Policy'] -- ['projects_locations-endpoint-policies-test-iam-permissions.md', 'Projects', 'Locations Endpoint Policies Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Edge Cache Keysets Get Iam Policy': 'projects_locations-edge-cache-keysets-get-iam-policy.md' + - 'Locations Edge Cache Keysets Set Iam Policy': 'projects_locations-edge-cache-keysets-set-iam-policy.md' + - 'Locations Edge Cache Keysets Test Iam Permissions': 'projects_locations-edge-cache-keysets-test-iam-permissions.md' + - 'Locations Edge Cache Origins Get Iam Policy': 'projects_locations-edge-cache-origins-get-iam-policy.md' + - 'Locations Edge Cache Origins Set Iam Policy': 'projects_locations-edge-cache-origins-set-iam-policy.md' + - 'Locations Edge Cache Origins Test Iam Permissions': 'projects_locations-edge-cache-origins-test-iam-permissions.md' + - 'Locations Edge Cache Services Get Iam Policy': 
'projects_locations-edge-cache-services-get-iam-policy.md' + - 'Locations Edge Cache Services Set Iam Policy': 'projects_locations-edge-cache-services-set-iam-policy.md' + - 'Locations Edge Cache Services Test Iam Permissions': 'projects_locations-edge-cache-services-test-iam-permissions.md' + - 'Locations Endpoint Policies Create': 'projects_locations-endpoint-policies-create.md' + - 'Locations Endpoint Policies Delete': 'projects_locations-endpoint-policies-delete.md' + - 'Locations Endpoint Policies Get': 'projects_locations-endpoint-policies-get.md' + - 'Locations Endpoint Policies Get Iam Policy': 'projects_locations-endpoint-policies-get-iam-policy.md' + - 'Locations Endpoint Policies List': 'projects_locations-endpoint-policies-list.md' + - 'Locations Endpoint Policies Patch': 'projects_locations-endpoint-policies-patch.md' + - 'Locations Endpoint Policies Set Iam Policy': 'projects_locations-endpoint-policies-set-iam-policy.md' + - 'Locations Endpoint Policies Test Iam Permissions': 'projects_locations-endpoint-policies-test-iam-permissions.md' + - 'Locations Gateways Create': 'projects_locations-gateways-create.md' + - 'Locations Gateways Delete': 'projects_locations-gateways-delete.md' + - 'Locations Gateways Get': 'projects_locations-gateways-get.md' + - 'Locations Gateways Get Iam Policy': 'projects_locations-gateways-get-iam-policy.md' + - 'Locations Gateways List': 'projects_locations-gateways-list.md' + - 'Locations Gateways Patch': 'projects_locations-gateways-patch.md' + - 'Locations Gateways Set Iam Policy': 'projects_locations-gateways-set-iam-policy.md' + - 'Locations Gateways Test Iam Permissions': 'projects_locations-gateways-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Grpc Routes Create': 'projects_locations-grpc-routes-create.md' + - 'Locations Grpc Routes Delete': 'projects_locations-grpc-routes-delete.md' + - 'Locations Grpc Routes Get': 'projects_locations-grpc-routes-get.md' + - 'Locations 
Grpc Routes List': 'projects_locations-grpc-routes-list.md' + - 'Locations Grpc Routes Patch': 'projects_locations-grpc-routes-patch.md' + - 'Locations Http Routes Create': 'projects_locations-http-routes-create.md' + - 'Locations Http Routes Delete': 'projects_locations-http-routes-delete.md' + - 'Locations Http Routes Get': 'projects_locations-http-routes-get.md' + - 'Locations Http Routes List': 'projects_locations-http-routes-list.md' + - 'Locations Http Routes Patch': 'projects_locations-http-routes-patch.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Meshes Create': 'projects_locations-meshes-create.md' + - 'Locations Meshes Delete': 'projects_locations-meshes-delete.md' + - 'Locations Meshes Get': 'projects_locations-meshes-get.md' + - 'Locations Meshes Get Iam Policy': 'projects_locations-meshes-get-iam-policy.md' + - 'Locations Meshes List': 'projects_locations-meshes-list.md' + - 'Locations Meshes Patch': 'projects_locations-meshes-patch.md' + - 'Locations Meshes Set Iam Policy': 'projects_locations-meshes-set-iam-policy.md' + - 'Locations Meshes Test Iam Permissions': 'projects_locations-meshes-test-iam-permissions.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Service Bindings Create': 'projects_locations-service-bindings-create.md' + - 'Locations Service Bindings Delete': 'projects_locations-service-bindings-delete.md' + - 'Locations Service Bindings Get': 'projects_locations-service-bindings-get.md' + - 'Locations Service Bindings Get Iam Policy': 'projects_locations-service-bindings-get-iam-policy.md' + - 'Locations Service Bindings List': 'projects_locations-service-bindings-list.md' + - 'Locations Service Bindings Set Iam Policy': 
'projects_locations-service-bindings-set-iam-policy.md' + - 'Locations Service Bindings Test Iam Permissions': 'projects_locations-service-bindings-test-iam-permissions.md' + - 'Locations Tcp Routes Create': 'projects_locations-tcp-routes-create.md' + - 'Locations Tcp Routes Delete': 'projects_locations-tcp-routes-delete.md' + - 'Locations Tcp Routes Get': 'projects_locations-tcp-routes-get.md' + - 'Locations Tcp Routes List': 'projects_locations-tcp-routes-list.md' + - 'Locations Tcp Routes Patch': 'projects_locations-tcp-routes-patch.md' + - 'Locations Tls Routes Create': 'projects_locations-tls-routes-create.md' + - 'Locations Tls Routes Delete': 'projects_locations-tls-routes-delete.md' + - 'Locations Tls Routes Get': 'projects_locations-tls-routes-get.md' + - 'Locations Tls Routes List': 'projects_locations-tls-routes-list.md' + - 'Locations Tls Routes Patch': 'projects_locations-tls-routes-patch.md' theme: readthedocs diff --git a/gen/networkservices1-cli/src/client.rs b/gen/networkservices1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/networkservices1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. 
Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/networkservices1-cli/src/main.rs b/gen/networkservices1-cli/src/main.rs index f26ade1399..bc5210e9cf 100644 --- a/gen/networkservices1-cli/src/main.rs +++ b/gen/networkservices1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_networkservices1::{api, Error, oauth2}; +use google_networkservices1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,7 +57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -286,7 +285,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -514,7 +513,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = 
call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -945,7 +944,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1004,7 +1003,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1103,7 +1102,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1324,6 +1323,593 @@ where } } + async fn _projects_locations_gateways_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + 
err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ports" => Some(("ports", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Vec })), + "scope" => Some(("scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "server-tls-policy" => Some(("serverTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "labels", "name", "ports", "scope", "self-link", "server-tls-policy", "type", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Gateway = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_gateways_create(request, 
opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "gateway-id" => { + call = call.gateway_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["gateway-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_gateways_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_gateways_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for 
param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_gateways_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_gateways_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let 
protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_gateways_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_gateways_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_gateways_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_gateways_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_gateways_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "ports" => Some(("ports", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Vec })), + "scope" => Some(("scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "server-tls-policy" => Some(("serverTlsPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "labels", "name", "ports", "scope", "self-link", "server-tls-policy", "type", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Gateway = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_gateways_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_gateways_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + 
if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_gateways_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + 
Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_gateways_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_gateways_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_get(opt.value_of("name").unwrap_or("")); @@ -1376,6 +1962,720 @@ where } } + async fn _projects_locations_grpc_routes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateways" => Some(("gateways", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hostnames" => Some(("hostnames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "meshes" => Some(("meshes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "gateways", "hostnames", "labels", "meshes", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GrpcRoute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_grpc_routes_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "grpc-route-id" => { + call = call.grpc_route_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 
== key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["grpc-route-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_grpc_routes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_grpc_routes_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + 
Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_grpc_routes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_grpc_routes_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_grpc_routes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_grpc_routes_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match 
match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_grpc_routes_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateways" => Some(("gateways", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hostnames" => Some(("hostnames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "meshes" => Some(("meshes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "gateways", "hostnames", "labels", "meshes", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GrpcRoute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_grpc_routes_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key 
== *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_http_routes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, 
JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateways" => Some(("gateways", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hostnames" => Some(("hostnames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "meshes" => Some(("meshes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "gateways", "hostnames", "labels", "meshes", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::HttpRoute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_http_routes_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "http-route-id" => { + call = call.http_route_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = 
true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["http-route-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_http_routes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_http_routes_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } 
+ let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_http_routes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_http_routes_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_http_routes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_http_routes_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match 
match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_http_routes_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateways" => Some(("gateways", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "hostnames" => Some(("hostnames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "meshes" => Some(("meshes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "gateways", "hostnames", "labels", "meshes", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::HttpRoute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_http_routes_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key 
== *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or("")); @@ -1386,7 +2686,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1438,6 +2738,587 @@ where } } + async fn _projects_locations_meshes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = 
FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "interception-port" => Some(("interceptionPort", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "interception-port", "labels", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Mesh = json::value::from_value(object).unwrap(); + let mut call = 
self.hub.projects().locations_meshes_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "mesh-id" => { + call = call.mesh_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["mesh-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_meshes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_meshes_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ 
=> { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_meshes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_meshes_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v 
} )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_meshes_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_meshes_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + 
Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_meshes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_meshes_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_meshes_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "interception-port" => Some(("interceptionPort", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "interception-port", "labels", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Mesh = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_meshes_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_meshes_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_meshes_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_meshes_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_meshes_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1636,7 +3517,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1688,6 +3569,1199 @@ where } } + async fn _projects_locations_service_bindings_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service" => Some(("service", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "labels", "name", "service", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ServiceBinding = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_bindings_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "service-binding-id" => { + call = call.service_binding_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["service-binding-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_bindings_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_service_bindings_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_bindings_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_service_bindings_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_bindings_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_service_bindings_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to 
work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_bindings_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_service_bindings_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, 
&value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_bindings_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_bindings_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = 
parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_service_bindings_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + 
err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_service_bindings_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tcp_routes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateways" => Some(("gateways", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "meshes" => Some(("meshes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "gateways", "labels", "meshes", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TcpRoute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_tcp_routes_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "tcp-route-id" => { + call = call.tcp_route_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["tcp-route-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tcp_routes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_tcp_routes_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tcp_routes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_tcp_routes_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tcp_routes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_tcp_routes_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => 
Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tcp_routes_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateways" => Some(("gateways", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "meshes" => Some(("meshes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "gateways", "labels", "meshes", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TcpRoute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_tcp_routes_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tls_routes_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateways" => Some(("gateways", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "meshes" => Some(("meshes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "gateways", "meshes", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TlsRoute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_tls_routes_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "tls-route-id" => { + call = call.tls_route_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + 
v.extend(["tls-route-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tls_routes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_tls_routes_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match 
writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tls_routes_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_tls_routes_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => 
{ + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tls_routes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_tls_routes_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_tls_routes_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gateways" => Some(("gateways", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "meshes" => Some(("meshes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "gateways", "meshes", "name", "self-link", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + 
if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TlsRoute = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_tls_routes_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: 
bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -1746,12 +4820,90 @@ where ("locations-endpoint-policies-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_endpoint_policies_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-gateways-create", Some(opt)) => { + call_result = self._projects_locations_gateways_create(opt, dry_run, &mut err).await; + }, + ("locations-gateways-delete", Some(opt)) => { + call_result = self._projects_locations_gateways_delete(opt, dry_run, &mut err).await; + }, + ("locations-gateways-get", Some(opt)) => { + call_result = self._projects_locations_gateways_get(opt, dry_run, &mut err).await; + }, + ("locations-gateways-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_gateways_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-gateways-list", Some(opt)) => { + call_result = self._projects_locations_gateways_list(opt, dry_run, &mut err).await; + }, + ("locations-gateways-patch", Some(opt)) => { + call_result = self._projects_locations_gateways_patch(opt, dry_run, &mut err).await; + }, + ("locations-gateways-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_gateways_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-gateways-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_gateways_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-get", Some(opt)) => { call_result = self._projects_locations_get(opt, dry_run, &mut err).await; }, + ("locations-grpc-routes-create", Some(opt)) => { + call_result = self._projects_locations_grpc_routes_create(opt, dry_run, &mut err).await; + }, + ("locations-grpc-routes-delete", Some(opt)) => { + call_result = self._projects_locations_grpc_routes_delete(opt, dry_run, &mut err).await; + }, + ("locations-grpc-routes-get", Some(opt)) => { + call_result = 
self._projects_locations_grpc_routes_get(opt, dry_run, &mut err).await; + }, + ("locations-grpc-routes-list", Some(opt)) => { + call_result = self._projects_locations_grpc_routes_list(opt, dry_run, &mut err).await; + }, + ("locations-grpc-routes-patch", Some(opt)) => { + call_result = self._projects_locations_grpc_routes_patch(opt, dry_run, &mut err).await; + }, + ("locations-http-routes-create", Some(opt)) => { + call_result = self._projects_locations_http_routes_create(opt, dry_run, &mut err).await; + }, + ("locations-http-routes-delete", Some(opt)) => { + call_result = self._projects_locations_http_routes_delete(opt, dry_run, &mut err).await; + }, + ("locations-http-routes-get", Some(opt)) => { + call_result = self._projects_locations_http_routes_get(opt, dry_run, &mut err).await; + }, + ("locations-http-routes-list", Some(opt)) => { + call_result = self._projects_locations_http_routes_list(opt, dry_run, &mut err).await; + }, + ("locations-http-routes-patch", Some(opt)) => { + call_result = self._projects_locations_http_routes_patch(opt, dry_run, &mut err).await; + }, ("locations-list", Some(opt)) => { call_result = self._projects_locations_list(opt, dry_run, &mut err).await; }, + ("locations-meshes-create", Some(opt)) => { + call_result = self._projects_locations_meshes_create(opt, dry_run, &mut err).await; + }, + ("locations-meshes-delete", Some(opt)) => { + call_result = self._projects_locations_meshes_delete(opt, dry_run, &mut err).await; + }, + ("locations-meshes-get", Some(opt)) => { + call_result = self._projects_locations_meshes_get(opt, dry_run, &mut err).await; + }, + ("locations-meshes-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_meshes_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-meshes-list", Some(opt)) => { + call_result = self._projects_locations_meshes_list(opt, dry_run, &mut err).await; + }, + ("locations-meshes-patch", Some(opt)) => { + call_result = self._projects_locations_meshes_patch(opt, 
dry_run, &mut err).await; + }, + ("locations-meshes-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_meshes_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-meshes-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_meshes_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-operations-cancel", Some(opt)) => { call_result = self._projects_locations_operations_cancel(opt, dry_run, &mut err).await; }, @@ -1764,6 +4916,57 @@ where ("locations-operations-list", Some(opt)) => { call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; }, + ("locations-service-bindings-create", Some(opt)) => { + call_result = self._projects_locations_service_bindings_create(opt, dry_run, &mut err).await; + }, + ("locations-service-bindings-delete", Some(opt)) => { + call_result = self._projects_locations_service_bindings_delete(opt, dry_run, &mut err).await; + }, + ("locations-service-bindings-get", Some(opt)) => { + call_result = self._projects_locations_service_bindings_get(opt, dry_run, &mut err).await; + }, + ("locations-service-bindings-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_service_bindings_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-service-bindings-list", Some(opt)) => { + call_result = self._projects_locations_service_bindings_list(opt, dry_run, &mut err).await; + }, + ("locations-service-bindings-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_service_bindings_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-service-bindings-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_service_bindings_test_iam_permissions(opt, dry_run, &mut err).await; + }, + ("locations-tcp-routes-create", Some(opt)) => { + call_result = self._projects_locations_tcp_routes_create(opt, dry_run, &mut err).await; + }, + ("locations-tcp-routes-delete", Some(opt)) => { + 
call_result = self._projects_locations_tcp_routes_delete(opt, dry_run, &mut err).await; + }, + ("locations-tcp-routes-get", Some(opt)) => { + call_result = self._projects_locations_tcp_routes_get(opt, dry_run, &mut err).await; + }, + ("locations-tcp-routes-list", Some(opt)) => { + call_result = self._projects_locations_tcp_routes_list(opt, dry_run, &mut err).await; + }, + ("locations-tcp-routes-patch", Some(opt)) => { + call_result = self._projects_locations_tcp_routes_patch(opt, dry_run, &mut err).await; + }, + ("locations-tls-routes-create", Some(opt)) => { + call_result = self._projects_locations_tls_routes_create(opt, dry_run, &mut err).await; + }, + ("locations-tls-routes-delete", Some(opt)) => { + call_result = self._projects_locations_tls_routes_delete(opt, dry_run, &mut err).await; + }, + ("locations-tls-routes-get", Some(opt)) => { + call_result = self._projects_locations_tls_routes_get(opt, dry_run, &mut err).await; + }, + ("locations-tls-routes-list", Some(opt)) => { + call_result = self._projects_locations_tls_routes_list(opt, dry_run, &mut err).await; + }, + ("locations-tls-routes-patch", Some(opt)) => { + call_result = self._projects_locations_tls_routes_patch(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -1843,14 +5046,14 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-edge-cache-keysets-get-iam-policy', 'locations-edge-cache-keysets-set-iam-policy', 'locations-edge-cache-keysets-test-iam-permissions', 'locations-edge-cache-origins-get-iam-policy', 'locations-edge-cache-origins-set-iam-policy', 'locations-edge-cache-origins-test-iam-permissions', 'locations-edge-cache-services-get-iam-policy', 'locations-edge-cache-services-set-iam-policy', 'locations-edge-cache-services-test-iam-permissions', 'locations-endpoint-policies-create', 'locations-endpoint-policies-delete', 
'locations-endpoint-policies-get', 'locations-endpoint-policies-get-iam-policy', 'locations-endpoint-policies-list', 'locations-endpoint-policies-patch', 'locations-endpoint-policies-set-iam-policy', 'locations-endpoint-policies-test-iam-permissions', 'locations-get', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get' and 'locations-operations-list'", vec![ + ("projects", "methods: 'locations-edge-cache-keysets-get-iam-policy', 'locations-edge-cache-keysets-set-iam-policy', 'locations-edge-cache-keysets-test-iam-permissions', 'locations-edge-cache-origins-get-iam-policy', 'locations-edge-cache-origins-set-iam-policy', 'locations-edge-cache-origins-test-iam-permissions', 'locations-edge-cache-services-get-iam-policy', 'locations-edge-cache-services-set-iam-policy', 'locations-edge-cache-services-test-iam-permissions', 'locations-endpoint-policies-create', 'locations-endpoint-policies-delete', 'locations-endpoint-policies-get', 'locations-endpoint-policies-get-iam-policy', 'locations-endpoint-policies-list', 'locations-endpoint-policies-patch', 'locations-endpoint-policies-set-iam-policy', 'locations-endpoint-policies-test-iam-permissions', 'locations-gateways-create', 'locations-gateways-delete', 'locations-gateways-get', 'locations-gateways-get-iam-policy', 'locations-gateways-list', 'locations-gateways-patch', 'locations-gateways-set-iam-policy', 'locations-gateways-test-iam-permissions', 'locations-get', 'locations-grpc-routes-create', 'locations-grpc-routes-delete', 'locations-grpc-routes-get', 'locations-grpc-routes-list', 'locations-grpc-routes-patch', 'locations-http-routes-create', 'locations-http-routes-delete', 'locations-http-routes-get', 'locations-http-routes-list', 'locations-http-routes-patch', 'locations-list', 'locations-meshes-create', 'locations-meshes-delete', 'locations-meshes-get', 'locations-meshes-get-iam-policy', 'locations-meshes-list', 'locations-meshes-patch', 
'locations-meshes-set-iam-policy', 'locations-meshes-test-iam-permissions', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-service-bindings-create', 'locations-service-bindings-delete', 'locations-service-bindings-get', 'locations-service-bindings-get-iam-policy', 'locations-service-bindings-list', 'locations-service-bindings-set-iam-policy', 'locations-service-bindings-test-iam-permissions', 'locations-tcp-routes-create', 'locations-tcp-routes-delete', 'locations-tcp-routes-get', 'locations-tcp-routes-list', 'locations-tcp-routes-patch', 'locations-tls-routes-create', 'locations-tls-routes-delete', 'locations-tls-routes-get', 'locations-tls-routes-list' and 'locations-tls-routes-patch'", vec![ ("locations-edge-cache-keysets-get-iam-policy", Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-edge-cache-keysets-get-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1872,7 +5075,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1900,7 +5103,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1928,7 +5131,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1950,7 +5153,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1978,7 +5181,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2006,7 +5209,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2028,7 +5231,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2056,7 +5259,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2156,7 +5359,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2228,7 +5431,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2256,7 +5459,207 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-gateways-create", + Some(r##"Creates a new Gateway in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-gateways-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the Gateway. 
Must be in the format `projects/*/locations/*`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-gateways-delete", + Some(r##"Deletes a single Gateway."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-gateways-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the Gateway to delete. Must be in the format `projects/*/locations/*/gateways/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-gateways-get", + Some(r##"Gets details of a single Gateway."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-gateways-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the Gateway to get. Must be in the format `projects/*/locations/*/gateways/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-gateways-get-iam-policy", + Some(r##"Gets the access control policy for a resource. 
Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-gateways-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-gateways-list", + Some(r##"Lists Gateways in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-gateways-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The project and location from which the Gateways should be listed, specified in the format `projects/*/locations/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-gateways-patch", + Some(r##"Updates the parameters of a single Gateway."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-gateways-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the Gateway resource. 
It matches pattern `projects/*/locations/*/gateways/`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-gateways-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-gateways-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-gateways-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. 
This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-gateways-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2294,6 +5697,250 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-grpc-routes-create", + Some(r##"Creates a new GrpcRoute in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-grpc-routes-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the GrpcRoute. Must be in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-grpc-routes-delete", + Some(r##"Deletes a single GrpcRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-grpc-routes-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the GrpcRoute to delete. 
Must be in the format `projects/*/locations/global/grpcRoutes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-grpc-routes-get", + Some(r##"Gets details of a single GrpcRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-grpc-routes-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the GrpcRoute to get. Must be in the format `projects/*/locations/global/grpcRoutes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-grpc-routes-list", + Some(r##"Lists GrpcRoutes in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-grpc-routes-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The project and location from which the GrpcRoutes should be listed, specified in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-grpc-routes-patch", + Some(r##"Updates the parameters of a single GrpcRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-grpc-routes-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the GrpcRoute resource. It matches pattern `projects/*/locations/global/grpcRoutes/`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-http-routes-create", + Some(r##"Creates a new HttpRoute in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-http-routes-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the HttpRoute. 
Must be in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-http-routes-delete", + Some(r##"Deletes a single HttpRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-http-routes-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the HttpRoute to delete. Must be in the format `projects/*/locations/global/httpRoutes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-http-routes-get", + Some(r##"Gets details of a single HttpRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-http-routes-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the HttpRoute to get. 
Must be in the format `projects/*/locations/global/httpRoutes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-http-routes-list", + Some(r##"Lists HttpRoute in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-http-routes-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The project and location from which the HttpRoutes should be listed, specified in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-http-routes-patch", + Some(r##"Updates the parameters of a single HttpRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-http-routes-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the HttpRoute resource. 
It matches pattern `projects/*/locations/global/httpRoutes/http_route_name>`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2316,6 +5963,206 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-meshes-create", + Some(r##"Creates a new Mesh in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-meshes-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the Mesh. Must be in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-meshes-delete", + Some(r##"Deletes a single Mesh."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-meshes-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the Mesh to delete. 
Must be in the format `projects/*/locations/global/meshes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-meshes-get", + Some(r##"Gets details of a single Mesh."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-meshes-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the Mesh to get. Must be in the format `projects/*/locations/global/meshes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-meshes-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-meshes-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-meshes-list", + Some(r##"Lists Meshes in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-meshes-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The project and location from which the Meshes should be listed, specified in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-meshes-patch", + Some(r##"Updates the parameters of a single Mesh."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-meshes-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the Mesh resource. 
It matches pattern `projects/*/locations/global/meshes/`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-meshes-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-meshes-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-meshes-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. 
This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-meshes-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2410,6 +6257,422 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-bindings-create", + Some(r##"Creates a new ServiceBinding in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-service-bindings-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the ServiceBinding. 
Must be in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-bindings-delete", + Some(r##"Deletes a single ServiceBinding."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-service-bindings-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the ServiceBinding to delete. Must be in the format `projects/*/locations/global/serviceBindings/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-bindings-get", + Some(r##"Gets details of a single ServiceBinding."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-service-bindings-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the ServiceBinding to get. 
Must be in the format `projects/*/locations/global/serviceBindings/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-bindings-get-iam-policy", + Some(r##"Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-service-bindings-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-bindings-list", + Some(r##"Lists ServiceBinding in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-service-bindings-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The project and location from which the ServiceBindings should be listed, specified in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-bindings-set-iam-policy", + Some(r##"Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-service-bindings-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-service-bindings-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. 
This operation may "fail open" without warning."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-service-bindings-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tcp-routes-create", + Some(r##"Creates a new TcpRoute in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tcp-routes-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the TcpRoute. 
Must be in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tcp-routes-delete", + Some(r##"Deletes a single TcpRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tcp-routes-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the TcpRoute to delete. Must be in the format `projects/*/locations/global/tcpRoutes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tcp-routes-get", + Some(r##"Gets details of a single TcpRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tcp-routes-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the TcpRoute to get. 
Must be in the format `projects/*/locations/global/tcpRoutes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tcp-routes-list", + Some(r##"Lists TcpRoute in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tcp-routes-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The project and location from which the TcpRoutes should be listed, specified in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tcp-routes-patch", + Some(r##"Updates the parameters of a single TcpRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tcp-routes-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the TcpRoute resource. 
It matches pattern `projects/*/locations/global/tcpRoutes/tcp_route_name>`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tls-routes-create", + Some(r##"Creates a new TlsRoute in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tls-routes-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent resource of the TlsRoute. Must be in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tls-routes-delete", + Some(r##"Deletes a single TlsRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tls-routes-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the TlsRoute to delete. 
Must be in the format `projects/*/locations/global/tlsRoutes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tls-routes-get", + Some(r##"Gets details of a single TlsRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tls-routes-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. A name of the TlsRoute to get. Must be in the format `projects/*/locations/global/tlsRoutes/*`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tls-routes-list", + Some(r##"Lists TlsRoute in a given project and location."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tls-routes-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The project and location from which the TlsRoutes should be listed, specified in the format `projects/*/locations/global`."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-tls-routes-patch", + Some(r##"Updates the parameters of a single TlsRoute."##), + "Details at http://byron.github.io/google-apis-rs/google_networkservices1_cli/projects_locations-tls-routes-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the TlsRoute resource. It matches pattern `projects/*/locations/global/tlsRoutes/tls_route_name>`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2422,7 +6685,7 @@ async fn main() { let mut app = App::new("networkservices1") .author("Sebastian Thiel ") - .version("4.0.1+20220222") + .version("5.0.2+20230105") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_networkservices1_cli") .arg(Arg::with_name("url") diff --git a/gen/networkservices1/Cargo.toml b/gen/networkservices1/Cargo.toml index f94310ecf2..fa32e14927 100644 --- a/gen/networkservices1/Cargo.toml +++ b/gen/networkservices1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-networkservices1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to 
interact with NetworkServices (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/networkservices1" homepage = "https://cloud.google.com/networking" -documentation = "https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-networkservices1/5.0.2+20230105" license = "MIT" keywords = ["networkservices", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/networkservices1/README.md b/gen/networkservices1/README.md index f8e734cd29..9a84a993a6 100644 --- a/gen/networkservices1/README.md +++ b/gen/networkservices1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-networkservices1` library allows access to all features of the *Google NetworkServices* service. -This documentation was generated from *NetworkServices* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *networkservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *NetworkServices* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *networkservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *NetworkServices* *v1* API can be found at the [official documentation site](https://cloud.google.com/networking). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/NetworkServices) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/NetworkServices) ... 
* projects - * [*locations edge cache keysets get iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheKeysetGetIamPolicyCall), [*locations edge cache keysets set iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheKeysetSetIamPolicyCall), [*locations edge cache keysets test iam permissions*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheKeysetTestIamPermissionCall), [*locations edge cache origins get iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheOriginGetIamPolicyCall), [*locations edge cache origins set iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheOriginSetIamPolicyCall), [*locations edge cache origins test iam permissions*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheOriginTestIamPermissionCall), [*locations edge cache services get iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheServiceGetIamPolicyCall), [*locations edge cache services set iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheServiceSetIamPolicyCall), [*locations edge cache services test iam permissions*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEdgeCacheServiceTestIamPermissionCall), [*locations endpoint policies create*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyCreateCall), [*locations endpoint policies 
delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyDeleteCall), [*locations endpoint policies get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyGetCall), [*locations endpoint policies get iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyGetIamPolicyCall), [*locations endpoint policies list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyListCall), [*locations endpoint policies patch*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyPatchCall), [*locations endpoint policies set iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEndpointPolicySetIamPolicyCall), [*locations endpoint policies test iam permissions*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyTestIamPermissionCall), [*locations gateways create*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGatewayCreateCall), [*locations gateways delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGatewayDeleteCall), [*locations gateways get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGatewayGetCall), [*locations gateways get iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGatewayGetIamPolicyCall), [*locations gateways list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGatewayListCall), 
[*locations gateways patch*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGatewayPatchCall), [*locations gateways set iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGatewaySetIamPolicyCall), [*locations gateways test iam permissions*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGatewayTestIamPermissionCall), [*locations get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGetCall), [*locations grpc routes create*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGrpcRouteCreateCall), [*locations grpc routes delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGrpcRouteDeleteCall), [*locations grpc routes get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGrpcRouteGetCall), [*locations grpc routes list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGrpcRouteListCall), [*locations grpc routes patch*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationGrpcRoutePatchCall), [*locations http routes create*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationHttpRouteCreateCall), [*locations http routes delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationHttpRouteDeleteCall), [*locations http routes get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationHttpRouteGetCall), [*locations http routes 
list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationHttpRouteListCall), [*locations http routes patch*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationHttpRoutePatchCall), [*locations list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationListCall), [*locations meshes create*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationMeshCreateCall), [*locations meshes delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationMeshDeleteCall), [*locations meshes get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationMeshGetCall), [*locations meshes get iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationMeshGetIamPolicyCall), [*locations meshes list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationMeshListCall), [*locations meshes patch*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationMeshPatchCall), [*locations meshes set iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationMeshSetIamPolicyCall), [*locations meshes test iam permissions*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationMeshTestIamPermissionCall), [*locations operations cancel*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationOperationDeleteCall), 
[*locations operations get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationOperationListCall), [*locations service bindings create*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationServiceBindingCreateCall), [*locations service bindings delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationServiceBindingDeleteCall), [*locations service bindings get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationServiceBindingGetCall), [*locations service bindings get iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationServiceBindingGetIamPolicyCall), [*locations service bindings list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationServiceBindingListCall), [*locations service bindings set iam policy*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationServiceBindingSetIamPolicyCall), [*locations service bindings test iam permissions*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationServiceBindingTestIamPermissionCall), [*locations tcp routes create*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTcpRouteCreateCall), [*locations tcp routes delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTcpRouteDeleteCall), [*locations tcp routes 
get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTcpRouteGetCall), [*locations tcp routes list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTcpRouteListCall), [*locations tcp routes patch*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTcpRoutePatchCall), [*locations tls routes create*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTlsRouteCreateCall), [*locations tls routes delete*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTlsRouteDeleteCall), [*locations tls routes get*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTlsRouteGetCall), [*locations tls routes list*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTlsRouteListCall) and [*locations tls routes patch*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/api::ProjectLocationTlsRoutePatchCall) + * [*locations edge cache keysets get iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheKeysetGetIamPolicyCall), [*locations edge cache keysets set iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheKeysetSetIamPolicyCall), [*locations edge cache keysets test iam permissions*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheKeysetTestIamPermissionCall), [*locations edge cache origins get iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheOriginGetIamPolicyCall), [*locations edge cache origins 
set iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheOriginSetIamPolicyCall), [*locations edge cache origins test iam permissions*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheOriginTestIamPermissionCall), [*locations edge cache services get iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheServiceGetIamPolicyCall), [*locations edge cache services set iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheServiceSetIamPolicyCall), [*locations edge cache services test iam permissions*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEdgeCacheServiceTestIamPermissionCall), [*locations endpoint policies create*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyCreateCall), [*locations endpoint policies delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyDeleteCall), [*locations endpoint policies get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyGetCall), [*locations endpoint policies get iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyGetIamPolicyCall), [*locations endpoint policies list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyListCall), [*locations endpoint policies patch*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyPatchCall), [*locations endpoint policies set iam 
policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEndpointPolicySetIamPolicyCall), [*locations endpoint policies test iam permissions*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationEndpointPolicyTestIamPermissionCall), [*locations gateways create*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGatewayCreateCall), [*locations gateways delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGatewayDeleteCall), [*locations gateways get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGatewayGetCall), [*locations gateways get iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGatewayGetIamPolicyCall), [*locations gateways list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGatewayListCall), [*locations gateways patch*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGatewayPatchCall), [*locations gateways set iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGatewaySetIamPolicyCall), [*locations gateways test iam permissions*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGatewayTestIamPermissionCall), [*locations get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGetCall), [*locations grpc routes create*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGrpcRouteCreateCall), [*locations grpc routes delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGrpcRouteDeleteCall), 
[*locations grpc routes get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGrpcRouteGetCall), [*locations grpc routes list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGrpcRouteListCall), [*locations grpc routes patch*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationGrpcRoutePatchCall), [*locations http routes create*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationHttpRouteCreateCall), [*locations http routes delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationHttpRouteDeleteCall), [*locations http routes get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationHttpRouteGetCall), [*locations http routes list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationHttpRouteListCall), [*locations http routes patch*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationHttpRoutePatchCall), [*locations list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationListCall), [*locations meshes create*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationMeshCreateCall), [*locations meshes delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationMeshDeleteCall), [*locations meshes get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationMeshGetCall), [*locations meshes get iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationMeshGetIamPolicyCall), [*locations meshes 
list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationMeshListCall), [*locations meshes patch*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationMeshPatchCall), [*locations meshes set iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationMeshSetIamPolicyCall), [*locations meshes test iam permissions*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationMeshTestIamPermissionCall), [*locations operations cancel*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationOperationListCall), [*locations service bindings create*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationServiceBindingCreateCall), [*locations service bindings delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationServiceBindingDeleteCall), [*locations service bindings get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationServiceBindingGetCall), [*locations service bindings get iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationServiceBindingGetIamPolicyCall), [*locations service bindings 
list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationServiceBindingListCall), [*locations service bindings set iam policy*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationServiceBindingSetIamPolicyCall), [*locations service bindings test iam permissions*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationServiceBindingTestIamPermissionCall), [*locations tcp routes create*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTcpRouteCreateCall), [*locations tcp routes delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTcpRouteDeleteCall), [*locations tcp routes get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTcpRouteGetCall), [*locations tcp routes list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTcpRouteListCall), [*locations tcp routes patch*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTcpRoutePatchCall), [*locations tls routes create*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTlsRouteCreateCall), [*locations tls routes delete*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTlsRouteDeleteCall), [*locations tls routes get*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTlsRouteGetCall), [*locations tls routes list*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTlsRouteListCall) and [*locations tls routes patch*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/api::ProjectLocationTlsRoutePatchCall) @@ 
-23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/NetworkServices)** +* **[Hub](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/NetworkServices)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::CallBuilder) -* **[Resources](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::CallBuilder) +* **[Resources](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::Part)** + * **[Parts](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with 
applicable traits to further categorize them and ease browsing. @@ -148,17 +148,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -168,29 +168,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::Delegate) to the -[Method Builder](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::Delegate) to the +[Method Builder](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::RequestValue) and -[decodable](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::RequestValue) and +[decodable](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-networkservices1/5.0.2-beta-1+20230105/google_networkservices1/client::RequestValue) are moved +* [request values](https://docs.rs/google-networkservices1/5.0.2+20230105/google_networkservices1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/networkservices1/src/api.rs b/gen/networkservices1/src/api.rs index f1b837ec41..4e580a33ca 100644 --- a/gen/networkservices1/src/api.rs +++ b/gen/networkservices1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> NetworkServices { NetworkServices { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://networkservices.googleapis.com/".to_string(), _root_url: "https://networkservices.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> NetworkServices { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/networkservices1/src/client.rs b/gen/networkservices1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/networkservices1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/networkservices1/src/lib.rs b/gen/networkservices1/src/lib.rs index 0df767e461..64d66d9337 100644 --- a/gen/networkservices1/src/lib.rs +++ b/gen/networkservices1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *NetworkServices* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *networkservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *NetworkServices* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *networkservices:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *NetworkServices* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/networking). diff --git a/gen/notebooks1-cli/Cargo.toml b/gen/notebooks1-cli/Cargo.toml index fc42d6b779..a3d1d061f5 100644 --- a/gen/notebooks1-cli/Cargo.toml +++ b/gen/notebooks1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-notebooks1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20221213" authors = ["Sebastian Thiel "] description = "A complete library to interact with AI Platform Notebooks (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/notebooks1-cli" @@ -20,13 +20,13 @@ name = "notebooks1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-notebooks1] path = "../notebooks1" -version = "4.0.1+20220224" +version = "5.0.2+20221213" + diff --git a/gen/notebooks1-cli/README.md b/gen/notebooks1-cli/README.md index 81762be86d..83cf43bb39 100644 --- a/gen/notebooks1-cli/README.md +++ b/gen/notebooks1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *AI Platform Notebooks* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *AI Platform Notebooks* API at revision *20221213*. The CLI is at version *5.0.2*. ```bash notebooks1 [options] @@ -41,6 +41,7 @@ notebooks1 [options] locations-get [-p ]... [-o ] locations-instances-create (-r )... [-p ]... [-o ] locations-instances-delete [-p ]... [-o ] + locations-instances-diagnose (-r )... [-p ]... [-o ] locations-instances-get [-p ]... [-o ] locations-instances-get-iam-policy [-p ]... [-o ] locations-instances-get-instance-health [-p ]... [-o ] @@ -69,9 +70,12 @@ notebooks1 [options] locations-operations-list [-p ]... [-o ] locations-runtimes-create (-r )... [-p ]... [-o ] locations-runtimes-delete [-p ]... [-o ] + locations-runtimes-diagnose (-r )... [-p ]... [-o ] locations-runtimes-get [-p ]... [-o ] locations-runtimes-get-iam-policy [-p ]... [-o ] locations-runtimes-list [-p ]... [-o ] + locations-runtimes-patch (-r )... [-p ]... [-o ] + locations-runtimes-refresh-runtime-token-internal (-r )... [-p ]... [-o ] locations-runtimes-report-event (-r )... [-p ]... [-o ] locations-runtimes-reset (-r )... [-p ]... [-o ] locations-runtimes-set-iam-policy (-r )... [-p ]... [-o ] @@ -79,6 +83,7 @@ notebooks1 [options] locations-runtimes-stop (-r )... [-p ]... [-o ] locations-runtimes-switch (-r )... [-p ]... [-o ] locations-runtimes-test-iam-permissions (-r )... [-p ]... [-o ] + locations-runtimes-upgrade (-r )... [-p ]... [-o ] locations-schedules-create (-r )... [-p ]... [-o ] locations-schedules-delete [-p ]... [-o ] locations-schedules-get [-p ]... 
[-o ] diff --git a/gen/notebooks1-cli/mkdocs.yml b/gen/notebooks1-cli/mkdocs.yml index ddb57ee88f..8d025f5ef3 100644 --- a/gen/notebooks1-cli/mkdocs.yml +++ b/gen/notebooks1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: AI Platform Notebooks v4.0.1+20220224 +site_name: AI Platform Notebooks v5.0.2+20221213 site_url: http://byron.github.io/google-apis-rs/google-notebooks1-cli site_description: A complete library to interact with AI Platform Notebooks (protocol v1) @@ -7,62 +7,68 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/notebooks1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-environments-create.md', 'Projects', 'Locations Environments Create'] -- ['projects_locations-environments-delete.md', 'Projects', 'Locations Environments Delete'] -- ['projects_locations-environments-get.md', 'Projects', 'Locations Environments Get'] -- ['projects_locations-environments-list.md', 'Projects', 'Locations Environments List'] -- ['projects_locations-executions-create.md', 'Projects', 'Locations Executions Create'] -- ['projects_locations-executions-delete.md', 'Projects', 'Locations Executions Delete'] -- ['projects_locations-executions-get.md', 'Projects', 'Locations Executions Get'] -- ['projects_locations-executions-list.md', 'Projects', 'Locations Executions List'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-get-iam-policy.md', 'Projects', 'Locations Instances Get Iam Policy'] -- ['projects_locations-instances-get-instance-health.md', 'Projects', 'Locations Instances Get Instance Health'] -- ['projects_locations-instances-is-upgradeable.md', 'Projects', 'Locations Instances Is Upgradeable'] -- 
['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-register.md', 'Projects', 'Locations Instances Register'] -- ['projects_locations-instances-report.md', 'Projects', 'Locations Instances Report'] -- ['projects_locations-instances-reset.md', 'Projects', 'Locations Instances Reset'] -- ['projects_locations-instances-rollback.md', 'Projects', 'Locations Instances Rollback'] -- ['projects_locations-instances-set-accelerator.md', 'Projects', 'Locations Instances Set Accelerator'] -- ['projects_locations-instances-set-iam-policy.md', 'Projects', 'Locations Instances Set Iam Policy'] -- ['projects_locations-instances-set-labels.md', 'Projects', 'Locations Instances Set Labels'] -- ['projects_locations-instances-set-machine-type.md', 'Projects', 'Locations Instances Set Machine Type'] -- ['projects_locations-instances-start.md', 'Projects', 'Locations Instances Start'] -- ['projects_locations-instances-stop.md', 'Projects', 'Locations Instances Stop'] -- ['projects_locations-instances-test-iam-permissions.md', 'Projects', 'Locations Instances Test Iam Permissions'] -- ['projects_locations-instances-update-config.md', 'Projects', 'Locations Instances Update Config'] -- ['projects_locations-instances-update-metadata-items.md', 'Projects', 'Locations Instances Update Metadata Items'] -- ['projects_locations-instances-update-shielded-instance-config.md', 'Projects', 'Locations Instances Update Shielded Instance Config'] -- ['projects_locations-instances-upgrade.md', 'Projects', 'Locations Instances Upgrade'] -- ['projects_locations-instances-upgrade-internal.md', 'Projects', 'Locations Instances Upgrade Internal'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 
'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-runtimes-create.md', 'Projects', 'Locations Runtimes Create'] -- ['projects_locations-runtimes-delete.md', 'Projects', 'Locations Runtimes Delete'] -- ['projects_locations-runtimes-get.md', 'Projects', 'Locations Runtimes Get'] -- ['projects_locations-runtimes-get-iam-policy.md', 'Projects', 'Locations Runtimes Get Iam Policy'] -- ['projects_locations-runtimes-list.md', 'Projects', 'Locations Runtimes List'] -- ['projects_locations-runtimes-report-event.md', 'Projects', 'Locations Runtimes Report Event'] -- ['projects_locations-runtimes-reset.md', 'Projects', 'Locations Runtimes Reset'] -- ['projects_locations-runtimes-set-iam-policy.md', 'Projects', 'Locations Runtimes Set Iam Policy'] -- ['projects_locations-runtimes-start.md', 'Projects', 'Locations Runtimes Start'] -- ['projects_locations-runtimes-stop.md', 'Projects', 'Locations Runtimes Stop'] -- ['projects_locations-runtimes-switch.md', 'Projects', 'Locations Runtimes Switch'] -- ['projects_locations-runtimes-test-iam-permissions.md', 'Projects', 'Locations Runtimes Test Iam Permissions'] -- ['projects_locations-schedules-create.md', 'Projects', 'Locations Schedules Create'] -- ['projects_locations-schedules-delete.md', 'Projects', 'Locations Schedules Delete'] -- ['projects_locations-schedules-get.md', 'Projects', 'Locations Schedules Get'] -- ['projects_locations-schedules-list.md', 'Projects', 'Locations Schedules List'] -- ['projects_locations-schedules-trigger.md', 'Projects', 'Locations Schedules Trigger'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Environments Create': 'projects_locations-environments-create.md' + - 'Locations Environments Delete': 'projects_locations-environments-delete.md' + - 'Locations Environments Get': 'projects_locations-environments-get.md' + - 'Locations Environments List': 'projects_locations-environments-list.md' + - 
'Locations Executions Create': 'projects_locations-executions-create.md' + - 'Locations Executions Delete': 'projects_locations-executions-delete.md' + - 'Locations Executions Get': 'projects_locations-executions-get.md' + - 'Locations Executions List': 'projects_locations-executions-list.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Diagnose': 'projects_locations-instances-diagnose.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances Get Iam Policy': 'projects_locations-instances-get-iam-policy.md' + - 'Locations Instances Get Instance Health': 'projects_locations-instances-get-instance-health.md' + - 'Locations Instances Is Upgradeable': 'projects_locations-instances-is-upgradeable.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Register': 'projects_locations-instances-register.md' + - 'Locations Instances Report': 'projects_locations-instances-report.md' + - 'Locations Instances Reset': 'projects_locations-instances-reset.md' + - 'Locations Instances Rollback': 'projects_locations-instances-rollback.md' + - 'Locations Instances Set Accelerator': 'projects_locations-instances-set-accelerator.md' + - 'Locations Instances Set Iam Policy': 'projects_locations-instances-set-iam-policy.md' + - 'Locations Instances Set Labels': 'projects_locations-instances-set-labels.md' + - 'Locations Instances Set Machine Type': 'projects_locations-instances-set-machine-type.md' + - 'Locations Instances Start': 'projects_locations-instances-start.md' + - 'Locations Instances Stop': 'projects_locations-instances-stop.md' + - 'Locations Instances Test Iam Permissions': 'projects_locations-instances-test-iam-permissions.md' + - 'Locations Instances Update Config': 'projects_locations-instances-update-config.md' 
+ - 'Locations Instances Update Metadata Items': 'projects_locations-instances-update-metadata-items.md' + - 'Locations Instances Update Shielded Instance Config': 'projects_locations-instances-update-shielded-instance-config.md' + - 'Locations Instances Upgrade': 'projects_locations-instances-upgrade.md' + - 'Locations Instances Upgrade Internal': 'projects_locations-instances-upgrade-internal.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Runtimes Create': 'projects_locations-runtimes-create.md' + - 'Locations Runtimes Delete': 'projects_locations-runtimes-delete.md' + - 'Locations Runtimes Diagnose': 'projects_locations-runtimes-diagnose.md' + - 'Locations Runtimes Get': 'projects_locations-runtimes-get.md' + - 'Locations Runtimes Get Iam Policy': 'projects_locations-runtimes-get-iam-policy.md' + - 'Locations Runtimes List': 'projects_locations-runtimes-list.md' + - 'Locations Runtimes Patch': 'projects_locations-runtimes-patch.md' + - 'Locations Runtimes Refresh Runtime Token Internal': 'projects_locations-runtimes-refresh-runtime-token-internal.md' + - 'Locations Runtimes Report Event': 'projects_locations-runtimes-report-event.md' + - 'Locations Runtimes Reset': 'projects_locations-runtimes-reset.md' + - 'Locations Runtimes Set Iam Policy': 'projects_locations-runtimes-set-iam-policy.md' + - 'Locations Runtimes Start': 'projects_locations-runtimes-start.md' + - 'Locations Runtimes Stop': 'projects_locations-runtimes-stop.md' + - 'Locations Runtimes Switch': 'projects_locations-runtimes-switch.md' + - 'Locations Runtimes Test Iam Permissions': 'projects_locations-runtimes-test-iam-permissions.md' + - 'Locations Runtimes Upgrade': 
'projects_locations-runtimes-upgrade.md' + - 'Locations Schedules Create': 'projects_locations-schedules-create.md' + - 'Locations Schedules Delete': 'projects_locations-schedules-delete.md' + - 'Locations Schedules Get': 'projects_locations-schedules-get.md' + - 'Locations Schedules List': 'projects_locations-schedules-list.md' + - 'Locations Schedules Trigger': 'projects_locations-schedules-trigger.md' theme: readthedocs diff --git a/gen/notebooks1-cli/src/client.rs b/gen/notebooks1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/notebooks1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/notebooks1-cli/src/main.rs b/gen/notebooks1-cli/src/main.rs index c6be808301..479f3acac1 100644 --- a/gen/notebooks1-cli/src/main.rs +++ b/gen/notebooks1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_notebooks1::{api, Error, oauth2}; +use google_notebooks1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -263,7 +262,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -539,7 +538,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -828,6 +827,95 @@ where } } + async fn _projects_locations_instances_diagnose(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = 
parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "diagnostic-config.copy-home-files-flag-enabled" => Some(("diagnosticConfig.copyHomeFilesFlagEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "diagnostic-config.gcs-bucket" => Some(("diagnosticConfig.gcsBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "diagnostic-config.packet-capture-flag-enabled" => Some(("diagnosticConfig.packetCaptureFlagEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "diagnostic-config.relative-path" => Some(("diagnosticConfig.relativePath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "diagnostic-config.repair-flag-enabled" => Some(("diagnosticConfig.repairFlagEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["copy-home-files-flag-enabled", "diagnostic-config", "gcs-bucket", "packet-capture-flag-enabled", "relative-path", "repair-flag-enabled"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DiagnoseInstanceRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_instances_diagnose(request, opt.value_of("name").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_instances_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_instances_get(opt.value_of("name").unwrap_or("")); @@ -887,7 +975,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, 
"options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1054,7 +1142,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2477,7 +2565,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2727,7 +2815,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2810,13 +2898,16 @@ where "metrics.system-metrics" => Some(("metrics.systemMetrics", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "software-config.custom-gpu-driver-path" => Some(("softwareConfig.customGpuDriverPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "software-config.disable-terminal" => Some(("softwareConfig.disableTerminal", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "software-config.enable-health-monitoring" => Some(("softwareConfig.enableHealthMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "software-config.idle-shutdown" => Some(("softwareConfig.idleShutdown", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "software-config.idle-shutdown-timeout" => 
Some(("softwareConfig.idleShutdownTimeout", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "software-config.install-gpu-driver" => Some(("softwareConfig.installGpuDriver", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "software-config.notebook-upgrade-schedule" => Some(("softwareConfig.notebookUpgradeSchedule", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "software-config.post-startup-script" => Some(("softwareConfig.postStartupScript", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "software-config.post-startup-script-behavior" => Some(("softwareConfig.postStartupScriptBehavior", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "software-config.upgradeable" => Some(("softwareConfig.upgradeable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "software-config.version" => Some(("softwareConfig.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "virtual-machine.instance-id" => Some(("virtualMachine.instanceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2854,7 +2945,7 @@ where "virtual-machine.virtual-machine-config.tags" => Some(("virtualMachine.virtualMachineConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "virtual-machine.virtual-machine-config.zone" => Some(("virtualMachine.virtualMachineConfig.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["accelerator-config", "access-config", "access-type", "auto-delete", "boot", "core-count", "create-time", "custom-gpu-driver-path", "data-disk", "description", "device-name", "disk-name", "disk-size-gb", 
"disk-type", "enable-health-monitoring", "enable-integrity-monitoring", "enable-secure-boot", "enable-vtpm", "encryption-config", "guest-attributes", "health-state", "idle-shutdown", "idle-shutdown-timeout", "index", "initialize-params", "install-gpu-driver", "instance-id", "instance-name", "interface", "internal-ip-only", "kind", "kms-key", "labels", "licenses", "machine-type", "metadata", "metrics", "mode", "name", "network", "nic-type", "notebook-upgrade-schedule", "post-startup-script", "proxy-uri", "reserved-ip-range", "runtime-owner", "shielded-instance-config", "software-config", "source", "state", "subnet", "system-metrics", "tags", "type", "update-time", "upgradeable", "virtual-machine", "virtual-machine-config", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["accelerator-config", "access-config", "access-type", "auto-delete", "boot", "core-count", "create-time", "custom-gpu-driver-path", "data-disk", "description", "device-name", "disable-terminal", "disk-name", "disk-size-gb", "disk-type", "enable-health-monitoring", "enable-integrity-monitoring", "enable-secure-boot", "enable-vtpm", "encryption-config", "guest-attributes", "health-state", "idle-shutdown", "idle-shutdown-timeout", "index", "initialize-params", "install-gpu-driver", "instance-id", "instance-name", "interface", "internal-ip-only", "kind", "kms-key", "labels", "licenses", "machine-type", "metadata", "metrics", "mode", "name", "network", "nic-type", "notebook-upgrade-schedule", "post-startup-script", "post-startup-script-behavior", "proxy-uri", "reserved-ip-range", "runtime-owner", "shielded-instance-config", "software-config", "source", "state", "subnet", "system-metrics", "tags", "type", "update-time", "upgradeable", "version", "virtual-machine", "virtual-machine-config", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2871,6 +2962,9 @@ where "runtime-id" => { call = 
call.runtime_id(value.unwrap_or("")); }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, _ => { let mut found = false; for param in &self.gp { @@ -2884,7 +2978,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["runtime-id"].iter().map(|v|*v)); + v.extend(["request-id", "runtime-id"].iter().map(|v|*v)); v } )); } } @@ -2921,6 +3015,99 @@ where async fn _projects_locations_runtimes_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_runtimes_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut 
value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_runtimes_diagnose(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "diagnostic-config.copy-home-files-flag-enabled" => Some(("diagnosticConfig.copyHomeFilesFlagEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "diagnostic-config.gcs-bucket" => Some(("diagnosticConfig.gcsBucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "diagnostic-config.packet-capture-flag-enabled" => Some(("diagnosticConfig.packetCaptureFlagEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "diagnostic-config.relative-path" => Some(("diagnosticConfig.relativePath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "diagnostic-config.repair-flag-enabled" => Some(("diagnosticConfig.repairFlagEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["copy-home-files-flag-enabled", "diagnostic-config", "gcs-bucket", "packet-capture-flag-enabled", "relative-path", "repair-flag-enabled"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::DiagnoseRuntimeRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_runtimes_diagnose(request, opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -3029,7 +3216,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( 
value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3088,7 +3275,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3137,6 +3324,236 @@ where } } + async fn _projects_locations_runtimes_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "access-config.access-type" => Some(("accessConfig.accessType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access-config.proxy-uri" => Some(("accessConfig.proxyUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access-config.runtime-owner" => Some(("accessConfig.runtimeOwner", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "health-state" => Some(("healthState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "metrics.system-metrics" => Some(("metrics.systemMetrics", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "software-config.custom-gpu-driver-path" => Some(("softwareConfig.customGpuDriverPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "software-config.disable-terminal" => Some(("softwareConfig.disableTerminal", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "software-config.enable-health-monitoring" => Some(("softwareConfig.enableHealthMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "software-config.idle-shutdown" => Some(("softwareConfig.idleShutdown", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "software-config.idle-shutdown-timeout" => Some(("softwareConfig.idleShutdownTimeout", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "software-config.install-gpu-driver" => Some(("softwareConfig.installGpuDriver", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "software-config.notebook-upgrade-schedule" => Some(("softwareConfig.notebookUpgradeSchedule", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "software-config.post-startup-script" => 
Some(("softwareConfig.postStartupScript", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "software-config.post-startup-script-behavior" => Some(("softwareConfig.postStartupScriptBehavior", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "software-config.upgradeable" => Some(("softwareConfig.upgradeable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "software-config.version" => Some(("softwareConfig.version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.instance-id" => Some(("virtualMachine.instanceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.instance-name" => Some(("virtualMachine.instanceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.accelerator-config.core-count" => Some(("virtualMachine.virtualMachineConfig.acceleratorConfig.coreCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.accelerator-config.type" => Some(("virtualMachine.virtualMachineConfig.acceleratorConfig.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.auto-delete" => Some(("virtualMachine.virtualMachineConfig.dataDisk.autoDelete", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.boot" => Some(("virtualMachine.virtualMachineConfig.dataDisk.boot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.device-name" => Some(("virtualMachine.virtualMachineConfig.dataDisk.deviceName", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.index" => Some(("virtualMachine.virtualMachineConfig.dataDisk.index", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.initialize-params.description" => Some(("virtualMachine.virtualMachineConfig.dataDisk.initializeParams.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.initialize-params.disk-name" => Some(("virtualMachine.virtualMachineConfig.dataDisk.initializeParams.diskName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.initialize-params.disk-size-gb" => Some(("virtualMachine.virtualMachineConfig.dataDisk.initializeParams.diskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.initialize-params.disk-type" => Some(("virtualMachine.virtualMachineConfig.dataDisk.initializeParams.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.initialize-params.labels" => Some(("virtualMachine.virtualMachineConfig.dataDisk.initializeParams.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "virtual-machine.virtual-machine-config.data-disk.interface" => Some(("virtualMachine.virtualMachineConfig.dataDisk.interface", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.kind" => Some(("virtualMachine.virtualMachineConfig.dataDisk.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.licenses" => Some(("virtualMachine.virtualMachineConfig.dataDisk.licenses", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), + "virtual-machine.virtual-machine-config.data-disk.mode" => Some(("virtualMachine.virtualMachineConfig.dataDisk.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.source" => Some(("virtualMachine.virtualMachineConfig.dataDisk.source", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.data-disk.type" => Some(("virtualMachine.virtualMachineConfig.dataDisk.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.encryption-config.kms-key" => Some(("virtualMachine.virtualMachineConfig.encryptionConfig.kmsKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.guest-attributes" => Some(("virtualMachine.virtualMachineConfig.guestAttributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "virtual-machine.virtual-machine-config.internal-ip-only" => Some(("virtualMachine.virtualMachineConfig.internalIpOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.labels" => Some(("virtualMachine.virtualMachineConfig.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "virtual-machine.virtual-machine-config.machine-type" => Some(("virtualMachine.virtualMachineConfig.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.metadata" => Some(("virtualMachine.virtualMachineConfig.metadata", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "virtual-machine.virtual-machine-config.network" => Some(("virtualMachine.virtualMachineConfig.network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.nic-type" => 
Some(("virtualMachine.virtualMachineConfig.nicType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.reserved-ip-range" => Some(("virtualMachine.virtualMachineConfig.reservedIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.shielded-instance-config.enable-integrity-monitoring" => Some(("virtualMachine.virtualMachineConfig.shieldedInstanceConfig.enableIntegrityMonitoring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.shielded-instance-config.enable-secure-boot" => Some(("virtualMachine.virtualMachineConfig.shieldedInstanceConfig.enableSecureBoot", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.shielded-instance-config.enable-vtpm" => Some(("virtualMachine.virtualMachineConfig.shieldedInstanceConfig.enableVtpm", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.subnet" => Some(("virtualMachine.virtualMachineConfig.subnet", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "virtual-machine.virtual-machine-config.tags" => Some(("virtualMachine.virtualMachineConfig.tags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "virtual-machine.virtual-machine-config.zone" => Some(("virtualMachine.virtualMachineConfig.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["accelerator-config", "access-config", "access-type", "auto-delete", "boot", "core-count", "create-time", "custom-gpu-driver-path", "data-disk", "description", "device-name", "disable-terminal", "disk-name", "disk-size-gb", "disk-type", "enable-health-monitoring", "enable-integrity-monitoring", "enable-secure-boot", "enable-vtpm", "encryption-config", 
"guest-attributes", "health-state", "idle-shutdown", "idle-shutdown-timeout", "index", "initialize-params", "install-gpu-driver", "instance-id", "instance-name", "interface", "internal-ip-only", "kind", "kms-key", "labels", "licenses", "machine-type", "metadata", "metrics", "mode", "name", "network", "nic-type", "notebook-upgrade-schedule", "post-startup-script", "post-startup-script-behavior", "proxy-uri", "reserved-ip-range", "runtime-owner", "shielded-instance-config", "software-config", "source", "state", "subnet", "system-metrics", "tags", "type", "update-time", "upgradeable", "version", "virtual-machine", "virtual-machine-config", "zone"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Runtime = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_runtimes_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + "request-id" => { + call = call.request_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["request-id", "update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let 
protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_runtimes_refresh_runtime_token_internal(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "vm-id" => Some(("vmId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["vm-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RefreshRuntimeTokenInternalRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_runtimes_refresh_runtime_token_internal(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_runtimes_report_event(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3248,8 +3665,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["request-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3418,8 +3836,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["request-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3502,8 +3921,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["request-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3589,8 +4009,9 @@ where "accelerator-config.core-count" => Some(("acceleratorConfig.coreCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "accelerator-config.type" => Some(("acceleratorConfig.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "machine-type" => Some(("machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["accelerator-config", "core-count", "machine-type", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["accelerator-config", "core-count", "machine-type", "request-id", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3735,6 +4156,91 @@ where } } + async fn _projects_locations_runtimes_upgrade(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + 
err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "request-id" => Some(("requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["request-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::UpgradeRuntimeRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_runtimes_upgrade(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, 
output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_schedules_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3962,7 +4468,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -4141,6 +4647,9 @@ where ("locations-instances-delete", Some(opt)) => { call_result = self._projects_locations_instances_delete(opt, dry_run, &mut err).await; }, + ("locations-instances-diagnose", Some(opt)) => { + call_result = self._projects_locations_instances_diagnose(opt, dry_run, &mut err).await; + }, ("locations-instances-get", Some(opt)) => { call_result = self._projects_locations_instances_get(opt, dry_run, &mut err).await; }, @@ -4225,6 +4734,9 @@ where ("locations-runtimes-delete", Some(opt)) => { call_result = self._projects_locations_runtimes_delete(opt, dry_run, &mut err).await; }, + ("locations-runtimes-diagnose", Some(opt)) => { + call_result = self._projects_locations_runtimes_diagnose(opt, dry_run, &mut err).await; + }, ("locations-runtimes-get", Some(opt)) => { call_result = self._projects_locations_runtimes_get(opt, dry_run, &mut err).await; }, @@ -4234,6 +4746,12 @@ where ("locations-runtimes-list", Some(opt)) => { call_result = self._projects_locations_runtimes_list(opt, dry_run, &mut err).await; }, + ("locations-runtimes-patch", Some(opt)) => { + call_result = self._projects_locations_runtimes_patch(opt, dry_run, &mut err).await; + }, + ("locations-runtimes-refresh-runtime-token-internal", Some(opt)) => { + 
call_result = self._projects_locations_runtimes_refresh_runtime_token_internal(opt, dry_run, &mut err).await; + }, ("locations-runtimes-report-event", Some(opt)) => { call_result = self._projects_locations_runtimes_report_event(opt, dry_run, &mut err).await; }, @@ -4255,6 +4773,9 @@ where ("locations-runtimes-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_runtimes_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-runtimes-upgrade", Some(opt)) => { + call_result = self._projects_locations_runtimes_upgrade(opt, dry_run, &mut err).await; + }, ("locations-schedules-create", Some(opt)) => { call_result = self._projects_locations_schedules_create(opt, dry_run, &mut err).await; }, @@ -4349,7 +4870,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-environments-create', 'locations-environments-delete', 'locations-environments-get', 'locations-environments-list', 'locations-executions-create', 'locations-executions-delete', 'locations-executions-get', 'locations-executions-list', 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-get', 'locations-instances-get-iam-policy', 'locations-instances-get-instance-health', 'locations-instances-is-upgradeable', 'locations-instances-list', 'locations-instances-register', 'locations-instances-report', 'locations-instances-reset', 'locations-instances-rollback', 'locations-instances-set-accelerator', 'locations-instances-set-iam-policy', 'locations-instances-set-labels', 'locations-instances-set-machine-type', 'locations-instances-start', 'locations-instances-stop', 'locations-instances-test-iam-permissions', 'locations-instances-update-config', 'locations-instances-update-metadata-items', 'locations-instances-update-shielded-instance-config', 'locations-instances-upgrade', 'locations-instances-upgrade-internal', 'locations-list', 'locations-operations-cancel', 
'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-runtimes-create', 'locations-runtimes-delete', 'locations-runtimes-get', 'locations-runtimes-get-iam-policy', 'locations-runtimes-list', 'locations-runtimes-report-event', 'locations-runtimes-reset', 'locations-runtimes-set-iam-policy', 'locations-runtimes-start', 'locations-runtimes-stop', 'locations-runtimes-switch', 'locations-runtimes-test-iam-permissions', 'locations-schedules-create', 'locations-schedules-delete', 'locations-schedules-get', 'locations-schedules-list' and 'locations-schedules-trigger'", vec![ + ("projects", "methods: 'locations-environments-create', 'locations-environments-delete', 'locations-environments-get', 'locations-environments-list', 'locations-executions-create', 'locations-executions-delete', 'locations-executions-get', 'locations-executions-list', 'locations-get', 'locations-instances-create', 'locations-instances-delete', 'locations-instances-diagnose', 'locations-instances-get', 'locations-instances-get-iam-policy', 'locations-instances-get-instance-health', 'locations-instances-is-upgradeable', 'locations-instances-list', 'locations-instances-register', 'locations-instances-report', 'locations-instances-reset', 'locations-instances-rollback', 'locations-instances-set-accelerator', 'locations-instances-set-iam-policy', 'locations-instances-set-labels', 'locations-instances-set-machine-type', 'locations-instances-start', 'locations-instances-stop', 'locations-instances-test-iam-permissions', 'locations-instances-update-config', 'locations-instances-update-metadata-items', 'locations-instances-update-shielded-instance-config', 'locations-instances-upgrade', 'locations-instances-upgrade-internal', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-runtimes-create', 'locations-runtimes-delete', 'locations-runtimes-diagnose', 
'locations-runtimes-get', 'locations-runtimes-get-iam-policy', 'locations-runtimes-list', 'locations-runtimes-patch', 'locations-runtimes-refresh-runtime-token-internal', 'locations-runtimes-report-event', 'locations-runtimes-reset', 'locations-runtimes-set-iam-policy', 'locations-runtimes-start', 'locations-runtimes-stop', 'locations-runtimes-switch', 'locations-runtimes-test-iam-permissions', 'locations-runtimes-upgrade', 'locations-schedules-create', 'locations-schedules-delete', 'locations-schedules-get', 'locations-schedules-list' and 'locations-schedules-trigger'", vec![ ("locations-environments-create", Some(r##"Creates a new Environment."##), "Details at http://byron.github.io/google-apis-rs/google_notebooks1_cli/projects_locations-environments-create", @@ -4604,6 +5125,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-instances-diagnose", + Some(r##"Creates a Diagnostic File and runs Diagnostic Tool given an Instance."##), + "Details at http://byron.github.io/google-apis-rs/google_notebooks1_cli/projects_locations-instances-diagnose", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Format: `projects/{project_id}/locations/{location}/instances/{instance_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -4638,7 +5187,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4866,7 +5415,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5006,7 +5555,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5328,6 +5877,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-runtimes-diagnose", + Some(r##"Creates a Diagnostic File and runs Diagnostic Tool given a Runtime."##), + "Details at http://byron.github.io/google-apis-rs/google_notebooks1_cli/projects_locations-runtimes-diagnose", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Format: `projects/{project_id}/locations/{location}/runtimes/{runtimes_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5362,7 +5939,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5394,6 +5971,62 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-runtimes-patch", + Some(r##"Update Notebook Runtime configuration."##), + "Details at http://byron.github.io/google-apis-rs/google_notebooks1_cli/projects_locations-runtimes-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Output only. The resource name of the runtime. 
Format: `projects/{project}/locations/{location}/runtimes/{runtimeId}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-runtimes-refresh-runtime-token-internal", + Some(r##"Gets an access token for the consumer service account that the customer attached to the runtime. Only accessible from the tenant instance."##), + "Details at http://byron.github.io/google-apis-rs/google_notebooks1_cli/projects_locations-runtimes-refresh-runtime-token-internal", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Format: `projects/{project_id}/locations/{location}/runtimes/{runtime_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5462,7 +6095,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5574,7 +6207,35 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-runtimes-upgrade", + Some(r##"Upgrades a Managed Notebook Runtime to the latest version."##), + "Details at http://byron.github.io/google-apis-rs/google_notebooks1_cli/projects_locations-runtimes-upgrade", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Format: `projects/{project_id}/locations/{location}/runtimes/{runtime_id}`"##), Some(true), Some(false)), @@ -5724,7 +6385,7 @@ async fn main() { let mut app = App::new("notebooks1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20221213") .about("Notebooks API is used to manage notebook resources in Google Cloud.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_notebooks1_cli") .arg(Arg::with_name("url") diff --git a/gen/notebooks1/Cargo.toml b/gen/notebooks1/Cargo.toml index 6e0d17527b..275e6eee5c 100644 --- a/gen/notebooks1/Cargo.toml +++ b/gen/notebooks1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-notebooks1" -version = "5.0.2-beta-1+20221213" +version = "5.0.2+20221213" authors = ["Sebastian Thiel "] description = "A complete library to interact with AI Platform Notebooks (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/notebooks1" homepage = "https://cloud.google.com/notebooks/docs/" -documentation = "https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213" +documentation = "https://docs.rs/google-notebooks1/5.0.2+20221213" license = "MIT" keywords = ["notebooks", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/notebooks1/README.md b/gen/notebooks1/README.md index e7ebacdc47..02240a4156 100644 --- a/gen/notebooks1/README.md +++ b/gen/notebooks1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-notebooks1` library allows access to all features of the *Google AI Platform Notebooks* service. -This documentation was generated from *AI Platform Notebooks* crate version *5.0.2-beta-1+20221213*, where *20221213* is the exact revision of the *notebooks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *AI Platform Notebooks* crate version *5.0.2+20221213*, where *20221213* is the exact revision of the *notebooks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *AI Platform Notebooks* *v1* API can be found at the [official documentation site](https://cloud.google.com/notebooks/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/AIPlatformNotebooks) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/AIPlatformNotebooks) ... * projects - * [*locations environments create*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationEnvironmentCreateCall), [*locations environments delete*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationEnvironmentDeleteCall), [*locations environments get*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationEnvironmentGetCall), [*locations environments list*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationEnvironmentListCall), [*locations executions create*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationExecutionCreateCall), [*locations executions delete*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationExecutionDeleteCall), [*locations executions get*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationExecutionGetCall), [*locations executions list*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationExecutionListCall), [*locations 
get*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceDeleteCall), [*locations instances diagnose*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceDiagnoseCall), [*locations instances get*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceGetCall), [*locations instances get iam policy*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceGetIamPolicyCall), [*locations instances get instance health*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceGetInstanceHealthCall), [*locations instances is upgradeable*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceIsUpgradeableCall), [*locations instances list*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceListCall), [*locations instances register*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceRegisterCall), [*locations instances report*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceReportCall), [*locations instances reset*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceResetCall), [*locations instances rollback*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceRollbackCall), [*locations instances set 
accelerator*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceSetAcceleratorCall), [*locations instances set iam policy*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceSetIamPolicyCall), [*locations instances set labels*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceSetLabelCall), [*locations instances set machine type*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceSetMachineTypeCall), [*locations instances start*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceStartCall), [*locations instances stop*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceStopCall), [*locations instances test iam permissions*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceTestIamPermissionCall), [*locations instances update config*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceUpdateConfigCall), [*locations instances update metadata items*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceUpdateMetadataItemCall), [*locations instances update shielded instance config*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceUpdateShieldedInstanceConfigCall), [*locations instances upgrade*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceUpgradeCall), [*locations instances upgrade internal*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationInstanceUpgradeInternalCall), [*locations 
list*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationOperationListCall), [*locations runtimes create*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeCreateCall), [*locations runtimes delete*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeDeleteCall), [*locations runtimes diagnose*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeDiagnoseCall), [*locations runtimes get*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeGetCall), [*locations runtimes get iam policy*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeGetIamPolicyCall), [*locations runtimes list*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeListCall), [*locations runtimes patch*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimePatchCall), [*locations runtimes refresh runtime token internal*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeRefreshRuntimeTokenInternalCall), [*locations runtimes report 
event*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeReportEventCall), [*locations runtimes reset*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeResetCall), [*locations runtimes set iam policy*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeSetIamPolicyCall), [*locations runtimes start*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeStartCall), [*locations runtimes stop*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeStopCall), [*locations runtimes switch*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeSwitchCall), [*locations runtimes test iam permissions*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeTestIamPermissionCall), [*locations runtimes upgrade*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationRuntimeUpgradeCall), [*locations schedules create*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationScheduleCreateCall), [*locations schedules delete*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationScheduleDeleteCall), [*locations schedules get*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationScheduleGetCall), [*locations schedules list*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationScheduleListCall) and [*locations schedules trigger*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/api::ProjectLocationScheduleTriggerCall) + * [*locations environments 
create*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationEnvironmentCreateCall), [*locations environments delete*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationEnvironmentDeleteCall), [*locations environments get*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationEnvironmentGetCall), [*locations environments list*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationEnvironmentListCall), [*locations executions create*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationExecutionCreateCall), [*locations executions delete*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationExecutionDeleteCall), [*locations executions get*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationExecutionGetCall), [*locations executions list*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationExecutionListCall), [*locations get*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceDeleteCall), [*locations instances diagnose*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceDiagnoseCall), [*locations instances get*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceGetCall), [*locations instances get iam policy*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceGetIamPolicyCall), [*locations instances get instance 
health*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceGetInstanceHealthCall), [*locations instances is upgradeable*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceIsUpgradeableCall), [*locations instances list*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceListCall), [*locations instances register*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceRegisterCall), [*locations instances report*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceReportCall), [*locations instances reset*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceResetCall), [*locations instances rollback*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceRollbackCall), [*locations instances set accelerator*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceSetAcceleratorCall), [*locations instances set iam policy*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceSetIamPolicyCall), [*locations instances set labels*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceSetLabelCall), [*locations instances set machine type*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceSetMachineTypeCall), [*locations instances start*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceStartCall), [*locations instances stop*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceStopCall), [*locations instances test iam 
permissions*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceTestIamPermissionCall), [*locations instances update config*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceUpdateConfigCall), [*locations instances update metadata items*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceUpdateMetadataItemCall), [*locations instances update shielded instance config*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceUpdateShieldedInstanceConfigCall), [*locations instances upgrade*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceUpgradeCall), [*locations instances upgrade internal*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationInstanceUpgradeInternalCall), [*locations list*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationOperationListCall), [*locations runtimes create*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeCreateCall), [*locations runtimes delete*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeDeleteCall), [*locations runtimes 
diagnose*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeDiagnoseCall), [*locations runtimes get*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeGetCall), [*locations runtimes get iam policy*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeGetIamPolicyCall), [*locations runtimes list*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeListCall), [*locations runtimes patch*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimePatchCall), [*locations runtimes refresh runtime token internal*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeRefreshRuntimeTokenInternalCall), [*locations runtimes report event*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeReportEventCall), [*locations runtimes reset*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeResetCall), [*locations runtimes set iam policy*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeSetIamPolicyCall), [*locations runtimes start*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeStartCall), [*locations runtimes stop*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeStopCall), [*locations runtimes switch*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeSwitchCall), [*locations runtimes test iam permissions*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeTestIamPermissionCall), [*locations runtimes upgrade*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationRuntimeUpgradeCall), 
[*locations schedules create*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationScheduleCreateCall), [*locations schedules delete*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationScheduleDeleteCall), [*locations schedules get*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationScheduleGetCall), [*locations schedules list*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationScheduleListCall) and [*locations schedules trigger*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/api::ProjectLocationScheduleTriggerCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/AIPlatformNotebooks)** +* **[Hub](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/AIPlatformNotebooks)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::CallBuilder) -* **[Resources](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::CallBuilder) +* **[Resources](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::Part)** + * **[Parts](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -159,17 +159,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -179,29 +179,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::Delegate) to the -[Method Builder](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::Delegate) to the +[Method Builder](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::RequestValue) and -[decodable](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::RequestValue) and +[decodable](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-notebooks1/5.0.2-beta-1+20221213/google_notebooks1/client::RequestValue) are moved +* [request values](https://docs.rs/google-notebooks1/5.0.2+20221213/google_notebooks1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/notebooks1/src/api.rs b/gen/notebooks1/src/api.rs index d12074605f..7278f3d06c 100644 --- a/gen/notebooks1/src/api.rs +++ b/gen/notebooks1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> AIPlatformNotebooks { AIPlatformNotebooks { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://notebooks.googleapis.com/".to_string(), _root_url: "https://notebooks.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> AIPlatformNotebooks { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/notebooks1/src/client.rs b/gen/notebooks1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/notebooks1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/notebooks1/src/lib.rs b/gen/notebooks1/src/lib.rs index 33a5647857..b315bad9a9 100644 --- a/gen/notebooks1/src/lib.rs +++ b/gen/notebooks1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *AI Platform Notebooks* crate version *5.0.2-beta-1+20221213*, where *20221213* is the exact revision of the *notebooks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *AI Platform Notebooks* crate version *5.0.2+20221213*, where *20221213* is the exact revision of the *notebooks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *AI Platform Notebooks* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/notebooks/docs/). diff --git a/gen/ondemandscanning1-cli/Cargo.toml b/gen/ondemandscanning1-cli/Cargo.toml index 937a3c4e46..6a516a4acb 100644 --- a/gen/ondemandscanning1-cli/Cargo.toml +++ b/gen/ondemandscanning1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-ondemandscanning1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230109" authors = ["Sebastian Thiel "] description = "A complete library to interact with On-Demand Scanning (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/ondemandscanning1-cli" @@ -20,13 +20,13 @@ name = "ondemandscanning1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-ondemandscanning1] path = "../ondemandscanning1" -version = "4.0.1+20220228" +version = "5.0.2+20230109" + diff --git a/gen/ondemandscanning1-cli/README.md b/gen/ondemandscanning1-cli/README.md index 50a70d72da..8f7550cc96 100644 --- a/gen/ondemandscanning1-cli/README.md +++ 
b/gen/ondemandscanning1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *On-Demand Scanning* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *On-Demand Scanning* API at revision *20230109*. The CLI is at version *5.0.2*. ```bash ondemandscanning1 [options] diff --git a/gen/ondemandscanning1-cli/mkdocs.yml b/gen/ondemandscanning1-cli/mkdocs.yml index aa33eff9f6..bd6197dbb4 100644 --- a/gen/ondemandscanning1-cli/mkdocs.yml +++ b/gen/ondemandscanning1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: On-Demand Scanning v4.0.1+20220228 +site_name: On-Demand Scanning v5.0.2+20230109 site_url: http://byron.github.io/google-apis-rs/google-ondemandscanning1-cli site_description: A complete library to interact with On-Demand Scanning (protocol v1) @@ -7,15 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/ondemandscanning docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-operations-wait.md', 'Projects', 'Locations Operations Wait'] -- ['projects_locations-scans-analyze-packages.md', 'Projects', 'Locations Scans Analyze Packages'] -- ['projects_locations-scans-vulnerabilities-list.md', 'Projects', 'Locations Scans Vulnerabilities List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 
'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Operations Wait': 'projects_locations-operations-wait.md' + - 'Locations Scans Analyze Packages': 'projects_locations-scans-analyze-packages.md' + - 'Locations Scans Vulnerabilities List': 'projects_locations-scans-vulnerabilities-list.md' theme: readthedocs diff --git a/gen/ondemandscanning1-cli/src/client.rs b/gen/ondemandscanning1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/ondemandscanning1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/ondemandscanning1-cli/src/main.rs b/gen/ondemandscanning1-cli/src/main.rs index 33228ba1b2..ef77dcf719 100644 --- a/gen/ondemandscanning1-cli/src/main.rs +++ b/gen/ondemandscanning1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_ondemandscanning1::{api, Error, oauth2}; +use google_ondemandscanning1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -217,7 +216,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -276,7 +275,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "timeout" => { - call = call.timeout(value.unwrap_or("")); + call = call.timeout( value.map(|v| arg_from_str(v, err, "timeout", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -421,7 +420,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -744,7 +743,7 @@ async fn main() { let mut app = App::new("ondemandscanning1") 
.author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230109") .about("A service to scan container images for vulnerabilities.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_ondemandscanning1_cli") .arg(Arg::with_name("url") diff --git a/gen/ondemandscanning1/Cargo.toml b/gen/ondemandscanning1/Cargo.toml index d31e046ace..10d90e108b 100644 --- a/gen/ondemandscanning1/Cargo.toml +++ b/gen/ondemandscanning1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-ondemandscanning1" -version = "5.0.2-beta-1+20230109" +version = "5.0.2+20230109" authors = ["Sebastian Thiel "] description = "A complete library to interact with On-Demand Scanning (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/ondemandscanning1" homepage = "https://cloud.google.com/container-analysis/docs/on-demand-scanning/" -documentation = "https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109" +documentation = "https://docs.rs/google-ondemandscanning1/5.0.2+20230109" license = "MIT" keywords = ["ondemandscanning", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/ondemandscanning1/README.md b/gen/ondemandscanning1/README.md index f51dd6b5cd..cda0914754 100644 --- a/gen/ondemandscanning1/README.md +++ b/gen/ondemandscanning1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-ondemandscanning1` library allows access to all features of the *Google On-Demand Scanning* service. -This documentation was generated from *On-Demand Scanning* crate version *5.0.2-beta-1+20230109*, where *20230109* is the exact revision of the *ondemandscanning:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *On-Demand Scanning* crate version *5.0.2+20230109*, where *20230109* is the exact revision of the *ondemandscanning:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *On-Demand Scanning* *v1* API can be found at the [official documentation site](https://cloud.google.com/container-analysis/docs/on-demand-scanning/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/OnDemandScanning) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/OnDemandScanning) ... * projects - * [*locations operations cancel*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/api::ProjectLocationOperationListCall), [*locations operations wait*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/api::ProjectLocationOperationWaitCall), [*locations scans analyze packages*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/api::ProjectLocationScanAnalyzePackageCall) and [*locations scans vulnerabilities list*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/api::ProjectLocationScanVulnerabilityListCall) + * [*locations operations 
cancel*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/api::ProjectLocationOperationListCall), [*locations operations wait*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/api::ProjectLocationOperationWaitCall), [*locations scans analyze packages*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/api::ProjectLocationScanAnalyzePackageCall) and [*locations scans vulnerabilities list*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/api::ProjectLocationScanVulnerabilityListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/OnDemandScanning)** +* **[Hub](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/OnDemandScanning)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::CallBuilder) -* **[Resources](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::CallBuilder) +* **[Resources](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::Part)** + * **[Parts](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -121,17 +121,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -141,29 +141,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::Delegate) to the -[Method Builder](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::Delegate) to the +[Method Builder](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::RequestValue) and -[decodable](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::RequestValue) and +[decodable](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-ondemandscanning1/5.0.2-beta-1+20230109/google_ondemandscanning1/client::RequestValue) are moved +* [request values](https://docs.rs/google-ondemandscanning1/5.0.2+20230109/google_ondemandscanning1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/ondemandscanning1/src/api.rs b/gen/ondemandscanning1/src/api.rs index 83ce1ac805..3180459192 100644 --- a/gen/ondemandscanning1/src/api.rs +++ b/gen/ondemandscanning1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> OnDemandScanning { OnDemandScanning { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://ondemandscanning.googleapis.com/".to_string(), _root_url: "https://ondemandscanning.googleapis.com/".to_string(), } @@ -131,7 +131,7 @@ impl<'a, S> OnDemandScanning { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/ondemandscanning1/src/client.rs b/gen/ondemandscanning1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/ondemandscanning1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/ondemandscanning1/src/lib.rs b/gen/ondemandscanning1/src/lib.rs index 2f3248d74d..0b3b4ed87a 100644 --- a/gen/ondemandscanning1/src/lib.rs +++ b/gen/ondemandscanning1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *On-Demand Scanning* crate version *5.0.2-beta-1+20230109*, where *20230109* is the exact revision of the *ondemandscanning:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *On-Demand Scanning* crate version *5.0.2+20230109*, where *20230109* is the exact revision of the *ondemandscanning:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *On-Demand Scanning* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/container-analysis/docs/on-demand-scanning/). diff --git a/gen/orgpolicy2-cli/Cargo.toml b/gen/orgpolicy2-cli/Cargo.toml index bae7ba6a2b..9f701e6844 100644 --- a/gen/orgpolicy2-cli/Cargo.toml +++ b/gen/orgpolicy2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-orgpolicy2-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with OrgPolicy API (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/orgpolicy2-cli" @@ -20,13 +20,13 @@ name = "orgpolicy2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-orgpolicy2] path = "../orgpolicy2" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/orgpolicy2-cli/README.md b/gen/orgpolicy2-cli/README.md index b9c01bb1cc..84ad5da5be 100644 --- a/gen/orgpolicy2-cli/README.md +++ b/gen/orgpolicy2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *OrgPolicy API* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *OrgPolicy API* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash orgpolicy2 [options] @@ -39,6 +39,11 @@ orgpolicy2 [options] policies-patch (-r )... [-p ]... [-o ] organizations constraints-list [-p ]... [-o ] + custom-constraints-create (-r )... [-p ]... [-o ] + custom-constraints-delete [-p ]... [-o ] + custom-constraints-get [-p ]... [-o ] + custom-constraints-list [-p ]... [-o ] + custom-constraints-patch (-r )... [-p ]... [-o ] policies-create (-r )... [-p ]... [-o ] policies-delete [-p ]... [-o ] policies-get [-p ]... [-o ] diff --git a/gen/orgpolicy2-cli/mkdocs.yml b/gen/orgpolicy2-cli/mkdocs.yml index 6aaa0efb12..3fdacd9cc3 100644 --- a/gen/orgpolicy2-cli/mkdocs.yml +++ b/gen/orgpolicy2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: OrgPolicy API v4.0.1+20220305 +site_name: OrgPolicy API v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-orgpolicy2-cli site_description: A complete library to interact with OrgPolicy API (protocol v2) @@ -7,29 +7,37 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/orgpolicy2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_constraints-list.md', 'Folders', 'Constraints List'] -- ['folders_policies-create.md', 'Folders', 'Policies Create'] -- ['folders_policies-delete.md', 'Folders', 'Policies Delete'] -- ['folders_policies-get.md', 'Folders', 'Policies Get'] -- ['folders_policies-get-effective-policy.md', 'Folders', 'Policies Get Effective Policy'] -- ['folders_policies-list.md', 'Folders', 'Policies List'] -- ['folders_policies-patch.md', 'Folders', 'Policies Patch'] -- ['organizations_constraints-list.md', 'Organizations', 'Constraints List'] -- ['organizations_policies-create.md', 'Organizations', 'Policies Create'] 
-- ['organizations_policies-delete.md', 'Organizations', 'Policies Delete'] -- ['organizations_policies-get.md', 'Organizations', 'Policies Get'] -- ['organizations_policies-get-effective-policy.md', 'Organizations', 'Policies Get Effective Policy'] -- ['organizations_policies-list.md', 'Organizations', 'Policies List'] -- ['organizations_policies-patch.md', 'Organizations', 'Policies Patch'] -- ['projects_constraints-list.md', 'Projects', 'Constraints List'] -- ['projects_policies-create.md', 'Projects', 'Policies Create'] -- ['projects_policies-delete.md', 'Projects', 'Policies Delete'] -- ['projects_policies-get.md', 'Projects', 'Policies Get'] -- ['projects_policies-get-effective-policy.md', 'Projects', 'Policies Get Effective Policy'] -- ['projects_policies-list.md', 'Projects', 'Policies List'] -- ['projects_policies-patch.md', 'Projects', 'Policies Patch'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Constraints List': 'folders_constraints-list.md' + - 'Policies Create': 'folders_policies-create.md' + - 'Policies Delete': 'folders_policies-delete.md' + - 'Policies Get': 'folders_policies-get.md' + - 'Policies Get Effective Policy': 'folders_policies-get-effective-policy.md' + - 'Policies List': 'folders_policies-list.md' + - 'Policies Patch': 'folders_policies-patch.md' +- 'Organizations': + - 'Constraints List': 'organizations_constraints-list.md' + - 'Custom Constraints Create': 'organizations_custom-constraints-create.md' + - 'Custom Constraints Delete': 'organizations_custom-constraints-delete.md' + - 'Custom Constraints Get': 'organizations_custom-constraints-get.md' + - 'Custom Constraints List': 'organizations_custom-constraints-list.md' + - 'Custom Constraints Patch': 'organizations_custom-constraints-patch.md' + - 'Policies Create': 'organizations_policies-create.md' + - 'Policies Delete': 'organizations_policies-delete.md' + - 'Policies Get': 'organizations_policies-get.md' + - 'Policies Get Effective Policy': 
'organizations_policies-get-effective-policy.md' + - 'Policies List': 'organizations_policies-list.md' + - 'Policies Patch': 'organizations_policies-patch.md' +- 'Projects': + - 'Constraints List': 'projects_constraints-list.md' + - 'Policies Create': 'projects_policies-create.md' + - 'Policies Delete': 'projects_policies-delete.md' + - 'Policies Get': 'projects_policies-get.md' + - 'Policies Get Effective Policy': 'projects_policies-get-effective-policy.md' + - 'Policies List': 'projects_policies-list.md' + - 'Policies Patch': 'projects_policies-patch.md' theme: readthedocs diff --git a/gen/orgpolicy2-cli/src/client.rs b/gen/orgpolicy2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/orgpolicy2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/orgpolicy2-cli/src/main.rs b/gen/orgpolicy2-cli/src/main.rs index 194e83f100..c7491a0266 100644 --- a/gen/orgpolicy2-cli/src/main.rs +++ b/gen/orgpolicy2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_orgpolicy2::{api, Error, oauth2}; +use google_orgpolicy2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -138,13 +137,17 @@ where "alternate.spec.inherit-from-parent" => Some(("alternate.spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.reset" => Some(("alternate.spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.update-time" => Some(("alternate.spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.etag" => Some(("dryRunSpec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.inherit-from-parent" => Some(("dryRunSpec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.reset" => Some(("dryRunSpec.reset", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.update-time" => Some(("dryRunSpec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.etag" => Some(("spec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.inherit-from-parent" => Some(("spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.reset" => Some(("spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.update-time" => Some(("spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "dry-run-spec", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -370,7 +373,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -447,13 +450,17 @@ where "alternate.spec.inherit-from-parent" => Some(("alternate.spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.reset" => Some(("alternate.spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.update-time" => Some(("alternate.spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.etag" => Some(("dryRunSpec.etag", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.inherit-from-parent" => Some(("dryRunSpec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.reset" => Some(("dryRunSpec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.update-time" => Some(("dryRunSpec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.etag" => Some(("spec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.inherit-from-parent" => Some(("spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.reset" => Some(("spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.update-time" => Some(("spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "dry-run-spec", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -464,6 +471,161 @@ where } let mut request: api::GoogleCloudOrgpolicyV2Policy = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().policies_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_constraints_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().constraints_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp 
{ + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_custom_constraints_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: 
Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "action-type" => Some(("actionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "condition" => Some(("condition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "method-types" => Some(("methodTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-types" => Some(("resourceTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["action-type", "condition", "description", "display-name", "method-types", "name", "resource-types", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudOrgpolicyV2CustomConstraint = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().custom_constraints_create(request, opt.value_of("parent").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -513,9 +675,113 @@ where } } - async fn _organizations_constraints_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn 
_organizations_custom_constraints_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { - let mut call = self.hub.organizations().constraints_list(opt.value_of("parent").unwrap_or("")); + let mut call = self.hub.organizations().custom_constraints_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_custom_constraints_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().custom_constraints_get(opt.value_of("name").unwrap_or("")); + 
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_custom_constraints_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().custom_constraints_list(opt.value_of("parent").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -523,7 +789,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -572,6 +838,98 @@ where } } + async fn _organizations_custom_constraints_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "action-type" => Some(("actionType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "condition" => Some(("condition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "method-types" => Some(("methodTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "resource-types" => Some(("resourceTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["action-type", "condition", "description", "display-name", "method-types", "name", 
"resource-types", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudOrgpolicyV2CustomConstraint = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().custom_constraints_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn 
_organizations_policies_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -600,13 +958,17 @@ where "alternate.spec.inherit-from-parent" => Some(("alternate.spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.reset" => Some(("alternate.spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.update-time" => Some(("alternate.spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.etag" => Some(("dryRunSpec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.inherit-from-parent" => Some(("dryRunSpec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.reset" => Some(("dryRunSpec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.update-time" => Some(("dryRunSpec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.etag" => Some(("spec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.inherit-from-parent" => Some(("spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.reset" => Some(("spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.update-time" => Some(("spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "dry-run-spec", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -832,7 +1194,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -909,13 +1271,17 @@ where "alternate.spec.inherit-from-parent" => Some(("alternate.spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.reset" => Some(("alternate.spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.update-time" => Some(("alternate.spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.etag" => Some(("dryRunSpec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.inherit-from-parent" => Some(("dryRunSpec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.reset" => Some(("dryRunSpec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.update-time" => Some(("dryRunSpec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.etag" => Some(("spec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.inherit-from-parent" => Some(("spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.reset" => Some(("spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.update-time" => Some(("spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = 
FieldCursor::did_you_mean(key, &vec!["alternate", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "dry-run-spec", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -929,6 +1295,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, _ => { let mut found = false; for param in &self.gp { @@ -942,6 +1311,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); v } )); } } @@ -985,7 +1355,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1062,13 +1432,17 @@ where "alternate.spec.inherit-from-parent" => Some(("alternate.spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.reset" => Some(("alternate.spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.update-time" => Some(("alternate.spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.etag" => Some(("dryRunSpec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.inherit-from-parent" => Some(("dryRunSpec.inheritFromParent", JsonTypeInfo { 
jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.reset" => Some(("dryRunSpec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.update-time" => Some(("dryRunSpec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.etag" => Some(("spec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.inherit-from-parent" => Some(("spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.reset" => Some(("spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.update-time" => Some(("spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "dry-run-spec", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1294,7 +1668,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1371,13 +1745,17 @@ where "alternate.spec.inherit-from-parent" => Some(("alternate.spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.reset" => Some(("alternate.spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "alternate.spec.update-time" => 
Some(("alternate.spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.etag" => Some(("dryRunSpec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dry-run-spec.inherit-from-parent" => Some(("dryRunSpec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.reset" => Some(("dryRunSpec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "dry-run-spec.update-time" => Some(("dryRunSpec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.etag" => Some(("spec.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.inherit-from-parent" => Some(("spec.inheritFromParent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.reset" => Some(("spec.reset", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "spec.update-time" => Some(("spec.updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alternate", "dry-run-spec", "etag", "inherit-from-parent", "launch", "name", "reset", "spec", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1391,6 +1769,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, _ => { let mut found = false; for 
param in &self.gp { @@ -1404,6 +1785,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); v } )); } } @@ -1476,6 +1858,21 @@ where ("constraints-list", Some(opt)) => { call_result = self._organizations_constraints_list(opt, dry_run, &mut err).await; }, + ("custom-constraints-create", Some(opt)) => { + call_result = self._organizations_custom_constraints_create(opt, dry_run, &mut err).await; + }, + ("custom-constraints-delete", Some(opt)) => { + call_result = self._organizations_custom_constraints_delete(opt, dry_run, &mut err).await; + }, + ("custom-constraints-get", Some(opt)) => { + call_result = self._organizations_custom_constraints_get(opt, dry_run, &mut err).await; + }, + ("custom-constraints-list", Some(opt)) => { + call_result = self._organizations_custom_constraints_list(opt, dry_run, &mut err).await; + }, + ("custom-constraints-patch", Some(opt)) => { + call_result = self._organizations_custom_constraints_patch(opt, dry_run, &mut err).await; + }, ("policies-create", Some(opt)) => { call_result = self._organizations_policies_create(opt, dry_run, &mut err).await; }, @@ -1771,7 +2168,7 @@ async fn main() { ]), ]), - ("organizations", "methods: 'constraints-list', 'policies-create', 'policies-delete', 'policies-get', 'policies-get-effective-policy', 'policies-list' and 'policies-patch'", vec![ + ("organizations", "methods: 'constraints-list', 'custom-constraints-create', 'custom-constraints-delete', 'custom-constraints-get', 'custom-constraints-list', 'custom-constraints-patch', 'policies-create', 'policies-delete', 'policies-get', 'policies-get-effective-policy', 'policies-list' and 'policies-patch'", vec![ ("constraints-list", Some(r##"Lists `Constraints` that could be applied on the specified resource."##), "Details at http://byron.github.io/google-apis-rs/google_orgpolicy2_cli/organizations_constraints-list", @@ -1788,6 +2185,128 
@@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("custom-constraints-create", + Some(r##"Creates a CustomConstraint. Returns a `google.rpc.Status` with `google.rpc.Code.NOT_FOUND` if the organization does not exist. Returns a `google.rpc.Status` with `google.rpc.Code.ALREADY_EXISTS` if the constraint already exists on the given organization."##), + "Details at http://byron.github.io/google-apis-rs/google_orgpolicy2_cli/organizations_custom-constraints-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Must be in the following form: * `organizations/{organization_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("custom-constraints-delete", + Some(r##"Deletes a Custom Constraint. Returns a `google.rpc.Status` with `google.rpc.Code.NOT_FOUND` if the constraint does not exist."##), + "Details at http://byron.github.io/google-apis-rs/google_orgpolicy2_cli/organizations_custom-constraints-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the custom constraint to delete. 
See `CustomConstraint` for naming rules."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("custom-constraints-get", + Some(r##"Gets a CustomConstraint. Returns a `google.rpc.Status` with `google.rpc.Code.NOT_FOUND` if the CustomConstraint does not exist."##), + "Details at http://byron.github.io/google-apis-rs/google_orgpolicy2_cli/organizations_custom-constraints-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Resource name of the custom constraint. See `CustomConstraint` for naming requirements."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("custom-constraints-list", + Some(r##"Retrieves all of the `CustomConstraints` that exist on a particular organization resource."##), + "Details at http://byron.github.io/google-apis-rs/google_orgpolicy2_cli/organizations_custom-constraints-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The target Cloud resource that parents the set of custom constraints that will be returned from this call. 
Must be in one of the following forms: * `organizations/{organization_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("custom-constraints-patch", + Some(r##"Updates a Custom Constraint. Returns a `google.rpc.Status` with `google.rpc.Code.NOT_FOUND` if the constraint does not exist. Note: the supplied policy will perform a full overwrite of all fields."##), + "Details at http://byron.github.io/google-apis-rs/google_orgpolicy2_cli/organizations_custom-constraints-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Immutable. Name of the constraint. This is unique within the organization. Format of the name should be * `organizations/{organization_id}/customConstraints/{custom_constraint_id}` Example : "organizations/123/customConstraints/custom.createOnlyE2TypeVms" The max length is 70 characters and the min length is 1. 
Note that the prefix "organizations/{organization_id}/customConstraints/" is not counted."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2113,7 +2632,7 @@ async fn main() { let mut app = App::new("orgpolicy2") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("The Org Policy API allows users to configure governance rules on their GCP resources across the Cloud Resource Hierarchy.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_orgpolicy2_cli") .arg(Arg::with_name("url") diff --git a/gen/orgpolicy2/Cargo.toml b/gen/orgpolicy2/Cargo.toml index 4c595753b9..f5d1f8079c 100644 --- a/gen/orgpolicy2/Cargo.toml +++ b/gen/orgpolicy2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-orgpolicy2" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with OrgPolicy API (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/orgpolicy2" homepage = "https://cloud.google.com/orgpolicy/docs/reference/rest/index.html" -documentation = "https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-orgpolicy2/5.0.2+20230123" license = "MIT" keywords = ["orgpolicy", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/orgpolicy2/README.md b/gen/orgpolicy2/README.md index 0eafb2d1c8..1aef75c811 100644 --- a/gen/orgpolicy2/README.md +++ b/gen/orgpolicy2/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! 
--> The `google-orgpolicy2` library allows access to all features of the *Google OrgPolicy API* service. -This documentation was generated from *OrgPolicy API* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *orgpolicy:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *OrgPolicy API* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *orgpolicy:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *OrgPolicy API* *v2* API can be found at the [official documentation site](https://cloud.google.com/orgpolicy/docs/reference/rest/index.html). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/OrgPolicyAPI) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/OrgPolicyAPI) ... 
* folders - * [*constraints list*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::FolderConstraintListCall), [*policies create*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::FolderPolicyCreateCall), [*policies delete*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::FolderPolicyDeleteCall), [*policies get*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::FolderPolicyGetCall), [*policies get effective policy*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::FolderPolicyGetEffectivePolicyCall), [*policies list*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::FolderPolicyListCall) and [*policies patch*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::FolderPolicyPatchCall) + * [*constraints list*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::FolderConstraintListCall), [*policies create*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::FolderPolicyCreateCall), [*policies delete*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::FolderPolicyDeleteCall), [*policies get*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::FolderPolicyGetCall), [*policies get effective policy*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::FolderPolicyGetEffectivePolicyCall), [*policies list*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::FolderPolicyListCall) and [*policies patch*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::FolderPolicyPatchCall) * organizations - * [*constraints list*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationConstraintListCall), [*custom constraints 
create*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationCustomConstraintCreateCall), [*custom constraints delete*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationCustomConstraintDeleteCall), [*custom constraints get*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationCustomConstraintGetCall), [*custom constraints list*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationCustomConstraintListCall), [*custom constraints patch*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationCustomConstraintPatchCall), [*policies create*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationPolicyCreateCall), [*policies delete*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationPolicyDeleteCall), [*policies get*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationPolicyGetCall), [*policies get effective policy*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationPolicyGetEffectivePolicyCall), [*policies list*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationPolicyListCall) and [*policies patch*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::OrganizationPolicyPatchCall) + * [*constraints list*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationConstraintListCall), [*custom constraints create*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationCustomConstraintCreateCall), [*custom constraints delete*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationCustomConstraintDeleteCall), [*custom constraints 
get*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationCustomConstraintGetCall), [*custom constraints list*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationCustomConstraintListCall), [*custom constraints patch*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationCustomConstraintPatchCall), [*policies create*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationPolicyCreateCall), [*policies delete*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationPolicyDeleteCall), [*policies get*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationPolicyGetCall), [*policies get effective policy*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationPolicyGetEffectivePolicyCall), [*policies list*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationPolicyListCall) and [*policies patch*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::OrganizationPolicyPatchCall) * projects - * [*constraints list*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::ProjectConstraintListCall), [*policies create*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::ProjectPolicyCreateCall), [*policies delete*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::ProjectPolicyDeleteCall), [*policies get*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::ProjectPolicyGetCall), [*policies get effective policy*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::ProjectPolicyGetEffectivePolicyCall), [*policies list*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::ProjectPolicyListCall) and [*policies 
patch*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/api::ProjectPolicyPatchCall) + * [*constraints list*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::ProjectConstraintListCall), [*policies create*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::ProjectPolicyCreateCall), [*policies delete*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::ProjectPolicyDeleteCall), [*policies get*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::ProjectPolicyGetCall), [*policies get effective policy*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::ProjectPolicyGetEffectivePolicyCall), [*policies list*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::ProjectPolicyListCall) and [*policies patch*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/api::ProjectPolicyPatchCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/OrgPolicyAPI)** +* **[Hub](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/OrgPolicyAPI)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::CallBuilder) -* **[Resources](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::CallBuilder) +* **[Resources](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::Part)** + * **[Parts](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -140,17 +140,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. 
This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -160,29 +160,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::Delegate) to the -[Method Builder](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::Delegate) to the +[Method Builder](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::RequestValue) and -[decodable](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::RequestValue) and +[decodable](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-orgpolicy2/5.0.2-beta-1+20230123/google_orgpolicy2/client::RequestValue) are moved +* [request values](https://docs.rs/google-orgpolicy2/5.0.2+20230123/google_orgpolicy2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/orgpolicy2/src/api.rs b/gen/orgpolicy2/src/api.rs index 4686dbc397..f97158886f 100644 --- a/gen/orgpolicy2/src/api.rs +++ b/gen/orgpolicy2/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> OrgPolicyAPI { OrgPolicyAPI { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://orgpolicy.googleapis.com/".to_string(), _root_url: "https://orgpolicy.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> OrgPolicyAPI { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/orgpolicy2/src/client.rs b/gen/orgpolicy2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/orgpolicy2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/orgpolicy2/src/lib.rs b/gen/orgpolicy2/src/lib.rs index 1ce5cfe97c..c8f8981962 100644 --- a/gen/orgpolicy2/src/lib.rs +++ b/gen/orgpolicy2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *OrgPolicy API* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *orgpolicy:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *OrgPolicy API* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *orgpolicy:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *OrgPolicy API* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/orgpolicy/docs/reference/rest/index.html). diff --git a/gen/oslogin1-cli/Cargo.toml b/gen/oslogin1-cli/Cargo.toml index c8e10302fc..34e379f182 100644 --- a/gen/oslogin1-cli/Cargo.toml +++ b/gen/oslogin1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-oslogin1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud OS Login (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/oslogin1-cli" @@ -20,13 +20,13 @@ name = "oslogin1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-oslogin1] path = "../oslogin1" -version = "4.0.1+20220228" +version = "5.0.2+20230115" + diff --git a/gen/oslogin1-cli/README.md b/gen/oslogin1-cli/README.md index 95ad43126d..b1ba806dd4 100644 --- a/gen/oslogin1-cli/README.md +++ b/gen/oslogin1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # 
Usage -This documentation was generated from the *Cloud OS Login* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud OS Login* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash oslogin1 [options] diff --git a/gen/oslogin1-cli/mkdocs.yml b/gen/oslogin1-cli/mkdocs.yml index ee737df8db..1bb3783126 100644 --- a/gen/oslogin1-cli/mkdocs.yml +++ b/gen/oslogin1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud OS Login v4.0.1+20220228 +site_name: Cloud OS Login v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-oslogin1-cli site_description: A complete library to interact with Cloud OS Login (protocol v1) @@ -7,15 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/oslogin1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['users_get-login-profile.md', 'Users', 'Get Login Profile'] -- ['users_import-ssh-public-key.md', 'Users', 'Import Ssh Public Key'] -- ['users_projects-delete.md', 'Users', 'Projects Delete'] -- ['users_ssh-public-keys-create.md', 'Users', 'Ssh Public Keys Create'] -- ['users_ssh-public-keys-delete.md', 'Users', 'Ssh Public Keys Delete'] -- ['users_ssh-public-keys-get.md', 'Users', 'Ssh Public Keys Get'] -- ['users_ssh-public-keys-patch.md', 'Users', 'Ssh Public Keys Patch'] +nav: +- Home: 'index.md' +- 'Users': + - 'Get Login Profile': 'users_get-login-profile.md' + - 'Import Ssh Public Key': 'users_import-ssh-public-key.md' + - 'Projects Delete': 'users_projects-delete.md' + - 'Ssh Public Keys Create': 'users_ssh-public-keys-create.md' + - 'Ssh Public Keys Delete': 'users_ssh-public-keys-delete.md' + - 'Ssh Public Keys Get': 'users_ssh-public-keys-get.md' + - 'Ssh Public Keys Patch': 'users_ssh-public-keys-patch.md' theme: readthedocs diff --git a/gen/oslogin1-cli/src/client.rs b/gen/oslogin1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/oslogin1-cli/src/client.rs +++ /dev/null 
@@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - 
Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - 
num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, 
"int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - 
err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - 
DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref 
s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/oslogin1-cli/src/main.rs b/gen/oslogin1-cli/src/main.rs index 14d4cd216e..ffc4edc3b3 100644 --- a/gen/oslogin1-cli/src/main.rs +++ b/gen/oslogin1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_oslogin1::{api, Error, oauth2}; +use google_oslogin1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -489,7 +488,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -824,7 +823,7 @@ async fn main() { let mut app = App::new("oslogin1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230115") .about("You can use OS Login to manage access to your VM instances using IAM roles.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_oslogin1_cli") .arg(Arg::with_name("url") diff --git a/gen/oslogin1/Cargo.toml b/gen/oslogin1/Cargo.toml index 7bb514e0e6..fed105af35 100644 --- a/gen/oslogin1/Cargo.toml +++ b/gen/oslogin1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-oslogin1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud OS Login (protocol v1)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/oslogin1" homepage = "https://cloud.google.com/compute/docs/oslogin/" -documentation = "https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-oslogin1/5.0.2+20230115" license = "MIT" keywords = ["oslogin", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/oslogin1/README.md b/gen/oslogin1/README.md index f061294f1c..f93300decb 100644 --- a/gen/oslogin1/README.md +++ b/gen/oslogin1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-oslogin1` library allows access to all features of the *Google Cloud OS Login* service. -This documentation was generated from *Cloud OS Login* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *oslogin:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud OS Login* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *oslogin:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud OS Login* *v1* API can be found at the [official documentation site](https://cloud.google.com/compute/docs/oslogin/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/CloudOSLogin) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/CloudOSLogin) ... 
* users - * [*get login profile*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/api::UserGetLoginProfileCall), [*import ssh public key*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/api::UserImportSshPublicKeyCall), [*projects delete*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/api::UserProjectDeleteCall), [*ssh public keys create*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/api::UserSshPublicKeyCreateCall), [*ssh public keys delete*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/api::UserSshPublicKeyDeleteCall), [*ssh public keys get*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/api::UserSshPublicKeyGetCall) and [*ssh public keys patch*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/api::UserSshPublicKeyPatchCall) + * [*get login profile*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/api::UserGetLoginProfileCall), [*import ssh public key*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/api::UserImportSshPublicKeyCall), [*projects delete*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/api::UserProjectDeleteCall), [*ssh public keys create*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/api::UserSshPublicKeyCreateCall), [*ssh public keys delete*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/api::UserSshPublicKeyDeleteCall), [*ssh public keys get*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/api::UserSshPublicKeyGetCall) and [*ssh public keys patch*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/api::UserSshPublicKeyPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/CloudOSLogin)** +* **[Hub](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/CloudOSLogin)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::CallBuilder) -* **[Resources](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::CallBuilder) +* **[Resources](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::Part)** + * **[Parts](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::Delegate) to the -[Method Builder](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::Delegate) to the +[Method Builder](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::RequestValue) and -[decodable](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::RequestValue) and +[decodable](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-oslogin1/5.0.2-beta-1+20230115/google_oslogin1/client::RequestValue) are moved +* [request values](https://docs.rs/google-oslogin1/5.0.2+20230115/google_oslogin1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/oslogin1/src/api.rs b/gen/oslogin1/src/api.rs index e77a05120d..56c7ef6441 100644 --- a/gen/oslogin1/src/api.rs +++ b/gen/oslogin1/src/api.rs @@ -138,7 +138,7 @@ impl<'a, S> CloudOSLogin { CloudOSLogin { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://oslogin.googleapis.com/".to_string(), _root_url: "https://oslogin.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> CloudOSLogin { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/oslogin1/src/client.rs b/gen/oslogin1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/oslogin1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/oslogin1/src/lib.rs b/gen/oslogin1/src/lib.rs index 3af72056bb..024cc1c7b1 100644 --- a/gen/oslogin1/src/lib.rs +++ b/gen/oslogin1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud OS Login* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *oslogin:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud OS Login* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *oslogin:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud OS Login* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/compute/docs/oslogin/). diff --git a/gen/oslogin1_beta-cli/Cargo.toml b/gen/oslogin1_beta-cli/Cargo.toml index f492a50ec4..bb0574e2ec 100644 --- a/gen/oslogin1_beta-cli/Cargo.toml +++ b/gen/oslogin1_beta-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-oslogin1_beta-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud OS Login (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/oslogin1_beta-cli" @@ -20,13 +20,13 @@ name = "oslogin1-beta" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-oslogin1_beta] path = "../oslogin1_beta" -version = "4.0.1+20220228" +version = "5.0.2+20230115" + diff --git a/gen/oslogin1_beta-cli/README.md b/gen/oslogin1_beta-cli/README.md index 1e5bb40846..b9cfc4c3be 100644 --- a/gen/oslogin1_beta-cli/README.md +++ b/gen/oslogin1_beta-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud OS Login* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud OS Login* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash oslogin1-beta [options] diff --git a/gen/oslogin1_beta-cli/mkdocs.yml b/gen/oslogin1_beta-cli/mkdocs.yml index 7b967de77c..a32b59d824 100644 --- a/gen/oslogin1_beta-cli/mkdocs.yml +++ b/gen/oslogin1_beta-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud OS Login v4.0.1+20220228 +site_name: Cloud OS Login v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-oslogin1_beta-cli site_description: A complete library to interact with Cloud OS Login (protocol v1beta) @@ -7,15 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/oslogin1_beta-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['users_get-login-profile.md', 'Users', 'Get Login Profile'] -- ['users_import-ssh-public-key.md', 'Users', 'Import Ssh Public Key'] -- ['users_projects-delete.md', 'Users', 'Projects Delete'] -- ['users_ssh-public-keys-create.md', 'Users', 'Ssh Public Keys Create'] -- ['users_ssh-public-keys-delete.md', 'Users', 'Ssh Public Keys Delete'] -- ['users_ssh-public-keys-get.md', 'Users', 'Ssh Public Keys Get'] -- ['users_ssh-public-keys-patch.md', 'Users', 'Ssh Public Keys Patch'] +nav: +- Home: 'index.md' +- 'Users': + - 'Get Login Profile': 'users_get-login-profile.md' + - 'Import Ssh Public Key': 'users_import-ssh-public-key.md' + - 'Projects Delete': 'users_projects-delete.md' + - 'Ssh Public Keys Create': 'users_ssh-public-keys-create.md' + - 'Ssh Public Keys Delete': 'users_ssh-public-keys-delete.md' + - 'Ssh Public Keys Get': 'users_ssh-public-keys-get.md' + - 'Ssh Public Keys Patch': 'users_ssh-public-keys-patch.md' theme: readthedocs diff --git a/gen/oslogin1_beta-cli/src/client.rs b/gen/oslogin1_beta-cli/src/client.rs 
deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/oslogin1_beta-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match 
candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return 
Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - 
Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut 
InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset 
in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - 
FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/oslogin1_beta-cli/src/main.rs b/gen/oslogin1_beta-cli/src/main.rs index 5ca356c780..25e1aac06b 100644 --- a/gen/oslogin1_beta-cli/src/main.rs +++ b/gen/oslogin1_beta-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_oslogin1_beta::{api, Error, oauth2}; +use google_oslogin1_beta::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -495,7 +494,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -830,7 +829,7 @@ async fn main() { let mut app = App::new("oslogin1-beta") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230115") .about("You can use OS Login to manage access to your VM instances using IAM roles.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_oslogin1_beta_cli") .arg(Arg::with_name("url") diff --git a/gen/oslogin1_beta/Cargo.toml b/gen/oslogin1_beta/Cargo.toml index d81719527c..382637e792 100644 --- a/gen/oslogin1_beta/Cargo.toml +++ b/gen/oslogin1_beta/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-oslogin1_beta" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to 
interact with Cloud OS Login (protocol v1beta)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/oslogin1_beta" homepage = "https://cloud.google.com/compute/docs/oslogin/" -documentation = "https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-oslogin1_beta/5.0.2+20230115" license = "MIT" keywords = ["oslogin", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/oslogin1_beta/README.md b/gen/oslogin1_beta/README.md index a2500ce084..cf4e7f28de 100644 --- a/gen/oslogin1_beta/README.md +++ b/gen/oslogin1_beta/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-oslogin1_beta` library allows access to all features of the *Google Cloud OS Login* service. -This documentation was generated from *Cloud OS Login* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *oslogin:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud OS Login* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *oslogin:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud OS Login* *v1_beta* API can be found at the [official documentation site](https://cloud.google.com/compute/docs/oslogin/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/CloudOSLogin) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/CloudOSLogin) ... 
* users - * [*get login profile*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/api::UserGetLoginProfileCall), [*import ssh public key*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/api::UserImportSshPublicKeyCall), [*projects delete*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/api::UserProjectDeleteCall), [*ssh public keys create*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/api::UserSshPublicKeyCreateCall), [*ssh public keys delete*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/api::UserSshPublicKeyDeleteCall), [*ssh public keys get*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/api::UserSshPublicKeyGetCall) and [*ssh public keys patch*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/api::UserSshPublicKeyPatchCall) + * [*get login profile*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/api::UserGetLoginProfileCall), [*import ssh public key*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/api::UserImportSshPublicKeyCall), [*projects delete*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/api::UserProjectDeleteCall), [*ssh public keys create*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/api::UserSshPublicKeyCreateCall), [*ssh public keys delete*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/api::UserSshPublicKeyDeleteCall), [*ssh public keys get*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/api::UserSshPublicKeyGetCall) and [*ssh public keys patch*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/api::UserSshPublicKeyPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is 
structured into the following primary items: -* **[Hub](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/CloudOSLogin)** +* **[Hub](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/CloudOSLogin)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::CallBuilder) -* **[Resources](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::CallBuilder) +* **[Resources](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::Part)** + * **[Parts](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::CallBuilder)** +* **[Activities](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::Delegate) to the -[Method Builder](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::Delegate) to the +[Method Builder](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::RequestValue) and -[decodable](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::RequestValue) and +[decodable](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-oslogin1_beta/5.0.2-beta-1+20230115/google_oslogin1_beta/client::RequestValue) are moved +* [request values](https://docs.rs/google-oslogin1_beta/5.0.2+20230115/google_oslogin1_beta/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/oslogin1_beta/src/api.rs b/gen/oslogin1_beta/src/api.rs index e875b287e0..6eb02b28a7 100644 --- a/gen/oslogin1_beta/src/api.rs +++ b/gen/oslogin1_beta/src/api.rs @@ -139,7 +139,7 @@ impl<'a, S> CloudOSLogin { CloudOSLogin { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://oslogin.googleapis.com/".to_string(), _root_url: "https://oslogin.googleapis.com/".to_string(), } @@ -150,7 +150,7 @@ impl<'a, S> CloudOSLogin { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/oslogin1_beta/src/client.rs b/gen/oslogin1_beta/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/oslogin1_beta/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/oslogin1_beta/src/lib.rs b/gen/oslogin1_beta/src/lib.rs index 8ef655d282..6276087cd5 100644 --- a/gen/oslogin1_beta/src/lib.rs +++ b/gen/oslogin1_beta/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud OS Login* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *oslogin:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud OS Login* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *oslogin:v1beta* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud OS Login* *v1_beta* API can be found at the //! [official documentation site](https://cloud.google.com/compute/docs/oslogin/). diff --git a/gen/pagespeedonline2-cli/Cargo.toml b/gen/pagespeedonline2-cli/Cargo.toml index 3093157ba2..b100eaf5b3 100644 --- a/gen/pagespeedonline2-cli/Cargo.toml +++ b/gen/pagespeedonline2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-pagespeedonline2-cli" -version = "4.0.1+20191206" +version = "5.0.2+20191206" authors = ["Sebastian Thiel "] description = "A complete library to interact with pagespeedonline (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline2-cli" @@ -20,13 +20,13 @@ name = "pagespeedonline2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-pagespeedonline2] path = "../pagespeedonline2" -version = "4.0.1+20191206" +version = "5.0.2+20191206" + diff --git a/gen/pagespeedonline2-cli/README.md b/gen/pagespeedonline2-cli/README.md index d099b0fa83..cf7cb4bdba 100644 --- a/gen/pagespeedonline2-cli/README.md +++ b/gen/pagespeedonline2-cli/README.md @@ 
-25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *pagespeedonline* API at revision *20191206*. The CLI is at version *4.0.1*. +This documentation was generated from the *pagespeedonline* API at revision *20191206*. The CLI is at version *5.0.2*. ```bash pagespeedonline2 [options] diff --git a/gen/pagespeedonline2-cli/mkdocs.yml b/gen/pagespeedonline2-cli/mkdocs.yml index 0a34652809..4be6292d0f 100644 --- a/gen/pagespeedonline2-cli/mkdocs.yml +++ b/gen/pagespeedonline2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: pagespeedonline v4.0.1+20191206 +site_name: pagespeedonline v5.0.2+20191206 site_url: http://byron.github.io/google-apis-rs/google-pagespeedonline2-cli site_description: A complete library to interact with pagespeedonline (protocol v2) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline2 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['pagespeedapi_runpagespeed.md', 'Pagespeedapi', 'Runpagespeed'] +nav: +- Home: 'index.md' +- 'Pagespeedapi': + - 'Runpagespeed': 'pagespeedapi_runpagespeed.md' theme: readthedocs diff --git a/gen/pagespeedonline2-cli/src/client.rs b/gen/pagespeedonline2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/pagespeedonline2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// 
Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/pagespeedonline2-cli/src/main.rs b/gen/pagespeedonline2-cli/src/main.rs index bfad71e490..6a45307fde 100644 --- a/gen/pagespeedonline2-cli/src/main.rs +++ b/gen/pagespeedonline2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_pagespeedonline2::{api, Error, oauth2}; +use google_pagespeedonline2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.strategy(value.unwrap_or("")); }, "screenshot" => { - call = call.screenshot(arg_from_str(value.unwrap_or("false"), err, "screenshot", "boolean")); + call = call.screenshot( value.map(|v| arg_from_str(v, err, "screenshot", "boolean")).unwrap_or(false)); }, "rule" => { call = call.add_rule(value.unwrap_or("")); @@ -70,7 +69,7 @@ where call = call.locale(value.unwrap_or("")); }, "filter-third-party-resources" => { - call = call.filter_third_party_resources(arg_from_str(value.unwrap_or("false"), err, "filter-third-party-resources", "boolean")); + call = call.filter_third_party_resources( value.map(|v| arg_from_str(v, err, "filter-third-party-resources", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -231,7 +230,7 @@ async fn main() { let mut app = App::new("pagespeedonline2") .author("Sebastian Thiel ") - .version("4.0.1+20191206") + .version("5.0.2+20191206") .about("Analyzes the performance of a web page and provides tailored suggestions to make that page faster.") .after_help("All documentation 
details can be found at http://byron.github.io/google-apis-rs/google_pagespeedonline2_cli") .arg(Arg::with_name("folder") diff --git a/gen/pagespeedonline2/Cargo.toml b/gen/pagespeedonline2/Cargo.toml index 70aac8d621..4fceec70f0 100644 --- a/gen/pagespeedonline2/Cargo.toml +++ b/gen/pagespeedonline2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-pagespeedonline2" -version = "5.0.2-beta-1+20191206" +version = "5.0.2+20191206" authors = ["Sebastian Thiel "] description = "A complete library to interact with pagespeedonline (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline2" homepage = "https://developers.google.com/speed/docs/insights/v2/getting-started" -documentation = "https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206" +documentation = "https://docs.rs/google-pagespeedonline2/5.0.2+20191206" license = "MIT" keywords = ["pagespeedonline", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/pagespeedonline2/README.md b/gen/pagespeedonline2/README.md index 8a396156f3..ee7e236962 100644 --- a/gen/pagespeedonline2/README.md +++ b/gen/pagespeedonline2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-pagespeedonline2` library allows access to all features of the *Google pagespeedonline* service. -This documentation was generated from *pagespeedonline* crate version *5.0.2-beta-1+20191206*, where *20191206* is the exact revision of the *pagespeedonline:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *pagespeedonline* crate version *5.0.2+20191206*, where *20191206* is the exact revision of the *pagespeedonline:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *pagespeedonline* *v2* API can be found at the [official documentation site](https://developers.google.com/speed/docs/insights/v2/getting-started). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/Pagespeedonline) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/Pagespeedonline) ... * pagespeedapi - * [*runpagespeed*](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/api::PagespeedapiRunpagespeedCall) + * [*runpagespeed*](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/api::PagespeedapiRunpagespeedCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/Pagespeedonline)** +* **[Hub](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/Pagespeedonline)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::CallBuilder) -* **[Resources](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::CallBuilder) +* **[Resources](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::Resource)** * primary types that you can apply *Activities* to * a 
collection of properties and *Parts* - * **[Parts](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::Part)** + * **[Parts](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -123,17 +123,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -143,29 +143,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::Delegate) to the -[Method Builder](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::Delegate) to the +[Method Builder](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::RequestValue) and -[decodable](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::RequestValue) and +[decodable](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-pagespeedonline2/5.0.2-beta-1+20191206/google_pagespeedonline2/client::RequestValue) are moved +* [request values](https://docs.rs/google-pagespeedonline2/5.0.2+20191206/google_pagespeedonline2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/pagespeedonline2/src/api.rs b/gen/pagespeedonline2/src/api.rs index 742267a646..ee923bd758 100644 --- a/gen/pagespeedonline2/src/api.rs +++ b/gen/pagespeedonline2/src/api.rs @@ -102,7 +102,7 @@ impl<'a, S> Pagespeedonline { Pagespeedonline { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/pagespeedonline/v2/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -113,7 +113,7 @@ impl<'a, S> Pagespeedonline { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/pagespeedonline2/src/client.rs b/gen/pagespeedonline2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/pagespeedonline2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/pagespeedonline2/src/lib.rs b/gen/pagespeedonline2/src/lib.rs index 77dfbb01a0..1fa3951d6d 100644 --- a/gen/pagespeedonline2/src/lib.rs +++ b/gen/pagespeedonline2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *pagespeedonline* crate version *5.0.2-beta-1+20191206*, where *20191206* is the exact revision of the *pagespeedonline:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *pagespeedonline* crate version *5.0.2+20191206*, where *20191206* is the exact revision of the *pagespeedonline:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *pagespeedonline* *v2* API can be found at the //! [official documentation site](https://developers.google.com/speed/docs/insights/v2/getting-started). diff --git a/gen/pagespeedonline4-cli/Cargo.toml b/gen/pagespeedonline4-cli/Cargo.toml index 25aee44f54..f8c6d9c752 100644 --- a/gen/pagespeedonline4-cli/Cargo.toml +++ b/gen/pagespeedonline4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-pagespeedonline4-cli" -version = "4.0.1+20191206" +version = "5.0.2+20191206" authors = ["Sebastian Thiel "] description = "A complete library to interact with pagespeedonline (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline4-cli" @@ -20,13 +20,13 @@ name = "pagespeedonline4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-pagespeedonline4] path = "../pagespeedonline4" -version = "4.0.1+20191206" +version = "5.0.2+20191206" + diff --git a/gen/pagespeedonline4-cli/README.md b/gen/pagespeedonline4-cli/README.md index e863dd14b2..52ed01033e 100644 --- a/gen/pagespeedonline4-cli/README.md +++ 
b/gen/pagespeedonline4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *pagespeedonline* API at revision *20191206*. The CLI is at version *4.0.1*. +This documentation was generated from the *pagespeedonline* API at revision *20191206*. The CLI is at version *5.0.2*. ```bash pagespeedonline4 [options] diff --git a/gen/pagespeedonline4-cli/mkdocs.yml b/gen/pagespeedonline4-cli/mkdocs.yml index b767e0372d..d3d81dc6c5 100644 --- a/gen/pagespeedonline4-cli/mkdocs.yml +++ b/gen/pagespeedonline4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: pagespeedonline v4.0.1+20191206 +site_name: pagespeedonline v5.0.2+20191206 site_url: http://byron.github.io/google-apis-rs/google-pagespeedonline4-cli site_description: A complete library to interact with pagespeedonline (protocol v4) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline4 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['pagespeedapi_runpagespeed.md', 'Pagespeedapi', 'Runpagespeed'] +nav: +- Home: 'index.md' +- 'Pagespeedapi': + - 'Runpagespeed': 'pagespeedapi_runpagespeed.md' theme: readthedocs diff --git a/gen/pagespeedonline4-cli/src/client.rs b/gen/pagespeedonline4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/pagespeedonline4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, 
- Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/pagespeedonline4-cli/src/main.rs b/gen/pagespeedonline4-cli/src/main.rs index 8688553df0..f1bf63e0dd 100644 --- a/gen/pagespeedonline4-cli/src/main.rs +++ b/gen/pagespeedonline4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_pagespeedonline4::{api, Error, oauth2}; +use google_pagespeedonline4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -67,10 +66,10 @@ where call = call.strategy(value.unwrap_or("")); }, "snapshots" => { - call = call.snapshots(arg_from_str(value.unwrap_or("false"), err, "snapshots", "boolean")); + call = call.snapshots( value.map(|v| arg_from_str(v, err, "snapshots", "boolean")).unwrap_or(false)); }, "screenshot" => { - call = call.screenshot(arg_from_str(value.unwrap_or("false"), err, "screenshot", "boolean")); + call = call.screenshot( value.map(|v| arg_from_str(v, err, "screenshot", "boolean")).unwrap_or(false)); }, "rule" => { call = call.add_rule(value.unwrap_or("")); @@ -79,7 +78,7 @@ where call = call.locale(value.unwrap_or("")); }, "filter-third-party-resources" => { - call = call.filter_third_party_resources(arg_from_str(value.unwrap_or("false"), err, "filter-third-party-resources", "boolean")); + call = call.filter_third_party_resources( value.map(|v| arg_from_str(v, err, "filter-third-party-resources", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -240,7 +239,7 @@ async fn main() { let mut app = App::new("pagespeedonline4") 
.author("Sebastian Thiel ") - .version("4.0.1+20191206") + .version("5.0.2+20191206") .about("Analyzes the performance of a web page and provides tailored suggestions to make that page faster.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_pagespeedonline4_cli") .arg(Arg::with_name("folder") diff --git a/gen/pagespeedonline4/Cargo.toml b/gen/pagespeedonline4/Cargo.toml index c3063b0f94..d1f9ff305c 100644 --- a/gen/pagespeedonline4/Cargo.toml +++ b/gen/pagespeedonline4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-pagespeedonline4" -version = "5.0.2-beta-1+20191206" +version = "5.0.2+20191206" authors = ["Sebastian Thiel "] description = "A complete library to interact with pagespeedonline (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline4" homepage = "https://developers.google.com/speed/docs/insights/v4/getting-started" -documentation = "https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206" +documentation = "https://docs.rs/google-pagespeedonline4/5.0.2+20191206" license = "MIT" keywords = ["pagespeedonline", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/pagespeedonline4/README.md b/gen/pagespeedonline4/README.md index 77e16acbab..f21a638f90 100644 --- a/gen/pagespeedonline4/README.md +++ b/gen/pagespeedonline4/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-pagespeedonline4` library allows access to all features of the *Google pagespeedonline* service. -This documentation was generated from *pagespeedonline* crate version *5.0.2-beta-1+20191206*, where *20191206* is the exact revision of the *pagespeedonline:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *pagespeedonline* crate version *5.0.2+20191206*, where *20191206* is the exact revision of the *pagespeedonline:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *pagespeedonline* *v4* API can be found at the [official documentation site](https://developers.google.com/speed/docs/insights/v4/getting-started). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/Pagespeedonline) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/Pagespeedonline) ... * pagespeedapi - * [*runpagespeed*](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/api::PagespeedapiRunpagespeedCall) + * [*runpagespeed*](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/api::PagespeedapiRunpagespeedCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/Pagespeedonline)** +* **[Hub](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/Pagespeedonline)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::CallBuilder) -* **[Resources](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::CallBuilder) +* **[Resources](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::Part)** + * **[Parts](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::Delegate) to the -[Method Builder](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::Delegate) to the +[Method Builder](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::RequestValue) and -[decodable](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::RequestValue) and +[decodable](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-pagespeedonline4/5.0.2-beta-1+20191206/google_pagespeedonline4/client::RequestValue) are moved +* [request values](https://docs.rs/google-pagespeedonline4/5.0.2+20191206/google_pagespeedonline4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/pagespeedonline4/src/api.rs b/gen/pagespeedonline4/src/api.rs index 50366a1f31..382147c2ed 100644 --- a/gen/pagespeedonline4/src/api.rs +++ b/gen/pagespeedonline4/src/api.rs @@ -105,7 +105,7 @@ impl<'a, S> Pagespeedonline { Pagespeedonline { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/pagespeedonline/v4/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -116,7 +116,7 @@ impl<'a, S> Pagespeedonline { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/pagespeedonline4/src/client.rs b/gen/pagespeedonline4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/pagespeedonline4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/pagespeedonline4/src/lib.rs b/gen/pagespeedonline4/src/lib.rs index a3a05e5949..4b69cfe003 100644 --- a/gen/pagespeedonline4/src/lib.rs +++ b/gen/pagespeedonline4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *pagespeedonline* crate version *5.0.2-beta-1+20191206*, where *20191206* is the exact revision of the *pagespeedonline:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *pagespeedonline* crate version *5.0.2+20191206*, where *20191206* is the exact revision of the *pagespeedonline:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *pagespeedonline* *v4* API can be found at the //! [official documentation site](https://developers.google.com/speed/docs/insights/v4/getting-started). diff --git a/gen/pagespeedonline5-cli/Cargo.toml b/gen/pagespeedonline5-cli/Cargo.toml index 1ff202188c..84759eb3ec 100644 --- a/gen/pagespeedonline5-cli/Cargo.toml +++ b/gen/pagespeedonline5-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-pagespeedonline5-cli" -version = "4.0.1+20220302" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Pagespeed Insights (protocol v5)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline5-cli" @@ -20,13 +20,13 @@ name = "pagespeedonline5" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-pagespeedonline5] path = "../pagespeedonline5" -version = "4.0.1+20220302" +version = "5.0.2+20230123" + diff --git a/gen/pagespeedonline5-cli/README.md b/gen/pagespeedonline5-cli/README.md index 6d1e17d635..e0613e6cd0 100644 --- a/gen/pagespeedonline5-cli/README.md +++ 
b/gen/pagespeedonline5-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Pagespeed Insights* API at revision *20220302*. The CLI is at version *4.0.1*. +This documentation was generated from the *Pagespeed Insights* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash pagespeedonline5 [options] diff --git a/gen/pagespeedonline5-cli/mkdocs.yml b/gen/pagespeedonline5-cli/mkdocs.yml index 7410b1a6fb..78a2da9bf1 100644 --- a/gen/pagespeedonline5-cli/mkdocs.yml +++ b/gen/pagespeedonline5-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Pagespeed Insights v4.0.1+20220302 +site_name: Pagespeed Insights v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-pagespeedonline5-cli site_description: A complete library to interact with Pagespeed Insights (protocol v5) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline5 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['pagespeedapi_runpagespeed.md', 'Pagespeedapi', 'Runpagespeed'] +nav: +- Home: 'index.md' +- 'Pagespeedapi': + - 'Runpagespeed': 'pagespeedapi_runpagespeed.md' theme: readthedocs diff --git a/gen/pagespeedonline5-cli/src/client.rs b/gen/pagespeedonline5-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/pagespeedonline5-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum 
ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/pagespeedonline5-cli/src/main.rs b/gen/pagespeedonline5-cli/src/main.rs index 417a37fd4b..083d8901ba 100644 --- a/gen/pagespeedonline5-cli/src/main.rs +++ b/gen/pagespeedonline5-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_pagespeedonline5::{api, Error, oauth2}; +use google_pagespeedonline5::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -240,7 +239,7 @@ async fn main() { let mut app = App::new("pagespeedonline5") .author("Sebastian Thiel ") - .version("4.0.1+20220302") + .version("5.0.2+20230123") .about("The PageSpeed Insights API lets you analyze the performance of your website with a simple API. It offers tailored suggestions for how you can optimize your site, and lets you easily integrate PageSpeed Insights analysis into your development tools and workflow. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_pagespeedonline5_cli") .arg(Arg::with_name("url") diff --git a/gen/pagespeedonline5/Cargo.toml b/gen/pagespeedonline5/Cargo.toml index cec915f521..60d0145af9 100644 --- a/gen/pagespeedonline5/Cargo.toml +++ b/gen/pagespeedonline5/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-pagespeedonline5" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Pagespeed Insights (protocol v5)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pagespeedonline5" homepage = "https://developers.google.com/speed/docs/insights/v5/about" -documentation = "https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-pagespeedonline5/5.0.2+20230123" license = "MIT" keywords = ["pagespeedonline", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/pagespeedonline5/README.md b/gen/pagespeedonline5/README.md index 1c1717b7a0..dcb372dc5c 100644 --- a/gen/pagespeedonline5/README.md +++ b/gen/pagespeedonline5/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-pagespeedonline5` library allows access to all features of the *Google Pagespeed Insights* service. -This documentation was generated from *Pagespeed Insights* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *pagespeedonline:v5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Pagespeed Insights* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *pagespeedonline:v5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Pagespeed Insights* *v5* API can be found at the [official documentation site](https://developers.google.com/speed/docs/insights/v5/about). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/PagespeedInsights) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/PagespeedInsights) ... * pagespeedapi - * [*runpagespeed*](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/api::PagespeedapiRunpagespeedCall) + * [*runpagespeed*](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/api::PagespeedapiRunpagespeedCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/PagespeedInsights)** +* **[Hub](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/PagespeedInsights)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::CallBuilder) -* **[Resources](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::CallBuilder) +* **[Resources](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::Resource)** * primary types that you can apply 
*Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::Part)** + * **[Parts](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::CallBuilder)** +* **[Activities](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::Delegate) to the -[Method Builder](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::Delegate) to the +[Method Builder](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::RequestValue) and -[decodable](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::RequestValue) and +[decodable](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-pagespeedonline5/5.0.2-beta-1+20230123/google_pagespeedonline5/client::RequestValue) are moved +* [request values](https://docs.rs/google-pagespeedonline5/5.0.2+20230123/google_pagespeedonline5/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/pagespeedonline5/src/api.rs b/gen/pagespeedonline5/src/api.rs index d0db7f45f9..4b65622430 100644 --- a/gen/pagespeedonline5/src/api.rs +++ b/gen/pagespeedonline5/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> PagespeedInsights { PagespeedInsights { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://pagespeedonline.googleapis.com/".to_string(), _root_url: "https://pagespeedonline.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> PagespeedInsights { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/pagespeedonline5/src/client.rs b/gen/pagespeedonline5/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/pagespeedonline5/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/pagespeedonline5/src/lib.rs b/gen/pagespeedonline5/src/lib.rs index 3f22c8da48..a08ae203c4 100644 --- a/gen/pagespeedonline5/src/lib.rs +++ b/gen/pagespeedonline5/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Pagespeed Insights* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *pagespeedonline:v5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Pagespeed Insights* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *pagespeedonline:v5* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Pagespeed Insights* *v5* API can be found at the //! [official documentation site](https://developers.google.com/speed/docs/insights/v5/about). diff --git a/gen/partners2-cli/Cargo.toml b/gen/partners2-cli/Cargo.toml index 2d8aa8a3bc..ee862e008a 100644 --- a/gen/partners2-cli/Cargo.toml +++ b/gen/partners2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-partners2-cli" -version = "4.0.1+20180925" +version = "5.0.2+20180925" authors = ["Sebastian Thiel "] description = "A complete library to interact with Partners (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/partners2-cli" @@ -20,13 +20,13 @@ name = "partners2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-partners2] path = "../partners2" -version = "4.0.1+20180925" +version = "5.0.2+20180925" + diff --git a/gen/partners2-cli/README.md b/gen/partners2-cli/README.md index 4761848775..0a2cb62e2f 100644 --- a/gen/partners2-cli/README.md +++ b/gen/partners2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Partners* API at revision *20180925*. The CLI is at version *4.0.1*. +This documentation was generated from the *Partners* API at revision *20180925*. The CLI is at version *5.0.2*. ```bash partners2 [options] diff --git a/gen/partners2-cli/mkdocs.yml b/gen/partners2-cli/mkdocs.yml index 63c80ea96c..7f520359c5 100644 --- a/gen/partners2-cli/mkdocs.yml +++ b/gen/partners2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Partners v4.0.1+20180925 +site_name: Partners v5.0.2+20180925 site_url: http://byron.github.io/google-apis-rs/google-partners2-cli site_description: A complete library to interact with Partners (protocol v2) @@ -7,25 +7,34 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/partners2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['analytics_list.md', 'Analytics', 'List'] -- ['client-messages_log.md', 'Client Messages', 'Log'] -- ['companies_get.md', 'Companies', 'Get'] -- ['companies_leads-create.md', 'Companies', 'Leads Create'] -- ['companies_list.md', 'Companies', 'List'] -- ['leads_list.md', 'Leads', 'List'] -- ['methods_get-partnersstatus.md', 'Methods', 'Get Partnersstatus'] -- ['methods_update-companies.md', 'Methods', 'Update Companies'] -- ['methods_update-leads.md', 'Methods', 'Update Leads'] -- ['offers_history-list.md', 'Offers', 'History List'] -- ['offers_list.md', 'Offers', 'List'] -- ['user-events_log.md', 'User Events', 'Log'] -- ['user-states_list.md', 'User States', 'List'] -- ['users_create-company-relation.md', 'Users', 'Create Company Relation'] -- ['users_delete-company-relation.md', 'Users', 'Delete Company Relation'] -- ['users_get.md', 'Users', 'Get'] -- ['users_update-profile.md', 'Users', 'Update Profile'] +nav: +- Home: 'index.md' +- 'Analytics': + - 'List': 'analytics_list.md' +- 'Client Messages': + - 'Log': 'client-messages_log.md' +- 'Companies': + - 'Get': 
'companies_get.md' + - 'Leads Create': 'companies_leads-create.md' + - 'List': 'companies_list.md' +- 'Leads': + - 'List': 'leads_list.md' +- 'Methods': + - 'Get Partnersstatus': 'methods_get-partnersstatus.md' + - 'Update Companies': 'methods_update-companies.md' + - 'Update Leads': 'methods_update-leads.md' +- 'Offers': + - 'History List': 'offers_history-list.md' + - 'List': 'offers_list.md' +- 'User Events': + - 'Log': 'user-events_log.md' +- 'User States': + - 'List': 'user-states_list.md' +- 'Users': + - 'Create Company Relation': 'users_create-company-relation.md' + - 'Delete Company Relation': 'users_delete-company-relation.md' + - 'Get': 'users_get.md' + - 'Update Profile': 'users_update-profile.md' theme: readthedocs diff --git a/gen/partners2-cli/src/client.rs b/gen/partners2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/partners2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/partners2-cli/src/main.rs b/gen/partners2-cli/src/main.rs index 5874c756c0..6bf11169bf 100644 --- a/gen/partners2-cli/src/main.rs +++ b/gen/partners2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_partners2::{api, Error, oauth2}; +use google_partners2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -82,7 +81,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -452,25 +451,25 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); }, "min-monthly-budget-units" => { - call = call.min_monthly_budget_units(value.unwrap_or("")); + call = call.min_monthly_budget_units( value.map(|v| arg_from_str(v, err, "min-monthly-budget-units", "int64")).unwrap_or(-0)); }, "min-monthly-budget-nanos" => { - call = call.min_monthly_budget_nanos(arg_from_str(value.unwrap_or("-0"), err, "min-monthly-budget-nanos", "integer")); + call = call.min_monthly_budget_nanos( value.map(|v| arg_from_str(v, err, 
"min-monthly-budget-nanos", "int32")).unwrap_or(-0)); }, "min-monthly-budget-currency-code" => { call = call.min_monthly_budget_currency_code(value.unwrap_or("")); }, "max-monthly-budget-units" => { - call = call.max_monthly_budget_units(value.unwrap_or("")); + call = call.max_monthly_budget_units( value.map(|v| arg_from_str(v, err, "max-monthly-budget-units", "int64")).unwrap_or(-0)); }, "max-monthly-budget-nanos" => { - call = call.max_monthly_budget_nanos(arg_from_str(value.unwrap_or("-0"), err, "max-monthly-budget-nanos", "integer")); + call = call.max_monthly_budget_nanos( value.map(|v| arg_from_str(v, err, "max-monthly-budget-nanos", "int32")).unwrap_or(-0)); }, "max-monthly-budget-currency-code" => { call = call.max_monthly_budget_currency_code(value.unwrap_or("")); @@ -565,7 +564,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -759,7 +758,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-metadata-user-overrides-user-id" => { call = call.request_metadata_user_overrides_user_id(value.unwrap_or("")); @@ -882,7 +881,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-metadata-user-overrides-user-id" => { call = call.request_metadata_user_overrides_user_id(value.unwrap_or("")); @@ -980,13 +979,13 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); }, "entire-company" => { - call = call.entire_company(arg_from_str(value.unwrap_or("false"), err, "entire-company", "boolean")); + call = call.entire_company( value.map(|v| arg_from_str(v, err, "entire-company", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2277,7 +2276,7 @@ async fn main() { let mut app = App::new("partners2") .author("Sebastian Thiel ") - .version("4.0.1+20180925") + .version("5.0.2+20180925") .about("Searches certified companies and creates contact leads with them, and also audits the usage of clients.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_partners2_cli") .arg(Arg::with_name("folder") diff --git a/gen/partners2/Cargo.toml b/gen/partners2/Cargo.toml index d6bed5bd1d..81606fb486 100644 --- a/gen/partners2/Cargo.toml +++ b/gen/partners2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-partners2" -version = "5.0.2-beta-1+20180925" +version = "5.0.2+20180925" authors = ["Sebastian Thiel "] description = "A complete library to interact with Partners (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/partners2" homepage = "https://developers.google.com/partners/" -documentation = "https://docs.rs/google-partners2/5.0.2-beta-1+20180925" +documentation = "https://docs.rs/google-partners2/5.0.2+20180925" license = "MIT" keywords = ["partners", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/partners2/README.md b/gen/partners2/README.md index d1cb5c7bfa..9c6eeaae5e 100644 --- a/gen/partners2/README.md +++ b/gen/partners2/README.md @@ -5,36 +5,36 @@ DO NOT EDIT ! 
--> The `google-partners2` library allows access to all features of the *Google Partners* service. -This documentation was generated from *Partners* crate version *5.0.2-beta-1+20180925*, where *20180925* is the exact revision of the *partners:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Partners* crate version *5.0.2+20180925*, where *20180925* is the exact revision of the *partners:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Partners* *v2* API can be found at the [official documentation site](https://developers.google.com/partners/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/Partners) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/Partners) ... 
* analytics - * [*list*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::AnalyticListCall) + * [*list*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::AnalyticListCall) * client messages - * [*log*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::ClientMessageLogCall) -* [companies](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::Company) - * [*get*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::CompanyGetCall), [*leads create*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::CompanyLeadCreateCall) and [*list*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::CompanyListCall) -* [leads](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::Lead) - * [*list*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::LeadListCall) + * [*log*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::ClientMessageLogCall) +* [companies](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::Company) + * [*get*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::CompanyGetCall), [*leads create*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::CompanyLeadCreateCall) and [*list*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::CompanyListCall) +* [leads](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::Lead) + * [*list*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::LeadListCall) * offers - * [*history list*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::OfferHistoryListCall) and [*list*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::OfferListCall) + * [*history 
list*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::OfferHistoryListCall) and [*list*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::OfferListCall) * user events - * [*log*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::UserEventLogCall) + * [*log*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::UserEventLogCall) * user states - * [*list*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::UserStateListCall) -* [users](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::User) - * [*create company relation*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::UserCreateCompanyRelationCall), [*delete company relation*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::UserDeleteCompanyRelationCall), [*get*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::UserGetCall) and [*update profile*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::UserUpdateProfileCall) + * [*list*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::UserStateListCall) +* [users](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::User) + * [*create company relation*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::UserCreateCompanyRelationCall), [*delete company relation*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::UserDeleteCompanyRelationCall), [*get*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::UserGetCall) and [*update profile*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::UserUpdateProfileCall) Other activities are ... 
-* [get partnersstatus](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::MethodGetPartnersstatuCall) -* [update companies](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::MethodUpdateCompanyCall) -* [update leads](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/api::MethodUpdateLeadCall) +* [get partnersstatus](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::MethodGetPartnersstatuCall) +* [update companies](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::MethodUpdateCompanyCall) +* [update leads](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/api::MethodUpdateLeadCall) @@ -42,17 +42,17 @@ Other activities are ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/Partners)** +* **[Hub](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/Partners)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::CallBuilder) -* **[Resources](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::CallBuilder) +* **[Resources](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::Part)** + * **[Parts](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -153,17 +153,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -173,29 +173,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::Delegate) to the -[Method Builder](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::Delegate) to the +[Method Builder](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::RequestValue) and -[decodable](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::RequestValue) and +[decodable](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-partners2/5.0.2-beta-1+20180925/google_partners2/client::RequestValue) are moved +* [request values](https://docs.rs/google-partners2/5.0.2+20180925/google_partners2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/partners2/src/api.rs b/gen/partners2/src/api.rs index a7918b4a54..e410a75fa0 100644 --- a/gen/partners2/src/api.rs +++ b/gen/partners2/src/api.rs @@ -110,7 +110,7 @@ impl<'a, S> Partners { Partners { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://partners.googleapis.com/".to_string(), _root_url: "https://partners.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> Partners { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/partners2/src/client.rs b/gen/partners2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/partners2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/partners2/src/lib.rs b/gen/partners2/src/lib.rs index d753e2580b..4baa19f311 100644 --- a/gen/partners2/src/lib.rs +++ b/gen/partners2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Partners* crate version *5.0.2-beta-1+20180925*, where *20180925* is the exact revision of the *partners:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Partners* crate version *5.0.2+20180925*, where *20180925* is the exact revision of the *partners:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Partners* *v2* API can be found at the //! [official documentation site](https://developers.google.com/partners/). diff --git a/gen/paymentsresellersubscription1-cli/Cargo.toml b/gen/paymentsresellersubscription1-cli/Cargo.toml index 4e9d0f93ef..60f84b0819 100644 --- a/gen/paymentsresellersubscription1-cli/Cargo.toml +++ b/gen/paymentsresellersubscription1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-paymentsresellersubscription1-cli" -version = "4.0.1+20220307" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Payments Reseller Subscription (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/paymentsresellersubscription1-cli" @@ -20,13 +20,13 @@ name = "paymentsresellersubscription1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-paymentsresellersubscription1] path = "../paymentsresellersubscription1" -version = "4.0.1+20220307" +version = "5.0.2+20230123" + diff --git a/gen/paymentsresellersubscription1-cli/README.md b/gen/paymentsresellersubscription1-cli/README.md 
index 56d392205b..18f691985b 100644 --- a/gen/paymentsresellersubscription1-cli/README.md +++ b/gen/paymentsresellersubscription1-cli/README.md @@ -25,12 +25,13 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Payments Reseller Subscription* API at revision *20220307*. The CLI is at version *4.0.1*. +This documentation was generated from the *Payments Reseller Subscription* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash paymentsresellersubscription1 [options] partners products-list [-p ]... [-o ] + promotions-find-eligible (-r )... [-p ]... [-o ] promotions-list [-p ]... [-o ] subscriptions-cancel (-r )... [-p ]... [-o ] subscriptions-create (-r )... [-p ]... [-o ] diff --git a/gen/paymentsresellersubscription1-cli/mkdocs.yml b/gen/paymentsresellersubscription1-cli/mkdocs.yml index 29aca2dfdf..23af614624 100644 --- a/gen/paymentsresellersubscription1-cli/mkdocs.yml +++ b/gen/paymentsresellersubscription1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Payments Reseller Subscription v4.0.1+20220307 +site_name: Payments Reseller Subscription v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-paymentsresellersubscription1-cli site_description: A complete library to interact with Payments Reseller Subscription (protocol v1) @@ -7,17 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/paymentsreseller docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['partners_products-list.md', 'Partners', 'Products List'] -- ['partners_promotions-list.md', 'Partners', 'Promotions List'] -- ['partners_subscriptions-cancel.md', 'Partners', 'Subscriptions Cancel'] -- ['partners_subscriptions-create.md', 'Partners', 'Subscriptions Create'] -- ['partners_subscriptions-entitle.md', 'Partners', 'Subscriptions Entitle'] -- ['partners_subscriptions-extend.md', 'Partners', 'Subscriptions Extend'] -- ['partners_subscriptions-get.md', 
'Partners', 'Subscriptions Get'] -- ['partners_subscriptions-provision.md', 'Partners', 'Subscriptions Provision'] -- ['partners_subscriptions-undo-cancel.md', 'Partners', 'Subscriptions Undo Cancel'] +nav: +- Home: 'index.md' +- 'Partners': + - 'Products List': 'partners_products-list.md' + - 'Promotions Find Eligible': 'partners_promotions-find-eligible.md' + - 'Promotions List': 'partners_promotions-list.md' + - 'Subscriptions Cancel': 'partners_subscriptions-cancel.md' + - 'Subscriptions Create': 'partners_subscriptions-create.md' + - 'Subscriptions Entitle': 'partners_subscriptions-entitle.md' + - 'Subscriptions Extend': 'partners_subscriptions-extend.md' + - 'Subscriptions Get': 'partners_subscriptions-get.md' + - 'Subscriptions Provision': 'partners_subscriptions-provision.md' + - 'Subscriptions Undo Cancel': 'partners_subscriptions-undo-cancel.md' theme: readthedocs diff --git a/gen/paymentsresellersubscription1-cli/src/client.rs b/gen/paymentsresellersubscription1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/paymentsresellersubscription1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. 
Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/paymentsresellersubscription1-cli/src/main.rs b/gen/paymentsresellersubscription1-cli/src/main.rs index 0f62364c7f..891521488f 100644 --- a/gen/paymentsresellersubscription1-cli/src/main.rs +++ b/gen/paymentsresellersubscription1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_paymentsresellersubscription1::{api, Error, oauth2}; +use google_paymentsresellersubscription1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); }, _ => { let mut found = false; @@ -76,7 +78,91 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match 
protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _partners_promotions_find_eligible(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["filter", "page-size", "page-token"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudPaymentsResellerSubscriptionV1FindEligiblePromotionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.partners().promotions_find_eligible(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); v } )); } } @@ -117,7 +203,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -761,6 +847,9 @@ where ("products-list", Some(opt)) => { call_result = self._partners_products_list(opt, dry_run, &mut err).await; }, + 
("promotions-find-eligible", Some(opt)) => { + call_result = self._partners_promotions_find_eligible(opt, dry_run, &mut err).await; + }, ("promotions-list", Some(opt)) => { call_result = self._partners_promotions_list(opt, dry_run, &mut err).await; }, @@ -864,9 +953,9 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("partners", "methods: 'products-list', 'promotions-list', 'subscriptions-cancel', 'subscriptions-create', 'subscriptions-entitle', 'subscriptions-extend', 'subscriptions-get', 'subscriptions-provision' and 'subscriptions-undo-cancel'", vec![ + ("partners", "methods: 'products-list', 'promotions-find-eligible', 'promotions-list', 'subscriptions-cancel', 'subscriptions-create', 'subscriptions-entitle', 'subscriptions-extend', 'subscriptions-get', 'subscriptions-provision' and 'subscriptions-undo-cancel'", vec![ ("products-list", - Some(r##"Used by partners to list products that can be resold to their customers. It should be called directly by the partner using service accounts."##), + Some(r##"To retrieve the products that can be resold by the partner. It should be autenticated with a service account."##), "Details at http://byron.github.io/google-apis-rs/google_paymentsresellersubscription1_cli/partners_products-list", vec![ (Some(r##"parent"##), @@ -881,6 +970,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("promotions-find-eligible", + Some(r##"To find eligible promotions for the current user. The API requires user authorization via OAuth. The user is inferred from the authenticated OAuth credential."##), + "Details at http://byron.github.io/google-apis-rs/google_paymentsresellersubscription1_cli/partners_promotions-find-eligible", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent, the partner that can resell. 
Format: partners/{partner}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -888,7 +1005,7 @@ async fn main() { Some(false)), ]), ("promotions-list", - Some(r##"Used by partners to list promotions, such as free trial, that can be applied on subscriptions. It should be called directly by the partner using service accounts."##), + Some(r##"To retrieve the promotions, such as free trial, that can be used by the partner. It should be autenticated with a service account."##), "Details at http://byron.github.io/google-apis-rs/google_paymentsresellersubscription1_cli/partners_promotions-list", vec![ (Some(r##"parent"##), @@ -994,7 +1111,7 @@ async fn main() { Some(false)), ]), ("subscriptions-extend", - Some(r##"Used by partners to extend a subscription service for their customers on an ongoing basis for the subscription to remain active and renewable. It should be called directly by the partner using service accounts."##), + Some(r##"[Deprecated] New partners should be on auto-extend by default. Used by partners to extend a subscription service for their customers on an ongoing basis for the subscription to remain active and renewable. 
It should be called directly by the partner using service accounts."##), "Details at http://byron.github.io/google-apis-rs/google_paymentsresellersubscription1_cli/partners_subscriptions-extend", vec![ (Some(r##"name"##), @@ -1105,7 +1222,7 @@ async fn main() { let mut app = App::new("paymentsresellersubscription1") .author("Sebastian Thiel ") - .version("4.0.1+20220307") + .version("5.0.2+20230123") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_paymentsresellersubscription1_cli") .arg(Arg::with_name("folder") diff --git a/gen/paymentsresellersubscription1/Cargo.toml b/gen/paymentsresellersubscription1/Cargo.toml index fbe274d5c7..ca565892f5 100644 --- a/gen/paymentsresellersubscription1/Cargo.toml +++ b/gen/paymentsresellersubscription1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-paymentsresellersubscription1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Payments Reseller Subscription (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/paymentsresellersubscription1" homepage = "https://developers.google.com/payments/reseller/subscription/" -documentation = "https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123" license = "MIT" keywords = ["paymentsresellersubs", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/paymentsresellersubscription1/README.md b/gen/paymentsresellersubscription1/README.md index 9122dca246..1a6d838b0d 100644 --- a/gen/paymentsresellersubscription1/README.md +++ b/gen/paymentsresellersubscription1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-paymentsresellersubscription1` library allows access to all features of the *Google Payments Reseller Subscription* service. 
-This documentation was generated from *Payments Reseller Subscription* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *paymentsresellersubscription:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Payments Reseller Subscription* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *paymentsresellersubscription:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Payments Reseller Subscription* *v1* API can be found at the [official documentation site](https://developers.google.com/payments/reseller/subscription/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/PaymentsResellerSubscription) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/PaymentsResellerSubscription) ... 
* partners - * [*products list*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerProductListCall), [*promotions find eligible*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerPromotionFindEligibleCall), [*promotions list*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerPromotionListCall), [*subscriptions cancel*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionCancelCall), [*subscriptions create*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionCreateCall), [*subscriptions entitle*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionEntitleCall), [*subscriptions extend*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionExtendCall), [*subscriptions get*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionGetCall), [*subscriptions provision*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionProvisionCall) and [*subscriptions undo cancel*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionUndoCancelCall) + * [*products list*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerProductListCall), [*promotions find 
eligible*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerPromotionFindEligibleCall), [*promotions list*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerPromotionListCall), [*subscriptions cancel*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionCancelCall), [*subscriptions create*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionCreateCall), [*subscriptions entitle*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionEntitleCall), [*subscriptions extend*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionExtendCall), [*subscriptions get*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionGetCall), [*subscriptions provision*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionProvisionCall) and [*subscriptions undo cancel*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/api::PartnerSubscriptionUndoCancelCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/PaymentsResellerSubscription)** +* **[Hub](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/PaymentsResellerSubscription)** * a central object to maintain state and 
allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::CallBuilder) -* **[Resources](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::CallBuilder) +* **[Resources](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::Part)** + * **[Parts](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::Delegate) to the -[Method Builder](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::Delegate) to the +[Method Builder](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::RequestValue) and -[decodable](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::RequestValue) and +[decodable](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-paymentsresellersubscription1/5.0.2-beta-1+20230123/google_paymentsresellersubscription1/client::RequestValue) are moved +* [request values](https://docs.rs/google-paymentsresellersubscription1/5.0.2+20230123/google_paymentsresellersubscription1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/paymentsresellersubscription1/src/api.rs b/gen/paymentsresellersubscription1/src/api.rs index 930d545074..4f78500779 100644 --- a/gen/paymentsresellersubscription1/src/api.rs +++ b/gen/paymentsresellersubscription1/src/api.rs @@ -104,7 +104,7 @@ impl<'a, S> PaymentsResellerSubscription { PaymentsResellerSubscription { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://paymentsresellersubscription.googleapis.com/".to_string(), _root_url: "https://paymentsresellersubscription.googleapis.com/".to_string(), } @@ -115,7 +115,7 @@ impl<'a, S> PaymentsResellerSubscription { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/paymentsresellersubscription1/src/client.rs b/gen/paymentsresellersubscription1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/paymentsresellersubscription1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/paymentsresellersubscription1/src/lib.rs b/gen/paymentsresellersubscription1/src/lib.rs index cc49012317..cf6352c189 100644 --- a/gen/paymentsresellersubscription1/src/lib.rs +++ b/gen/paymentsresellersubscription1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Payments Reseller Subscription* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *paymentsresellersubscription:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Payments Reseller Subscription* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *paymentsresellersubscription:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Payments Reseller Subscription* *v1* API can be found at the //! [official documentation site](https://developers.google.com/payments/reseller/subscription/). diff --git a/gen/people1-cli/Cargo.toml b/gen/people1-cli/Cargo.toml index 5d31104b58..763408e44f 100644 --- a/gen/people1-cli/Cargo.toml +++ b/gen/people1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-people1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with People Service (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/people1-cli" @@ -20,13 +20,13 @@ name = "people1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-people1] path = "../people1" -version = "4.0.1+20220303" +version = "5.0.2+20230123" + diff --git a/gen/people1-cli/README.md b/gen/people1-cli/README.md index e7199657a9..5865d5af98 100644 --- a/gen/people1-cli/README.md +++ b/gen/people1-cli/README.md @@ -25,7 +25,7 @@ Find the 
source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *People Service* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *People Service* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash people1 [options] diff --git a/gen/people1-cli/mkdocs.yml b/gen/people1-cli/mkdocs.yml index 02c19cbd3f..104ad232d4 100644 --- a/gen/people1-cli/mkdocs.yml +++ b/gen/people1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: People Service v4.0.1+20220303 +site_name: People Service v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-people1-cli site_description: A complete library to interact with People Service (protocol v1) @@ -7,32 +7,35 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/people1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['contact-groups_batch-get.md', 'Contact Groups', 'Batch Get'] -- ['contact-groups_create.md', 'Contact Groups', 'Create'] -- ['contact-groups_delete.md', 'Contact Groups', 'Delete'] -- ['contact-groups_get.md', 'Contact Groups', 'Get'] -- ['contact-groups_list.md', 'Contact Groups', 'List'] -- ['contact-groups_members-modify.md', 'Contact Groups', 'Members Modify'] -- ['contact-groups_update.md', 'Contact Groups', 'Update'] -- ['other-contacts_copy-other-contact-to-my-contacts-group.md', 'Other Contacts', 'Copy Other Contact To My Contacts Group'] -- ['other-contacts_list.md', 'Other Contacts', 'List'] -- ['other-contacts_search.md', 'Other Contacts', 'Search'] -- ['people_batch-create-contacts.md', 'People', 'Batch Create Contacts'] -- ['people_batch-delete-contacts.md', 'People', 'Batch Delete Contacts'] -- ['people_batch-update-contacts.md', 'People', 'Batch Update Contacts'] -- ['people_connections-list.md', 'People', 'Connections List'] -- ['people_create-contact.md', 'People', 'Create Contact'] -- ['people_delete-contact.md', 'People', 'Delete 
Contact'] -- ['people_delete-contact-photo.md', 'People', 'Delete Contact Photo'] -- ['people_get.md', 'People', 'Get'] -- ['people_get-batch-get.md', 'People', 'Get Batch Get'] -- ['people_list-directory-people.md', 'People', 'List Directory People'] -- ['people_search-contacts.md', 'People', 'Search Contacts'] -- ['people_search-directory-people.md', 'People', 'Search Directory People'] -- ['people_update-contact.md', 'People', 'Update Contact'] -- ['people_update-contact-photo.md', 'People', 'Update Contact Photo'] +nav: +- Home: 'index.md' +- 'Contact Groups': + - 'Batch Get': 'contact-groups_batch-get.md' + - 'Create': 'contact-groups_create.md' + - 'Delete': 'contact-groups_delete.md' + - 'Get': 'contact-groups_get.md' + - 'List': 'contact-groups_list.md' + - 'Members Modify': 'contact-groups_members-modify.md' + - 'Update': 'contact-groups_update.md' +- 'Other Contacts': + - 'Copy Other Contact To My Contacts Group': 'other-contacts_copy-other-contact-to-my-contacts-group.md' + - 'List': 'other-contacts_list.md' + - 'Search': 'other-contacts_search.md' +- 'People': + - 'Batch Create Contacts': 'people_batch-create-contacts.md' + - 'Batch Delete Contacts': 'people_batch-delete-contacts.md' + - 'Batch Update Contacts': 'people_batch-update-contacts.md' + - 'Connections List': 'people_connections-list.md' + - 'Create Contact': 'people_create-contact.md' + - 'Delete Contact': 'people_delete-contact.md' + - 'Delete Contact Photo': 'people_delete-contact-photo.md' + - 'Get': 'people_get.md' + - 'Get Batch Get': 'people_get-batch-get.md' + - 'List Directory People': 'people_list-directory-people.md' + - 'Search Contacts': 'people_search-contacts.md' + - 'Search Directory People': 'people_search-directory-people.md' + - 'Update Contact': 'people_update-contact.md' + - 'Update Contact Photo': 'people_update-contact-photo.md' theme: readthedocs diff --git a/gen/people1-cli/src/client.rs b/gen/people1-cli/src/client.rs deleted file mode 100644 index 
0ece418e7d..0000000000 --- a/gen/people1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, 
candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - 
push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/people1-cli/src/main.rs b/gen/people1-cli/src/main.rs index 995446f18e..0c36a189b7 100644 --- a/gen/people1-cli/src/main.rs +++ b/gen/people1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_people1::{api, Error, oauth2}; +use google_people1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,10 +60,10 @@ where call = call.add_resource_names(value.unwrap_or("")); }, "max-members" => { - call = call.max_members(arg_from_str(value.unwrap_or("-0"), err, "max-members", "integer")); + call = call.max_members( value.map(|v| arg_from_str(v, err, "max-members", "int32")).unwrap_or(-0)); }, "group-fields" => { - call = call.group_fields(value.unwrap_or("")); + call = call.group_fields( value.map(|v| arg_from_str(v, err, "group-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -214,7 +213,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "delete-contacts" => { - call = call.delete_contacts(arg_from_str(value.unwrap_or("false"), err, "delete-contacts", "boolean")); + call = call.delete_contacts( value.map(|v| arg_from_str(v, err, "delete-contacts", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -270,10 +269,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "max-members" => { - call = call.max_members(arg_from_str(value.unwrap_or("-0"), err, "max-members", "integer")); + call = 
call.max_members( value.map(|v| arg_from_str(v, err, "max-members", "int32")).unwrap_or(-0)); }, "group-fields" => { - call = call.group_fields(value.unwrap_or("")); + call = call.group_fields( value.map(|v| arg_from_str(v, err, "group-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -335,10 +334,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "group-fields" => { - call = call.group_fields(value.unwrap_or("")); + call = call.group_fields( value.map(|v| arg_from_str(v, err, "group-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -668,16 +667,16 @@ where call = call.add_sources(value.unwrap_or("")); }, "request-sync-token" => { - call = call.request_sync_token(arg_from_str(value.unwrap_or("false"), err, "request-sync-token", "boolean")); + call = call.request_sync_token( value.map(|v| arg_from_str(v, err, "request-sync-token", "boolean")).unwrap_or(false)); }, "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -733,13 +732,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "query" => { call 
= call.query(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1062,19 +1061,19 @@ where call = call.sort_order(value.unwrap_or("")); }, "request-sync-token" => { - call = call.request_sync_token(arg_from_str(value.unwrap_or("false"), err, "request-sync-token", "boolean")); + call = call.request_sync_token( value.map(|v| arg_from_str(v, err, "request-sync-token", "boolean")).unwrap_or(false)); }, "request-mask-include-field" => { - call = call.request_mask_include_field(value.unwrap_or("")); + call = call.request_mask_include_field( value.map(|v| arg_from_str(v, err, "request-mask-include-field", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "person-fields" => { - call = call.person_fields(value.unwrap_or("")); + call = call.person_fields( value.map(|v| arg_from_str(v, err, "person-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1172,7 +1171,7 @@ where call = call.add_sources(value.unwrap_or("")); }, "person-fields" => { - call = call.person_fields(value.unwrap_or("")); + call = call.person_fields( value.map(|v| arg_from_str(v, err, "person-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1283,7 +1282,7 @@ where call = call.add_sources(value.unwrap_or("")); }, "person-fields" => { - call = call.person_fields(value.unwrap_or("")); + call = call.person_fields( value.map(|v| arg_from_str(v, err, "person-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); 
}, _ => { let mut found = false; @@ -1342,10 +1341,10 @@ where call = call.add_sources(value.unwrap_or("")); }, "request-mask-include-field" => { - call = call.request_mask_include_field(value.unwrap_or("")); + call = call.request_mask_include_field( value.map(|v| arg_from_str(v, err, "request-mask-include-field", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "person-fields" => { - call = call.person_fields(value.unwrap_or("")); + call = call.person_fields( value.map(|v| arg_from_str(v, err, "person-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1407,10 +1406,10 @@ where call = call.add_resource_names(value.unwrap_or("")); }, "request-mask-include-field" => { - call = call.request_mask_include_field(value.unwrap_or("")); + call = call.request_mask_include_field( value.map(|v| arg_from_str(v, err, "request-mask-include-field", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "person-fields" => { - call = call.person_fields(value.unwrap_or("")); + call = call.person_fields( value.map(|v| arg_from_str(v, err, "person-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1472,16 +1471,16 @@ where call = call.add_sources(value.unwrap_or("")); }, "request-sync-token" => { - call = call.request_sync_token(arg_from_str(value.unwrap_or("false"), err, "request-sync-token", "boolean")); + call = call.request_sync_token( value.map(|v| arg_from_str(v, err, "request-sync-token", "boolean")).unwrap_or(false)); }, "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, "merge-sources" => { call = call.add_merge_sources(value.unwrap_or("")); @@ -1543,13 +1542,13 @@ where call = call.add_sources(value.unwrap_or("")); }, "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "query" => { call = call.query(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1608,7 +1607,7 @@ where call = call.add_sources(value.unwrap_or("")); }, "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "query" => { call = call.query(value.unwrap_or("")); @@ -1617,7 +1616,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "merge-sources" => { call = call.add_merge_sources(value.unwrap_or("")); @@ -1715,13 +1714,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-person-fields" => { - call = call.update_person_fields(value.unwrap_or("")); + call = call.update_person_fields( value.map(|v| arg_from_str(v, err, "update-person-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "sources" => { call = call.add_sources(value.unwrap_or("")); }, "person-fields" => { - call = call.person_fields(value.unwrap_or("")); + call = call.person_fields( value.map(|v| arg_from_str(v, err, "person-fields", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ 
-2218,7 +2217,7 @@ async fn main() { Some(false)), ]), ("list", - Some(r##"List all "Other contacts", that is contacts that are not in a contact group. "Other contacts" are typically auto created contacts from interactions. Sync tokens expire 7 days after the full sync. A request with an expired sync token will result in a 410 error. In the case of such an error clients should make a full sync request without a `sync_token`. The first page of a full sync request has an additional quota. If the quota is exceeded, a 429 error will be returned. This quota is fixed and can not be increased. When the `sync_token` is specified, resources deleted since the last sync will be returned as a person with `PersonMetadata.deleted` set to true. When the `page_token` or `sync_token` is specified, all other request parameters must match the first call. Writes may have a propagation delay of several minutes for sync requests. Incremental syncs are not intended for read-after-write use cases. See example usage at [List the user's other contacts that have changed](/people/v1/other-contacts#list_the_users_other_contacts_that_have_changed)."##), + Some(r##"List all "Other contacts", that is contacts that are not in a contact group. "Other contacts" are typically auto created contacts from interactions. Sync tokens expire 7 days after the full sync. A request with an expired sync token will get an error with an [google.rpc.ErrorInfo](https://cloud.google.com/apis/design/errors#error_info) with reason "EXPIRED_SYNC_TOKEN". In the case of such an error clients should make a full sync request without a `sync_token`. The first page of a full sync request has an additional quota. If the quota is exceeded, a 429 error will be returned. This quota is fixed and can not be increased. When the `sync_token` is specified, resources deleted since the last sync will be returned as a person with `PersonMetadata.deleted` set to true. 
When the `page_token` or `sync_token` is specified, all other request parameters must match the first call. Writes may have a propagation delay of several minutes for sync requests. Incremental syncs are not intended for read-after-write use cases. See example usage at [List the user's other contacts that have changed](/people/v1/other-contacts#list_the_users_other_contacts_that_have_changed)."##), "Details at http://byron.github.io/google-apis-rs/google_people1_cli/other-contacts_list", vec![ (Some(r##"v"##), @@ -2319,7 +2318,7 @@ async fn main() { Some(false)), ]), ("connections-list", - Some(r##"Provides a list of the authenticated user's contacts. Sync tokens expire 7 days after the full sync. A request with an expired sync token will result in a 410 error. In the case of such an error clients should make a full sync request without a `sync_token`. The first page of a full sync request has an additional quota. If the quota is exceeded, a 429 error will be returned. This quota is fixed and can not be increased. When the `sync_token` is specified, resources deleted since the last sync will be returned as a person with `PersonMetadata.deleted` set to true. When the `page_token` or `sync_token` is specified, all other request parameters must match the first call. Writes may have a propagation delay of several minutes for sync requests. Incremental syncs are not intended for read-after-write use cases. See example usage at [List the user's contacts that have changed](/people/v1/contacts#list_the_users_contacts_that_have_changed)."##), + Some(r##"Provides a list of the authenticated user's contacts. Sync tokens expire 7 days after the full sync. A request with an expired sync token will get an error with an [google.rpc.ErrorInfo](https://cloud.google.com/apis/design/errors#error_info) with reason "EXPIRED_SYNC_TOKEN". In the case of such an error clients should make a full sync request without a `sync_token`. 
The first page of a full sync request has an additional quota. If the quota is exceeded, a 429 error will be returned. This quota is fixed and can not be increased. When the `sync_token` is specified, resources deleted since the last sync will be returned as a person with `PersonMetadata.deleted` set to true. When the `page_token` or `sync_token` is specified, all other request parameters must match the first call. Writes may have a propagation delay of several minutes for sync requests. Incremental syncs are not intended for read-after-write use cases. See example usage at [List the user's contacts that have changed](/people/v1/contacts#list_the_users_contacts_that_have_changed)."##), "Details at http://byron.github.io/google-apis-rs/google_people1_cli/people_connections-list", vec![ (Some(r##"resource-name"##), @@ -2498,7 +2497,7 @@ async fn main() { vec![ (Some(r##"resource-name"##), None, - Some(r##"The resource name for the person, assigned by the server. An ASCII string with a max length of 27 characters, in the form of `people/{person_id}`."##), + Some(r##"The resource name for the person, assigned by the server. 
An ASCII string in the form of `people/{person_id}`."##), Some(true), Some(false)), @@ -2554,7 +2553,7 @@ async fn main() { let mut app = App::new("people1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230123") .about("Provides access to information about profiles and contacts.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_people1_cli") .arg(Arg::with_name("url") diff --git a/gen/people1/Cargo.toml b/gen/people1/Cargo.toml index f063d67f62..fcaa029a7f 100644 --- a/gen/people1/Cargo.toml +++ b/gen/people1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-people1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with People Service (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/people1" homepage = "https://developers.google.com/people/" -documentation = "https://docs.rs/google-people1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-people1/5.0.2+20230123" license = "MIT" keywords = ["people", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/people1/README.md b/gen/people1/README.md index 095255720e..1f1f13a6b1 100644 --- a/gen/people1/README.md +++ b/gen/people1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-people1` library allows access to all features of the *Google People Service* service. -This documentation was generated from *People Service* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *people:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *People Service* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *people:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *People Service* *v1* API can be found at the [official documentation site](https://developers.google.com/people/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/PeopleService) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-people1/5.0.2+20230123/google_people1/PeopleService) ... -* [contact groups](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::ContactGroup) - * [*batch get*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::ContactGroupBatchGetCall), [*create*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::ContactGroupCreateCall), [*delete*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::ContactGroupDeleteCall), [*get*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::ContactGroupGetCall), [*list*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::ContactGroupListCall), [*members modify*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::ContactGroupMemberModifyCall) and [*update*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::ContactGroupUpdateCall) +* [contact groups](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::ContactGroup) + * [*batch get*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::ContactGroupBatchGetCall), [*create*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::ContactGroupCreateCall), [*delete*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::ContactGroupDeleteCall), [*get*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::ContactGroupGetCall), [*list*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::ContactGroupListCall), [*members 
modify*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::ContactGroupMemberModifyCall) and [*update*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::ContactGroupUpdateCall) * other contacts - * [*copy other contact to my contacts group*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::OtherContactCopyOtherContactToMyContactsGroupCall), [*list*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::OtherContactListCall) and [*search*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::OtherContactSearchCall) -* [people](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::Person) - * [*batch create contacts*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonBatchCreateContactCall), [*batch delete contacts*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonBatchDeleteContactCall), [*batch update contacts*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonBatchUpdateContactCall), [*connections list*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonConnectionListCall), [*create contact*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonCreateContactCall), [*delete contact*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonDeleteContactCall), [*delete contact photo*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonDeleteContactPhotoCall), [*get*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonGetCall), [*get batch get*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonGetBatchGetCall), [*list directory people*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonListDirectoryPersonCall), [*search 
contacts*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonSearchContactCall), [*search directory people*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonSearchDirectoryPersonCall), [*update contact*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonUpdateContactCall) and [*update contact photo*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/api::PersonUpdateContactPhotoCall) + * [*copy other contact to my contacts group*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::OtherContactCopyOtherContactToMyContactsGroupCall), [*list*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::OtherContactListCall) and [*search*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::OtherContactSearchCall) +* [people](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::Person) + * [*batch create contacts*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonBatchCreateContactCall), [*batch delete contacts*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonBatchDeleteContactCall), [*batch update contacts*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonBatchUpdateContactCall), [*connections list*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonConnectionListCall), [*create contact*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonCreateContactCall), [*delete contact*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonDeleteContactCall), [*delete contact photo*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonDeleteContactPhotoCall), [*get*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonGetCall), [*get batch get*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonGetBatchGetCall), [*list directory 
people*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonListDirectoryPersonCall), [*search contacts*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonSearchContactCall), [*search directory people*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonSearchDirectoryPersonCall), [*update contact*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonUpdateContactCall) and [*update contact photo*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/api::PersonUpdateContactPhotoCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/PeopleService)** +* **[Hub](https://docs.rs/google-people1/5.0.2+20230123/google_people1/PeopleService)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::CallBuilder) -* **[Resources](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::CallBuilder) +* **[Resources](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::Part)** + * 
**[Parts](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::Delegate) to the -[Method Builder](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::Delegate) to the +[Method Builder](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::RequestValue) and -[decodable](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::RequestValue) and +[decodable](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-people1/5.0.2-beta-1+20230123/google_people1/client::RequestValue) are moved +* [request values](https://docs.rs/google-people1/5.0.2+20230123/google_people1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/people1/src/api.rs b/gen/people1/src/api.rs index eec5717534..fcdd54f657 100644 --- a/gen/people1/src/api.rs +++ b/gen/people1/src/api.rs @@ -167,7 +167,7 @@ impl<'a, S> PeopleService { PeopleService { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://people.googleapis.com/".to_string(), _root_url: "https://people.googleapis.com/".to_string(), } @@ -184,7 +184,7 @@ impl<'a, S> PeopleService { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/people1/src/client.rs b/gen/people1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/people1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/people1/src/lib.rs b/gen/people1/src/lib.rs index a987d0a8cf..86ba2c0733 100644 --- a/gen/people1/src/lib.rs +++ b/gen/people1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *People Service* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *people:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *People Service* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *people:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *People Service* *v1* API can be found at the //! [official documentation site](https://developers.google.com/people/). diff --git a/gen/photoslibrary1-cli/Cargo.toml b/gen/photoslibrary1-cli/Cargo.toml index 85b673e0b3..3d28a470b2 100644 --- a/gen/photoslibrary1-cli/Cargo.toml +++ b/gen/photoslibrary1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-photoslibrary1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Photos Library (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/photoslibrary1-cli" @@ -20,13 +20,13 @@ name = "photoslibrary1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-photoslibrary1] path = "../photoslibrary1" -version = "4.0.1+20220303" +version = "5.0.2+20230117" + diff --git a/gen/photoslibrary1-cli/README.md b/gen/photoslibrary1-cli/README.md index 7304e70689..2be48efaad 100644 --- a/gen/photoslibrary1-cli/README.md +++ b/gen/photoslibrary1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Photos Library* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *Photos Library* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash photoslibrary1 [options] diff --git a/gen/photoslibrary1-cli/mkdocs.yml b/gen/photoslibrary1-cli/mkdocs.yml index 961eff0830..42b168baf9 100644 --- a/gen/photoslibrary1-cli/mkdocs.yml +++ b/gen/photoslibrary1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Photos Library v4.0.1+20220303 +site_name: Photos Library v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-photoslibrary1-cli site_description: A complete library to interact with Photos Library (protocol v1) @@ -7,27 +7,30 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/photoslibrary1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['albums_add-enrichment.md', 'Albums', 'Add Enrichment'] -- ['albums_batch-add-media-items.md', 'Albums', 'Batch Add Media Items'] -- ['albums_batch-remove-media-items.md', 'Albums', 'Batch Remove Media Items'] -- ['albums_create.md', 'Albums', 'Create'] -- ['albums_get.md', 'Albums', 'Get'] -- ['albums_list.md', 'Albums', 'List'] -- ['albums_patch.md', 'Albums', 'Patch'] -- ['albums_share.md', 'Albums', 'Share'] -- ['albums_unshare.md', 'Albums', 'Unshare'] -- ['media-items_batch-create.md', 'Media Items', 'Batch Create'] -- ['media-items_batch-get.md', 'Media Items', 'Batch Get'] -- ['media-items_get.md', 'Media Items', 'Get'] -- ['media-items_list.md', 'Media Items', 'List'] -- ['media-items_patch.md', 'Media Items', 'Patch'] -- ['media-items_search.md', 'Media Items', 'Search'] -- ['shared-albums_get.md', 'Shared Albums', 'Get'] -- ['shared-albums_join.md', 'Shared Albums', 'Join'] -- ['shared-albums_leave.md', 'Shared Albums', 'Leave'] -- ['shared-albums_list.md', 'Shared Albums', 'List'] +nav: +- Home: 'index.md' 
+- 'Albums': + - 'Add Enrichment': 'albums_add-enrichment.md' + - 'Batch Add Media Items': 'albums_batch-add-media-items.md' + - 'Batch Remove Media Items': 'albums_batch-remove-media-items.md' + - 'Create': 'albums_create.md' + - 'Get': 'albums_get.md' + - 'List': 'albums_list.md' + - 'Patch': 'albums_patch.md' + - 'Share': 'albums_share.md' + - 'Unshare': 'albums_unshare.md' +- 'Media Items': + - 'Batch Create': 'media-items_batch-create.md' + - 'Batch Get': 'media-items_batch-get.md' + - 'Get': 'media-items_get.md' + - 'List': 'media-items_list.md' + - 'Patch': 'media-items_patch.md' + - 'Search': 'media-items_search.md' +- 'Shared Albums': + - 'Get': 'shared-albums_get.md' + - 'Join': 'shared-albums_join.md' + - 'Leave': 'shared-albums_leave.md' + - 'List': 'shared-albums_list.md' theme: readthedocs diff --git a/gen/photoslibrary1-cli/src/client.rs b/gen/photoslibrary1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/photoslibrary1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/photoslibrary1-cli/src/main.rs b/gen/photoslibrary1-cli/src/main.rs index e175b977e9..8d1dbba90a 100644 --- a/gen/photoslibrary1-cli/src/main.rs +++ b/gen/photoslibrary1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_photoslibrary1::{api, Error, oauth2}; +use google_photoslibrary1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -478,10 +477,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "exclude-non-app-created-data" => { - call = call.exclude_non_app_created_data(arg_from_str(value.unwrap_or("false"), err, "exclude-non-app-created-data", "boolean")); + call = call.exclude_non_app_created_data( value.map(|v| arg_from_str(v, err, "exclude-non-app-created-data", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -583,7 +582,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1008,7 +1007,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1117,7 +1116,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1492,10 +1491,10 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "exclude-non-app-created-data" => { - call = call.exclude_non_app_created_data(arg_from_str(value.unwrap_or("false"), err, "exclude-non-app-created-data", "boolean")); + call = call.exclude_non_app_created_data( value.map(|v| arg_from_str(v, err, "exclude-non-app-created-data", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2152,7 +2151,7 @@ async fn main() { let mut app = App::new("photoslibrary1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230117") .about("Manage photos, videos, and albums in Google Photos ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_photoslibrary1_cli") .arg(Arg::with_name("url") diff --git a/gen/photoslibrary1/Cargo.toml b/gen/photoslibrary1/Cargo.toml index 00ee895ab9..e5c23d1d3a 100644 --- a/gen/photoslibrary1/Cargo.toml +++ b/gen/photoslibrary1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-photoslibrary1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Photos Library (protocol v1)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/photoslibrary1" homepage = "https://developers.google.com/photos/" -documentation = "https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-photoslibrary1/5.0.2+20230117" license = "MIT" keywords = ["photoslibrary", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/photoslibrary1/README.md b/gen/photoslibrary1/README.md index 058035f10a..d5c72b8cfe 100644 --- a/gen/photoslibrary1/README.md +++ b/gen/photoslibrary1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-photoslibrary1` library allows access to all features of the *Google Photos Library* service. -This documentation was generated from *Photos Library* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *photoslibrary:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Photos Library* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *photoslibrary:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Photos Library* *v1* API can be found at the [official documentation site](https://developers.google.com/photos/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/PhotosLibrary) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/PhotosLibrary) ... 
-* [albums](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::Album) - * [*add enrichment*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumAddEnrichmentCall), [*batch add media items*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumBatchAddMediaItemCall), [*batch remove media items*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumBatchRemoveMediaItemCall), [*create*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumCreateCall), [*get*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumGetCall), [*list*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumListCall), [*patch*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumPatchCall), [*share*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumShareCall) and [*unshare*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::AlbumUnshareCall) -* [media items](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::MediaItem) - * [*batch create*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::MediaItemBatchCreateCall), [*batch get*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::MediaItemBatchGetCall), [*get*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::MediaItemGetCall), [*list*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::MediaItemListCall), [*patch*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::MediaItemPatchCall) and 
[*search*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::MediaItemSearchCall) +* [albums](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::Album) + * [*add enrichment*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumAddEnrichmentCall), [*batch add media items*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumBatchAddMediaItemCall), [*batch remove media items*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumBatchRemoveMediaItemCall), [*create*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumCreateCall), [*get*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumGetCall), [*list*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumListCall), [*patch*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumPatchCall), [*share*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumShareCall) and [*unshare*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::AlbumUnshareCall) +* [media items](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::MediaItem) + * [*batch create*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::MediaItemBatchCreateCall), [*batch get*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::MediaItemBatchGetCall), [*get*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::MediaItemGetCall), [*list*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::MediaItemListCall), [*patch*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::MediaItemPatchCall) and 
[*search*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::MediaItemSearchCall) * shared albums - * [*get*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::SharedAlbumGetCall), [*join*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::SharedAlbumJoinCall), [*leave*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::SharedAlbumLeaveCall) and [*list*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/api::SharedAlbumListCall) + * [*get*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::SharedAlbumGetCall), [*join*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::SharedAlbumJoinCall), [*leave*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::SharedAlbumLeaveCall) and [*list*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/api::SharedAlbumListCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/PhotosLibrary)** +* **[Hub](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/PhotosLibrary)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::CallBuilder) -* **[Resources](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::CallBuilder) +* **[Resources](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::Part)** + * **[Parts](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::Delegate) to the -[Method Builder](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::Delegate) to the +[Method Builder](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::RequestValue) and -[decodable](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::RequestValue) and +[decodable](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-photoslibrary1/5.0.2-beta-1+20230117/google_photoslibrary1/client::RequestValue) are moved +* [request values](https://docs.rs/google-photoslibrary1/5.0.2+20230117/google_photoslibrary1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/photoslibrary1/src/api.rs b/gen/photoslibrary1/src/api.rs index 5bc5c85bce..95223ec1bc 100644 --- a/gen/photoslibrary1/src/api.rs +++ b/gen/photoslibrary1/src/api.rs @@ -142,7 +142,7 @@ impl<'a, S> PhotosLibrary { PhotosLibrary { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://photoslibrary.googleapis.com/".to_string(), _root_url: "https://photoslibrary.googleapis.com/".to_string(), } @@ -159,7 +159,7 @@ impl<'a, S> PhotosLibrary { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/photoslibrary1/src/client.rs b/gen/photoslibrary1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/photoslibrary1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/photoslibrary1/src/lib.rs b/gen/photoslibrary1/src/lib.rs index b3d63d62b3..c715e3b2ae 100644 --- a/gen/photoslibrary1/src/lib.rs +++ b/gen/photoslibrary1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Photos Library* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *photoslibrary:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Photos Library* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *photoslibrary:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Photos Library* *v1* API can be found at the //! [official documentation site](https://developers.google.com/photos/). diff --git a/gen/playablelocations3-cli/Cargo.toml b/gen/playablelocations3-cli/Cargo.toml index fc9f1ee1a3..c9a9467b54 100644 --- a/gen/playablelocations3-cli/Cargo.toml +++ b/gen/playablelocations3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-playablelocations3-cli" -version = "4.0.1+20200707" +version = "5.0.2+20200707" authors = ["Sebastian Thiel "] description = "A complete library to interact with Playable Locations (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/playablelocations3-cli" @@ -20,13 +20,13 @@ name = "playablelocations3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-playablelocations3] path = "../playablelocations3" -version = "4.0.1+20200707" +version = "5.0.2+20200707" + diff --git a/gen/playablelocations3-cli/README.md b/gen/playablelocations3-cli/README.md index e1a4b49768..12a40ed153 100644 --- a/gen/playablelocations3-cli/README.md +++ 
b/gen/playablelocations3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Playable Locations* API at revision *20200707*. The CLI is at version *4.0.1*. +This documentation was generated from the *Playable Locations* API at revision *20200707*. The CLI is at version *5.0.2*. ```bash playablelocations3 [options] diff --git a/gen/playablelocations3-cli/mkdocs.yml b/gen/playablelocations3-cli/mkdocs.yml index 907f02a645..48d2e98794 100644 --- a/gen/playablelocations3-cli/mkdocs.yml +++ b/gen/playablelocations3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Playable Locations v4.0.1+20200707 +site_name: Playable Locations v5.0.2+20200707 site_url: http://byron.github.io/google-apis-rs/google-playablelocations3-cli site_description: A complete library to interact with Playable Locations (protocol v3) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/playablelocation docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['methods_log-impressions.md', 'Methods', 'Log Impressions'] -- ['methods_log-player-reports.md', 'Methods', 'Log Player Reports'] -- ['methods_sample-playable-locations.md', 'Methods', 'Sample Playable Locations'] +nav: +- Home: 'index.md' +- 'Methods': + - 'Log Impressions': 'methods_log-impressions.md' + - 'Log Player Reports': 'methods_log-player-reports.md' + - 'Sample Playable Locations': 'methods_sample-playable-locations.md' theme: readthedocs diff --git a/gen/playablelocations3-cli/src/client.rs b/gen/playablelocations3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/playablelocations3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - 
-use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/playablelocations3-cli/src/main.rs b/gen/playablelocations3-cli/src/main.rs index aa94194ea9..b518eb379e 100644 --- a/gen/playablelocations3-cli/src/main.rs +++ b/gen/playablelocations3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_playablelocations3::{api, Error, oauth2}; +use google_playablelocations3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -492,7 +491,7 @@ async fn main() { let mut app = App::new("playablelocations3") .author("Sebastian Thiel ") - .version("4.0.1+20200707") + .version("5.0.2+20200707") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_playablelocations3_cli") .arg(Arg::with_name("folder") diff --git a/gen/playablelocations3/Cargo.toml b/gen/playablelocations3/Cargo.toml index 9998ca6ee3..49eef7d866 100644 --- a/gen/playablelocations3/Cargo.toml +++ b/gen/playablelocations3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-playablelocations3" -version = "5.0.2-beta-1+20200707" +version = "5.0.2+20200707" authors = ["Sebastian Thiel "] description = "A complete library to interact with Playable Locations (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/playablelocations3" homepage = "https://developers.google.com/maps/contact-sales/" -documentation = "https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707" +documentation = 
"https://docs.rs/google-playablelocations3/5.0.2+20200707" license = "MIT" keywords = ["playablelocations", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/playablelocations3/README.md b/gen/playablelocations3/README.md index 2ca04da867..ccce5df9ab 100644 --- a/gen/playablelocations3/README.md +++ b/gen/playablelocations3/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-playablelocations3` library allows access to all features of the *Google Playable Locations* service. -This documentation was generated from *Playable Locations* crate version *5.0.2-beta-1+20200707*, where *20200707* is the exact revision of the *playablelocations:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Playable Locations* crate version *5.0.2+20200707*, where *20200707* is the exact revision of the *playablelocations:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Playable Locations* *v3* API can be found at the [official documentation site](https://developers.google.com/maps/contact-sales/). # Features -Use the following functionality with ease from the central [hub](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/PlayableLocations) ... +Use the following functionality with ease from the central [hub](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/PlayableLocations) ... 
-* [log impressions](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/api::MethodLogImpressionCall) -* [log player reports](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/api::MethodLogPlayerReportCall) -* [sample playable locations](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/api::MethodSamplePlayableLocationCall) +* [log impressions](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/api::MethodLogImpressionCall) +* [log player reports](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/api::MethodLogPlayerReportCall) +* [sample playable locations](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/api::MethodSamplePlayableLocationCall) @@ -24,17 +24,17 @@ Use the following functionality with ease from the central [hub](https://docs.rs The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/PlayableLocations)** +* **[Hub](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/PlayableLocations)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::CallBuilder) -* **[Resources](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::CallBuilder) +* **[Resources](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::Part)** + * **[Parts](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::Delegate) to the -[Method Builder](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::Delegate) to the +[Method Builder](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::RequestValue) and -[decodable](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::RequestValue) and +[decodable](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-playablelocations3/5.0.2-beta-1+20200707/google_playablelocations3/client::RequestValue) are moved +* [request values](https://docs.rs/google-playablelocations3/5.0.2+20200707/google_playablelocations3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/playablelocations3/src/api.rs b/gen/playablelocations3/src/api.rs index c4900d43bd..a0ed777495 100644 --- a/gen/playablelocations3/src/api.rs +++ b/gen/playablelocations3/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> PlayableLocations { PlayableLocations { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://playablelocations.googleapis.com/".to_string(), _root_url: "https://playablelocations.googleapis.com/".to_string(), } @@ -114,7 +114,7 @@ impl<'a, S> PlayableLocations { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/playablelocations3/src/client.rs b/gen/playablelocations3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/playablelocations3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/playablelocations3/src/lib.rs b/gen/playablelocations3/src/lib.rs index 3c5995d2ba..60c6fa1920 100644 --- a/gen/playablelocations3/src/lib.rs +++ b/gen/playablelocations3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Playable Locations* crate version *5.0.2-beta-1+20200707*, where *20200707* is the exact revision of the *playablelocations:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Playable Locations* crate version *5.0.2+20200707*, where *20200707* is the exact revision of the *playablelocations:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Playable Locations* *v3* API can be found at the //! [official documentation site](https://developers.google.com/maps/contact-sales/). diff --git a/gen/playcustomapp1-cli/Cargo.toml b/gen/playcustomapp1-cli/Cargo.toml index 3e0cf048a9..967d8404ec 100644 --- a/gen/playcustomapp1-cli/Cargo.toml +++ b/gen/playcustomapp1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-playcustomapp1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with playcustomapp (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/playcustomapp1-cli" @@ -20,13 +20,13 @@ name = "playcustomapp1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-playcustomapp1] path = "../playcustomapp1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/playcustomapp1-cli/README.md b/gen/playcustomapp1-cli/README.md index 718320a891..6c5a52c9f3 100644 --- a/gen/playcustomapp1-cli/README.md +++ b/gen/playcustomapp1-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *playcustomapp* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *playcustomapp* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash playcustomapp1 [options] diff --git a/gen/playcustomapp1-cli/mkdocs.yml b/gen/playcustomapp1-cli/mkdocs.yml index ab1ef33579..138e0553b7 100644 --- a/gen/playcustomapp1-cli/mkdocs.yml +++ b/gen/playcustomapp1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: playcustomapp v4.0.1+20220305 +site_name: playcustomapp v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-playcustomapp1-cli site_description: A complete library to interact with playcustomapp (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/playcustomapp1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_custom-apps-create.md', 'Accounts', 'Custom Apps Create'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Custom Apps Create': 'accounts_custom-apps-create.md' theme: readthedocs diff --git a/gen/playcustomapp1-cli/src/client.rs b/gen/playcustomapp1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/playcustomapp1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// 
U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/playcustomapp1-cli/src/main.rs b/gen/playcustomapp1-cli/src/main.rs index 128a263694..7c81235efc 100644 --- a/gen/playcustomapp1-cli/src/main.rs +++ b/gen/playcustomapp1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_playcustomapp1::{api, Error, oauth2}; +use google_playcustomapp1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -272,7 +271,7 @@ async fn main() { let mut app = App::new("playcustomapp1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("API to create and publish custom Android apps") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_playcustomapp1_cli") .arg(Arg::with_name("url") diff --git a/gen/playcustomapp1/Cargo.toml b/gen/playcustomapp1/Cargo.toml index 4e6ff4a73b..ebeaea9b39 100644 --- a/gen/playcustomapp1/Cargo.toml +++ b/gen/playcustomapp1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-playcustomapp1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with playcustomapp (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/playcustomapp1" homepage = "https://developers.google.com/android/work/play/custom-app-api/" -documentation = "https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123" +documentation = 
"https://docs.rs/google-playcustomapp1/5.0.2+20230123" license = "MIT" keywords = ["playcustomapp", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/playcustomapp1/README.md b/gen/playcustomapp1/README.md index a6332a743c..ddadc428ac 100644 --- a/gen/playcustomapp1/README.md +++ b/gen/playcustomapp1/README.md @@ -5,21 +5,21 @@ DO NOT EDIT ! --> The `google-playcustomapp1` library allows access to all features of the *Google playcustomapp* service. -This documentation was generated from *playcustomapp* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *playcustomapp:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *playcustomapp* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *playcustomapp:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *playcustomapp* *v1* API can be found at the [official documentation site](https://developers.google.com/android/work/play/custom-app-api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/Playcustomapp) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/Playcustomapp) ... * accounts - * [*custom apps create*](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/api::AccountCustomAppCreateCall) + * [*custom apps create*](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/api::AccountCustomAppCreateCall) Upload supported by ... 
-* [*custom apps create accounts*](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/api::AccountCustomAppCreateCall) +* [*custom apps create accounts*](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/api::AccountCustomAppCreateCall) @@ -27,17 +27,17 @@ Upload supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/Playcustomapp)** +* **[Hub](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/Playcustomapp)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::CallBuilder) -* **[Resources](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::CallBuilder) +* **[Resources](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::Part)** + * **[Parts](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::CallBuilder)** 
+* **[Activities](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::Delegate) to the -[Method Builder](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::Delegate) to the +[Method Builder](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::RequestValue) and -[decodable](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::RequestValue) and +[decodable](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-playcustomapp1/5.0.2-beta-1+20230123/google_playcustomapp1/client::RequestValue) are moved +* [request values](https://docs.rs/google-playcustomapp1/5.0.2+20230123/google_playcustomapp1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/playcustomapp1/src/api.rs b/gen/playcustomapp1/src/api.rs index 0d25863d74..2fee005b42 100644 --- a/gen/playcustomapp1/src/api.rs +++ b/gen/playcustomapp1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Playcustomapp { Playcustomapp { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://playcustomapp.googleapis.com/".to_string(), _root_url: "https://playcustomapp.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Playcustomapp { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/playcustomapp1/src/client.rs b/gen/playcustomapp1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/playcustomapp1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/playcustomapp1/src/lib.rs b/gen/playcustomapp1/src/lib.rs index 12732b6064..b2f30fb0ce 100644 --- a/gen/playcustomapp1/src/lib.rs +++ b/gen/playcustomapp1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *playcustomapp* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *playcustomapp:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *playcustomapp* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *playcustomapp:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *playcustomapp* *v1* API can be found at the //! [official documentation site](https://developers.google.com/android/work/play/custom-app-api/). diff --git a/gen/playintegrity1-cli/Cargo.toml b/gen/playintegrity1-cli/Cargo.toml index 5c6df840f0..3f48e9dce5 100644 --- a/gen/playintegrity1-cli/Cargo.toml +++ b/gen/playintegrity1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-playintegrity1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Play Integrity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/playintegrity1-cli" @@ -20,13 +20,13 @@ name = "playintegrity1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-playintegrity1] path = "../playintegrity1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/playintegrity1-cli/README.md b/gen/playintegrity1-cli/README.md index 17b4f08068..9eeab06817 100644 --- a/gen/playintegrity1-cli/README.md +++ b/gen/playintegrity1-cli/README.md @@ -25,7 +25,7 @@ 
Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Play Integrity* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Play Integrity* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash playintegrity1 [options] diff --git a/gen/playintegrity1-cli/mkdocs.yml b/gen/playintegrity1-cli/mkdocs.yml index 6da88027ea..9c0846309a 100644 --- a/gen/playintegrity1-cli/mkdocs.yml +++ b/gen/playintegrity1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Play Integrity v4.0.1+20220305 +site_name: Play Integrity v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-playintegrity1-cli site_description: A complete library to interact with Play Integrity (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/playintegrity1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['methods_decode-integrity-token.md', 'Methods', 'Decode Integrity Token'] +nav: +- Home: 'index.md' +- 'Methods': + - 'Decode Integrity Token': 'methods_decode-integrity-token.md' theme: readthedocs diff --git a/gen/playintegrity1-cli/src/client.rs b/gen/playintegrity1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/playintegrity1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), 
-// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/playintegrity1-cli/src/main.rs b/gen/playintegrity1-cli/src/main.rs index daa607aad7..682843bb3d 100644 --- a/gen/playintegrity1-cli/src/main.rs +++ b/gen/playintegrity1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_playintegrity1::{api, Error, oauth2}; +use google_playintegrity1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -260,8 +259,8 @@ async fn main() { let mut app = App::new("playintegrity1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") - .about("Play Integrity") + .version("5.0.2+20230123") + .about("The Play Integrity API helps you check that you're interacting with your genuine app on a genuine Android device powered by Google Play services. 
The Play Integrity API has replaced SafetyNet Attestation and Android Device Verification.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_playintegrity1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/playintegrity1/Cargo.toml b/gen/playintegrity1/Cargo.toml index f285ba1cc7..738d82840c 100644 --- a/gen/playintegrity1/Cargo.toml +++ b/gen/playintegrity1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-playintegrity1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Play Integrity (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/playintegrity1" homepage = "https://developer.android.com/google/play/integrity" -documentation = "https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-playintegrity1/5.0.2+20230123" license = "MIT" keywords = ["playintegrity", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/playintegrity1/README.md b/gen/playintegrity1/README.md index af2b8907ea..30154d35e6 100644 --- a/gen/playintegrity1/README.md +++ b/gen/playintegrity1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-playintegrity1` library allows access to all features of the *Google Play Integrity* service. -This documentation was generated from *Play Integrity* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *playintegrity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Play Integrity* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *playintegrity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Play Integrity* *v1* API can be found at the [official documentation site](https://developer.android.com/google/play/integrity). # Features -Use the following functionality with ease from the central [hub](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/PlayIntegrity) ... +Use the following functionality with ease from the central [hub](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/PlayIntegrity) ... -* [decode integrity token](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/api::MethodDecodeIntegrityTokenCall) +* [decode integrity token](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/api::MethodDecodeIntegrityTokenCall) @@ -22,17 +22,17 @@ Use the following functionality with ease from the central [hub](https://docs.rs The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/PlayIntegrity)** +* **[Hub](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/PlayIntegrity)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::CallBuilder) -* **[Resources](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::Part)** + * **[Parts](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -123,17 +123,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -143,29 +143,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::Delegate) to the -[Method Builder](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::Delegate) to the +[Method Builder](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::RequestValue) and -[decodable](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::RequestValue) and +[decodable](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-playintegrity1/5.0.2-beta-1+20230123/google_playintegrity1/client::RequestValue) are moved +* [request values](https://docs.rs/google-playintegrity1/5.0.2+20230123/google_playintegrity1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/playintegrity1/src/api.rs b/gen/playintegrity1/src/api.rs index 97527432d3..5b7efd69d1 100644 --- a/gen/playintegrity1/src/api.rs +++ b/gen/playintegrity1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> PlayIntegrity { PlayIntegrity { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://playintegrity.googleapis.com/".to_string(), _root_url: "https://playintegrity.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> PlayIntegrity { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/playintegrity1/src/client.rs b/gen/playintegrity1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/playintegrity1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/playintegrity1/src/lib.rs b/gen/playintegrity1/src/lib.rs index 19f99ee476..be2a11fd05 100644 --- a/gen/playintegrity1/src/lib.rs +++ b/gen/playintegrity1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Play Integrity* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *playintegrity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Play Integrity* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *playintegrity:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Play Integrity* *v1* API can be found at the //! [official documentation site](https://developer.android.com/google/play/integrity). diff --git a/gen/playmoviespartner1-cli/Cargo.toml b/gen/playmoviespartner1-cli/Cargo.toml index 80f576b975..19eb64bc51 100644 --- a/gen/playmoviespartner1-cli/Cargo.toml +++ b/gen/playmoviespartner1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-playmoviespartner1-cli" -version = "4.0.1+20170919" +version = "5.0.2+20170919" authors = ["Sebastian Thiel "] description = "A complete library to interact with Play Movies (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/playmoviespartner1-cli" @@ -20,13 +20,13 @@ name = "playmoviespartner1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-playmoviespartner1] path = "../playmoviespartner1" -version = "4.0.1+20170919" +version = "5.0.2+20170919" + diff --git a/gen/playmoviespartner1-cli/README.md b/gen/playmoviespartner1-cli/README.md index 30bb2fa991..c0dc328ded 100644 --- a/gen/playmoviespartner1-cli/README.md +++ 
b/gen/playmoviespartner1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Play Movies* API at revision *20170919*. The CLI is at version *4.0.1*. +This documentation was generated from the *Play Movies* API at revision *20170919*. The CLI is at version *5.0.2*. ```bash playmoviespartner1 [options] diff --git a/gen/playmoviespartner1-cli/mkdocs.yml b/gen/playmoviespartner1-cli/mkdocs.yml index 3707939bf7..9ca84eeb01 100644 --- a/gen/playmoviespartner1-cli/mkdocs.yml +++ b/gen/playmoviespartner1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Play Movies v4.0.1+20170919 +site_name: Play Movies v5.0.2+20170919 site_url: http://byron.github.io/google-apis-rs/google-playmoviespartner1-cli site_description: A complete library to interact with Play Movies (protocol v1) @@ -7,14 +7,15 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/playmoviespartne docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_avails-get.md', 'Accounts', 'Avails Get'] -- ['accounts_avails-list.md', 'Accounts', 'Avails List'] -- ['accounts_orders-get.md', 'Accounts', 'Orders Get'] -- ['accounts_orders-list.md', 'Accounts', 'Orders List'] -- ['accounts_store-infos-country-get.md', 'Accounts', 'Store Infos Country Get'] -- ['accounts_store-infos-list.md', 'Accounts', 'Store Infos List'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Avails Get': 'accounts_avails-get.md' + - 'Avails List': 'accounts_avails-list.md' + - 'Orders Get': 'accounts_orders-get.md' + - 'Orders List': 'accounts_orders-list.md' + - 'Store Infos Country Get': 'accounts_store-infos-country-get.md' + - 'Store Infos List': 'accounts_store-infos-list.md' theme: readthedocs diff --git a/gen/playmoviespartner1-cli/src/client.rs b/gen/playmoviespartner1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/playmoviespartner1-cli/src/client.rs +++ 
/dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - 
Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - 
num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, 
"int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - 
err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - 
DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref 
s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/playmoviespartner1-cli/src/main.rs b/gen/playmoviespartner1-cli/src/main.rs index cc347b62a4..d95ac402f5 100644 --- a/gen/playmoviespartner1-cli/src/main.rs +++ b/gen/playmoviespartner1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_playmoviespartner1::{api, Error, oauth2}; +use google_playmoviespartner1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -128,7 +127,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "alt-ids" => { call = call.add_alt_ids(value.unwrap_or("")); @@ -257,7 +256,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -389,7 +388,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -730,7 +729,7 @@ async fn main() { let mut 
app = App::new("playmoviespartner1") .author("Sebastian Thiel ") - .version("4.0.1+20170919") + .version("5.0.2+20170919") .about("Gets the delivery status of titles for Google Play Movies Partners.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_playmoviespartner1_cli") .arg(Arg::with_name("url") diff --git a/gen/playmoviespartner1/Cargo.toml b/gen/playmoviespartner1/Cargo.toml index a86710b5c7..4e9994e94c 100644 --- a/gen/playmoviespartner1/Cargo.toml +++ b/gen/playmoviespartner1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-playmoviespartner1" -version = "5.0.2-beta-1+20170919" +version = "5.0.2+20170919" authors = ["Sebastian Thiel "] description = "A complete library to interact with Play Movies (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/playmoviespartner1" homepage = "https://developers.google.com/playmoviespartner/" -documentation = "https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919" +documentation = "https://docs.rs/google-playmoviespartner1/5.0.2+20170919" license = "MIT" keywords = ["playmoviespartner", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/playmoviespartner1/README.md b/gen/playmoviespartner1/README.md index 7f8cc406e4..11cb573d30 100644 --- a/gen/playmoviespartner1/README.md +++ b/gen/playmoviespartner1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-playmoviespartner1` library allows access to all features of the *Google Play Movies* service. -This documentation was generated from *Play Movies* crate version *5.0.2-beta-1+20170919*, where *20170919* is the exact revision of the *playmoviespartner:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Play Movies* crate version *5.0.2+20170919*, where *20170919* is the exact revision of the *playmoviespartner:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Play Movies* *v1* API can be found at the [official documentation site](https://developers.google.com/playmoviespartner/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/PlayMovies) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/PlayMovies) ... * accounts - * [*avails get*](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/api::AccountAvailGetCall), [*avails list*](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/api::AccountAvailListCall), [*orders get*](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/api::AccountOrderGetCall), [*orders list*](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/api::AccountOrderListCall), [*store infos country get*](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/api::AccountStoreInfoCountryGetCall) and [*store infos list*](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/api::AccountStoreInfoListCall) + * [*avails get*](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/api::AccountAvailGetCall), [*avails list*](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/api::AccountAvailListCall), [*orders get*](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/api::AccountOrderGetCall), [*orders 
list*](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/api::AccountOrderListCall), [*store infos country get*](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/api::AccountStoreInfoCountryGetCall) and [*store infos list*](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/api::AccountStoreInfoListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/PlayMovies)** +* **[Hub](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/PlayMovies)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::CallBuilder) -* **[Resources](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::CallBuilder) +* **[Resources](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::Part)** + * 
**[Parts](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -118,17 +118,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -138,29 +138,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::Delegate) to the -[Method Builder](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::Delegate) to the +[Method Builder](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::RequestValue) and -[decodable](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::RequestValue) and +[decodable](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-playmoviespartner1/5.0.2-beta-1+20170919/google_playmoviespartner1/client::RequestValue) are moved +* [request values](https://docs.rs/google-playmoviespartner1/5.0.2+20170919/google_playmoviespartner1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/playmoviespartner1/src/api.rs b/gen/playmoviespartner1/src/api.rs index 371af8b61a..17f894e699 100644 --- a/gen/playmoviespartner1/src/api.rs +++ b/gen/playmoviespartner1/src/api.rs @@ -119,7 +119,7 @@ impl<'a, S> PlayMovies { PlayMovies { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://playmoviespartner.googleapis.com/".to_string(), _root_url: "https://playmoviespartner.googleapis.com/".to_string(), } @@ -130,7 +130,7 @@ impl<'a, S> PlayMovies { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/playmoviespartner1/src/client.rs b/gen/playmoviespartner1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/playmoviespartner1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/playmoviespartner1/src/lib.rs b/gen/playmoviespartner1/src/lib.rs index e79bbdf67e..e8632f2410 100644 --- a/gen/playmoviespartner1/src/lib.rs +++ b/gen/playmoviespartner1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Play Movies* crate version *5.0.2-beta-1+20170919*, where *20170919* is the exact revision of the *playmoviespartner:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Play Movies* crate version *5.0.2+20170919*, where *20170919* is the exact revision of the *playmoviespartner:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Play Movies* *v1* API can be found at the //! [official documentation site](https://developers.google.com/playmoviespartner/). diff --git a/gen/plus1-cli/Cargo.toml b/gen/plus1-cli/Cargo.toml index 41b201e21e..79d8c29026 100644 --- a/gen/plus1-cli/Cargo.toml +++ b/gen/plus1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-plus1-cli" -version = "4.0.1+20190616" +version = "5.0.2+20190616" authors = ["Sebastian Thiel "] description = "A complete library to interact with plus (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/plus1-cli" @@ -20,13 +20,13 @@ name = "plus1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-plus1] path = "../plus1" -version = "4.0.1+20190616" +version = "5.0.2+20190616" + diff --git a/gen/plus1-cli/README.md b/gen/plus1-cli/README.md index c54b812d2f..0f9b9e7591 100644 --- a/gen/plus1-cli/README.md +++ b/gen/plus1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *plus* 
API at revision *20190616*. The CLI is at version *4.0.1*. +This documentation was generated from the *plus* API at revision *20190616*. The CLI is at version *5.0.2*. ```bash plus1 [options] diff --git a/gen/plus1-cli/mkdocs.yml b/gen/plus1-cli/mkdocs.yml index d7b3351419..e15bb174e5 100644 --- a/gen/plus1-cli/mkdocs.yml +++ b/gen/plus1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: plus v4.0.1+20190616 +site_name: plus v5.0.2+20190616 site_url: http://byron.github.io/google-apis-rs/google-plus1-cli site_description: A complete library to interact with plus (protocol v1) @@ -7,17 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/plus1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['activities_get.md', 'Activities', 'Get'] -- ['activities_list.md', 'Activities', 'List'] -- ['activities_search.md', 'Activities', 'Search'] -- ['comments_get.md', 'Comments', 'Get'] -- ['comments_list.md', 'Comments', 'List'] -- ['people_get.md', 'People', 'Get'] -- ['people_list.md', 'People', 'List'] -- ['people_list-by-activity.md', 'People', 'List By Activity'] -- ['people_search.md', 'People', 'Search'] +nav: +- Home: 'index.md' +- 'Activities': + - 'Get': 'activities_get.md' + - 'List': 'activities_list.md' + - 'Search': 'activities_search.md' +- 'Comments': + - 'Get': 'comments_get.md' + - 'List': 'comments_list.md' +- 'People': + - 'Get': 'people_get.md' + - 'List': 'people_list.md' + - 'List By Activity': 'people_list-by-activity.md' + - 'Search': 'people_search.md' theme: readthedocs diff --git a/gen/plus1-cli/src/client.rs b/gen/plus1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/plus1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error 
as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/plus1-cli/src/main.rs b/gen/plus1-cli/src/main.rs index 3b929cfc46..57c387087e 100644 --- a/gen/plus1-cli/src/main.rs +++ b/gen/plus1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_plus1::{api, Error, oauth2}; +use google_plus1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -175,7 +174,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -292,7 +291,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -406,7 +405,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -465,7 +464,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -524,7 +523,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "language" => { call = call.language(value.unwrap_or("")); @@ -931,7 +930,7 @@ async fn main() { let mut app = App::new("plus1") .author("Sebastian Thiel ") - .version("4.0.1+20190616") + .version("5.0.2+20190616") .about("Builds on top of the Google+ platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_plus1_cli") .arg(Arg::with_name("url") diff --git a/gen/plus1/Cargo.toml b/gen/plus1/Cargo.toml index 01ecb05eea..e16ab22b16 100644 --- a/gen/plus1/Cargo.toml +++ b/gen/plus1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-plus1" -version = "5.0.2-beta-1+20190616" +version = "5.0.2+20190616" authors = ["Sebastian Thiel "] description = "A complete library to interact with plus (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/plus1" homepage = "https://developers.google.com/+/api/" -documentation = "https://docs.rs/google-plus1/5.0.2-beta-1+20190616" +documentation = "https://docs.rs/google-plus1/5.0.2+20190616" license = "MIT" keywords = ["plus", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/plus1/README.md 
b/gen/plus1/README.md index f1aa8de784..6884995551 100644 --- a/gen/plus1/README.md +++ b/gen/plus1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-plus1` library allows access to all features of the *Google plus* service. -This documentation was generated from *plus* crate version *5.0.2-beta-1+20190616*, where *20190616* is the exact revision of the *plus:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *plus* crate version *5.0.2+20190616*, where *20190616* is the exact revision of the *plus:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *plus* *v1* API can be found at the [official documentation site](https://developers.google.com/+/api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/Plus) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/Plus) ... 
-* [activities](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::Activity) - * [*get*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::ActivityGetCall), [*list*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::ActivityListCall) and [*search*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::ActivitySearchCall) -* [comments](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::Comment) - * [*get*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::CommentGetCall) and [*list*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::CommentListCall) -* [people](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::Person) - * [*get*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::PersonGetCall), [*list*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::PersonListCall), [*list by activity*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::PersonListByActivityCall) and [*search*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/api::PersonSearchCall) +* [activities](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::Activity) + * [*get*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::ActivityGetCall), [*list*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::ActivityListCall) and [*search*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::ActivitySearchCall) +* [comments](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::Comment) + * [*get*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::CommentGetCall) and [*list*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::CommentListCall) +* [people](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::Person) + * [*get*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::PersonGetCall), 
[*list*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::PersonListCall), [*list by activity*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::PersonListByActivityCall) and [*search*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/api::PersonSearchCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/Plus)** +* **[Hub](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/Plus)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::CallBuilder) -* **[Resources](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::CallBuilder) +* **[Resources](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::Part)** + * **[Parts](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are 
marked with applicable traits to further categorize them and ease browsing. @@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::Delegate) to the -[Method Builder](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::Delegate) to the +[Method Builder](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::RequestValue) and -[decodable](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::RequestValue) and +[decodable](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-plus1/5.0.2-beta-1+20190616/google_plus1/client::RequestValue) are moved +* [request values](https://docs.rs/google-plus1/5.0.2+20190616/google_plus1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/plus1/src/api.rs b/gen/plus1/src/api.rs index e0eb84a749..de222f3896 100644 --- a/gen/plus1/src/api.rs +++ b/gen/plus1/src/api.rs @@ -134,7 +134,7 @@ impl<'a, S> Plus { Plus { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/plus/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -151,7 +151,7 @@ impl<'a, S> Plus { } /// Set the user-agent header field to use in all requests to the server. 
- /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/plus1/src/client.rs b/gen/plus1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/plus1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/plus1/src/lib.rs b/gen/plus1/src/lib.rs index 8329d39b90..f02c887642 100644 --- a/gen/plus1/src/lib.rs +++ b/gen/plus1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *plus* crate version *5.0.2-beta-1+20190616*, where *20190616* is the exact revision of the *plus:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *plus* crate version *5.0.2+20190616*, where *20190616* is the exact revision of the *plus:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *plus* *v1* API can be found at the //! [official documentation site](https://developers.google.com/+/api/). diff --git a/gen/plusdomains1-cli/Cargo.toml b/gen/plusdomains1-cli/Cargo.toml index 6ed2bfbee3..4c40bad697 100644 --- a/gen/plusdomains1-cli/Cargo.toml +++ b/gen/plusdomains1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-plusdomains1-cli" -version = "4.0.1+20190616" +version = "5.0.2+20190616" authors = ["Sebastian Thiel "] description = "A complete library to interact with plusDomains (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/plusdomains1-cli" @@ -20,13 +20,13 @@ name = "plusdomains1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-plusdomains1] path = "../plusdomains1" -version = "4.0.1+20190616" +version = "5.0.2+20190616" + diff --git a/gen/plusdomains1-cli/README.md b/gen/plusdomains1-cli/README.md index 385d5d75de..35e3cb48c4 100644 --- a/gen/plusdomains1-cli/README.md +++ b/gen/plusdomains1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *plusDomains* API at revision *20190616*. The CLI is at version *4.0.1*. +This documentation was generated from the *plusDomains* API at revision *20190616*. The CLI is at version *5.0.2*. ```bash plusdomains1 [options] diff --git a/gen/plusdomains1-cli/mkdocs.yml b/gen/plusdomains1-cli/mkdocs.yml index 862198dda8..d9eca5877b 100644 --- a/gen/plusdomains1-cli/mkdocs.yml +++ b/gen/plusdomains1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: plusDomains v4.0.1+20190616 +site_name: plusDomains v5.0.2+20190616 site_url: http://byron.github.io/google-apis-rs/google-plusdomains1-cli site_description: A complete library to interact with plusDomains (protocol v1) @@ -7,18 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/plusdomains1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['activities_get.md', 'Activities', 'Get'] -- ['activities_list.md', 'Activities', 'List'] -- ['audiences_list.md', 'Audiences', 'List'] -- ['circles_list.md', 'Circles', 'List'] -- ['comments_get.md', 'Comments', 'Get'] -- ['comments_list.md', 'Comments', 'List'] -- ['media_insert.md', 'Media', 'Insert'] -- ['people_get.md', 'People', 'Get'] -- ['people_list.md', 'People', 'List'] -- ['people_list-by-activity.md', 'People', 'List By Activity'] +nav: +- Home: 'index.md' +- 'Activities': + - 'Get': 'activities_get.md' + - 'List': 'activities_list.md' +- 'Audiences': + - 'List': 'audiences_list.md' +- 'Circles': + - 'List': 'circles_list.md' +- 'Comments': + - 'Get': 'comments_get.md' + - 'List': 'comments_list.md' +- 'Media': + - 'Insert': 'media_insert.md' +- 'People': + - 'Get': 'people_get.md' + - 'List': 'people_list.md' + - 'List By Activity': 'people_list-by-activity.md' theme: readthedocs diff --git a/gen/plusdomains1-cli/src/client.rs b/gen/plusdomains1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/plusdomains1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 
'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - 
-arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/plusdomains1-cli/src/main.rs b/gen/plusdomains1-cli/src/main.rs index 520650d574..f12d949900 100644 --- a/gen/plusdomains1-cli/src/main.rs +++ b/gen/plusdomains1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_plusdomains1::{api, Error, oauth2}; +use google_plusdomains1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -172,7 +171,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -231,7 +230,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -345,7 +344,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -566,7 +565,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -625,7 +624,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -1106,7 +1105,7 @@ async fn main() { let mut app = App::new("plusdomains1") .author("Sebastian Thiel ") - .version("4.0.1+20190616") + .version("5.0.2+20190616") .about("Builds on top of the Google+ platform for Google Apps Domains.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_plusdomains1_cli") .arg(Arg::with_name("url") diff --git a/gen/plusdomains1/Cargo.toml b/gen/plusdomains1/Cargo.toml index 6ff482a94b..1690334886 100644 --- a/gen/plusdomains1/Cargo.toml +++ b/gen/plusdomains1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-plusdomains1" -version = "5.0.2-beta-1+20190616" +version = "5.0.2+20190616" authors = ["Sebastian Thiel "] description = "A complete library to interact with plusDomains (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/plusdomains1" homepage = "https://developers.google.com/+/domains/" -documentation = "https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616" +documentation = "https://docs.rs/google-plusdomains1/5.0.2+20190616" license = "MIT" keywords = ["plusDomains", "google", 
"protocol", "web", "api"] autobins = false diff --git a/gen/plusdomains1/README.md b/gen/plusdomains1/README.md index 569032d8db..d0f3273c3f 100644 --- a/gen/plusdomains1/README.md +++ b/gen/plusdomains1/README.md @@ -5,31 +5,31 @@ DO NOT EDIT ! --> The `google-plusdomains1` library allows access to all features of the *Google plusDomains* service. -This documentation was generated from *plusDomains* crate version *5.0.2-beta-1+20190616*, where *20190616* is the exact revision of the *plusDomains:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *plusDomains* crate version *5.0.2+20190616*, where *20190616* is the exact revision of the *plusDomains:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *plusDomains* *v1* API can be found at the [official documentation site](https://developers.google.com/+/domains/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/PlusDomains) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/PlusDomains) ... 
-* [activities](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::Activity) - * [*get*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::ActivityGetCall) and [*list*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::ActivityListCall) -* [audiences](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::Audience) - * [*list*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::AudienceListCall) -* [circles](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::Circle) - * [*list*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::CircleListCall) -* [comments](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::Comment) - * [*get*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::CommentGetCall) and [*list*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::CommentListCall) -* [media](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::Media) - * [*insert*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::MediaInsertCall) -* [people](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::Person) - * [*get*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::PersonGetCall), [*list*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::PersonListCall) and [*list by activity*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::PersonListByActivityCall) +* [activities](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::Activity) + * [*get*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::ActivityGetCall) and 
[*list*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::ActivityListCall) +* [audiences](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::Audience) + * [*list*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::AudienceListCall) +* [circles](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::Circle) + * [*list*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::CircleListCall) +* [comments](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::Comment) + * [*get*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::CommentGetCall) and [*list*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::CommentListCall) +* [media](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::Media) + * [*insert*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::MediaInsertCall) +* [people](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::Person) + * [*get*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::PersonGetCall), [*list*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::PersonListCall) and [*list by activity*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::PersonListByActivityCall) Upload supported by ... -* [*insert media*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/api::MediaInsertCall) +* [*insert media*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/api::MediaInsertCall) @@ -37,17 +37,17 @@ Upload supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/PlusDomains)** +* **[Hub](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/PlusDomains)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::CallBuilder) -* **[Resources](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::CallBuilder) +* **[Resources](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::Part)** + * **[Parts](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -136,17 +136,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -156,29 +156,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::Delegate) to the -[Method Builder](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::Delegate) to the +[Method Builder](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::RequestValue) and -[decodable](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::RequestValue) and +[decodable](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-plusdomains1/5.0.2-beta-1+20190616/google_plusdomains1/client::RequestValue) are moved +* [request values](https://docs.rs/google-plusdomains1/5.0.2+20190616/google_plusdomains1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/plusdomains1/src/api.rs b/gen/plusdomains1/src/api.rs index cd0447791d..61f7afe7c1 100644 --- a/gen/plusdomains1/src/api.rs +++ b/gen/plusdomains1/src/api.rs @@ -150,7 +150,7 @@ impl<'a, S> PlusDomains { PlusDomains { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/plusDomains/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -176,7 +176,7 @@ impl<'a, S> PlusDomains { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/plusdomains1/src/client.rs b/gen/plusdomains1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/plusdomains1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/plusdomains1/src/lib.rs b/gen/plusdomains1/src/lib.rs index b1bb28bd3e..5fd0e7954b 100644 --- a/gen/plusdomains1/src/lib.rs +++ b/gen/plusdomains1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *plusDomains* crate version *5.0.2-beta-1+20190616*, where *20190616* is the exact revision of the *plusDomains:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *plusDomains* crate version *5.0.2+20190616*, where *20190616* is the exact revision of the *plusDomains:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *plusDomains* *v1* API can be found at the //! [official documentation site](https://developers.google.com/+/domains/). diff --git a/gen/policyanalyzer1-cli/Cargo.toml b/gen/policyanalyzer1-cli/Cargo.toml index fb150b4786..0f7e66b9af 100644 --- a/gen/policyanalyzer1-cli/Cargo.toml +++ b/gen/policyanalyzer1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-policyanalyzer1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230122" authors = ["Sebastian Thiel "] description = "A complete library to interact with Policy Analyzer (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/policyanalyzer1-cli" @@ -20,13 +20,13 @@ name = "policyanalyzer1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-policyanalyzer1] path = "../policyanalyzer1" -version = "4.0.1+20220227" +version = "5.0.2+20230122" + diff --git a/gen/policyanalyzer1-cli/README.md b/gen/policyanalyzer1-cli/README.md index 58f4c28812..a2f1595cf7 100644 --- a/gen/policyanalyzer1-cli/README.md +++ b/gen/policyanalyzer1-cli/README.md @@ -25,7 +25,7 @@ Find the source 
code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Policy Analyzer* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *Policy Analyzer* API at revision *20230122*. The CLI is at version *5.0.2*. ```bash policyanalyzer1 [options] diff --git a/gen/policyanalyzer1-cli/mkdocs.yml b/gen/policyanalyzer1-cli/mkdocs.yml index 863691a222..be6845abf4 100644 --- a/gen/policyanalyzer1-cli/mkdocs.yml +++ b/gen/policyanalyzer1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Policy Analyzer v4.0.1+20220227 +site_name: Policy Analyzer v5.0.2+20230122 site_url: http://byron.github.io/google-apis-rs/google-policyanalyzer1-cli site_description: A complete library to interact with Policy Analyzer (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/policyanalyzer1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-activity-types-activities-query.md', 'Projects', 'Locations Activity Types Activities Query'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Activity Types Activities Query': 'projects_locations-activity-types-activities-query.md' theme: readthedocs diff --git a/gen/policyanalyzer1-cli/src/client.rs b/gen/policyanalyzer1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/policyanalyzer1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum 
ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/policyanalyzer1-cli/src/main.rs b/gen/policyanalyzer1-cli/src/main.rs index 2d9c9ec8af..fbcc99c958 100644 --- a/gen/policyanalyzer1-cli/src/main.rs +++ b/gen/policyanalyzer1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_policyanalyzer1::{api, Error, oauth2}; +use google_policyanalyzer1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -231,7 +230,7 @@ async fn main() { let mut app = App::new("policyanalyzer1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230122") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_policyanalyzer1_cli") .arg(Arg::with_name("url") diff --git a/gen/policyanalyzer1/Cargo.toml b/gen/policyanalyzer1/Cargo.toml index dad96aa54f..ad51cf95e7 100644 --- a/gen/policyanalyzer1/Cargo.toml +++ b/gen/policyanalyzer1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-policyanalyzer1" -version = "5.0.2-beta-1+20230122" +version = "5.0.2+20230122" authors = ["Sebastian Thiel "] description = "A complete library to interact with Policy Analyzer 
(protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/policyanalyzer1" homepage = "https://www.google.com" -documentation = "https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122" +documentation = "https://docs.rs/google-policyanalyzer1/5.0.2+20230122" license = "MIT" keywords = ["policyanalyzer", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/policyanalyzer1/README.md b/gen/policyanalyzer1/README.md index 47bd0a1dd1..40e8fe62fa 100644 --- a/gen/policyanalyzer1/README.md +++ b/gen/policyanalyzer1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-policyanalyzer1` library allows access to all features of the *Google Policy Analyzer* service. -This documentation was generated from *Policy Analyzer* crate version *5.0.2-beta-1+20230122*, where *20230122* is the exact revision of the *policyanalyzer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Policy Analyzer* crate version *5.0.2+20230122*, where *20230122* is the exact revision of the *policyanalyzer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Policy Analyzer* *v1* API can be found at the [official documentation site](https://www.google.com). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/PolicyAnalyzer) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/PolicyAnalyzer) ... 
* projects - * [*locations activity types activities query*](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/api::ProjectLocationActivityTypeActivityQueryCall) + * [*locations activity types activities query*](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/api::ProjectLocationActivityTypeActivityQueryCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/PolicyAnalyzer)** +* **[Hub](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/PolicyAnalyzer)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::CallBuilder) -* **[Resources](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::CallBuilder) +* **[Resources](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::Part)** + * **[Parts](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::Part)** * a collection of properties 
* never directly used in *Activities* -* **[Activities](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -121,17 +121,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -141,29 +141,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::Delegate) to the -[Method Builder](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::Delegate) to the +[Method Builder](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::RequestValue) and -[decodable](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::RequestValue) and +[decodable](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-policyanalyzer1/5.0.2-beta-1+20230122/google_policyanalyzer1/client::RequestValue) are moved +* [request values](https://docs.rs/google-policyanalyzer1/5.0.2+20230122/google_policyanalyzer1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/policyanalyzer1/src/api.rs b/gen/policyanalyzer1/src/api.rs index c8b54d6170..a8d110e867 100644 --- a/gen/policyanalyzer1/src/api.rs +++ b/gen/policyanalyzer1/src/api.rs @@ -122,7 +122,7 @@ impl<'a, S> PolicyAnalyzer { PolicyAnalyzer { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://policyanalyzer.googleapis.com/".to_string(), _root_url: "https://policyanalyzer.googleapis.com/".to_string(), } @@ -133,7 +133,7 @@ impl<'a, S> PolicyAnalyzer { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/policyanalyzer1/src/client.rs b/gen/policyanalyzer1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/policyanalyzer1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/policyanalyzer1/src/lib.rs b/gen/policyanalyzer1/src/lib.rs index 49e58a7534..6e08a59778 100644 --- a/gen/policyanalyzer1/src/lib.rs +++ b/gen/policyanalyzer1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Policy Analyzer* crate version *5.0.2-beta-1+20230122*, where *20230122* is the exact revision of the *policyanalyzer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Policy Analyzer* crate version *5.0.2+20230122*, where *20230122* is the exact revision of the *policyanalyzer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Policy Analyzer* *v1* API can be found at the //! [official documentation site](https://www.google.com). diff --git a/gen/policysimulator1-cli/Cargo.toml b/gen/policysimulator1-cli/Cargo.toml index f9bb346965..1db3fb47c1 100644 --- a/gen/policysimulator1-cli/Cargo.toml +++ b/gen/policysimulator1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-policysimulator1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Policy Simulator (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/policysimulator1-cli" @@ -20,13 +20,13 @@ name = "policysimulator1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-policysimulator1] path = "../policysimulator1" -version = "4.0.1+20220227" +version = "5.0.2+20230115" + diff --git a/gen/policysimulator1-cli/README.md b/gen/policysimulator1-cli/README.md index 46ff3cf262..bf74c26475 100644 --- a/gen/policysimulator1-cli/README.md +++ b/gen/policysimulator1-cli/README.md @@ -25,24 +25,30 @@ 
Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Policy Simulator* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *Policy Simulator* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash policysimulator1 [options] folders locations-replays-create (-r )... [-p ]... [-o ] locations-replays-get [-p ]... [-o ] + locations-replays-operations-get [-p ]... [-o ] + locations-replays-operations-list [-p ]... [-o ] locations-replays-results-list [-p ]... [-o ] operations get [-p ]... [-o ] - list [-p ]... [-o ] + list [-p ]... [-o ] organizations locations-replays-create (-r )... [-p ]... [-o ] locations-replays-get [-p ]... [-o ] + locations-replays-operations-get [-p ]... [-o ] + locations-replays-operations-list [-p ]... [-o ] locations-replays-results-list [-p ]... [-o ] projects locations-replays-create (-r )... [-p ]... [-o ] locations-replays-get [-p ]... [-o ] + locations-replays-operations-get [-p ]... [-o ] + locations-replays-operations-list [-p ]... [-o ] locations-replays-results-list [-p ]... 
[-o ] policysimulator1 --help diff --git a/gen/policysimulator1-cli/mkdocs.yml b/gen/policysimulator1-cli/mkdocs.yml index 94d3d9bc48..ede07ae0e5 100644 --- a/gen/policysimulator1-cli/mkdocs.yml +++ b/gen/policysimulator1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Policy Simulator v4.0.1+20220227 +site_name: Policy Simulator v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-policysimulator1-cli site_description: A complete library to interact with Policy Simulator (protocol v1) @@ -7,19 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/policysimulator1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_locations-replays-create.md', 'Folders', 'Locations Replays Create'] -- ['folders_locations-replays-get.md', 'Folders', 'Locations Replays Get'] -- ['folders_locations-replays-results-list.md', 'Folders', 'Locations Replays Results List'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['organizations_locations-replays-create.md', 'Organizations', 'Locations Replays Create'] -- ['organizations_locations-replays-get.md', 'Organizations', 'Locations Replays Get'] -- ['organizations_locations-replays-results-list.md', 'Organizations', 'Locations Replays Results List'] -- ['projects_locations-replays-create.md', 'Projects', 'Locations Replays Create'] -- ['projects_locations-replays-get.md', 'Projects', 'Locations Replays Get'] -- ['projects_locations-replays-results-list.md', 'Projects', 'Locations Replays Results List'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Locations Replays Create': 'folders_locations-replays-create.md' + - 'Locations Replays Get': 'folders_locations-replays-get.md' + - 'Locations Replays Operations Get': 'folders_locations-replays-operations-get.md' + - 'Locations Replays Operations List': 'folders_locations-replays-operations-list.md' + - 'Locations Replays Results List': 'folders_locations-replays-results-list.md' +- 
'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Organizations': + - 'Locations Replays Create': 'organizations_locations-replays-create.md' + - 'Locations Replays Get': 'organizations_locations-replays-get.md' + - 'Locations Replays Operations Get': 'organizations_locations-replays-operations-get.md' + - 'Locations Replays Operations List': 'organizations_locations-replays-operations-list.md' + - 'Locations Replays Results List': 'organizations_locations-replays-results-list.md' +- 'Projects': + - 'Locations Replays Create': 'projects_locations-replays-create.md' + - 'Locations Replays Get': 'projects_locations-replays-get.md' + - 'Locations Replays Operations Get': 'projects_locations-replays-operations-get.md' + - 'Locations Replays Operations List': 'projects_locations-replays-operations-list.md' + - 'Locations Replays Results List': 'projects_locations-replays-results-list.md' theme: readthedocs diff --git a/gen/policysimulator1-cli/src/client.rs b/gen/policysimulator1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/policysimulator1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user 
comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/policysimulator1-cli/src/main.rs b/gen/policysimulator1-cli/src/main.rs index 2f11119448..f0210d3eb9 100644 --- a/gen/policysimulator1-cli/src/main.rs +++ b/gen/policysimulator1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_policysimulator1::{api, Error, oauth2}; +use google_policysimulator1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -200,6 +199,120 @@ where } } + async fn _folders_locations_replays_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.folders().locations_replays_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } 
+ let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _folders_locations_replays_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.folders().locations_replays_operations_list(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut 
ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _folders_locations_replays_results_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.folders().locations_replays_results_list(opt.value_of("parent").unwrap_or("")); @@ -210,7 +323,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -313,7 +426,7 @@ where async fn _operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { - let mut call = self.hub.operations().list(); + let mut call = self.hub.operations().list(opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { @@ -321,10 +434,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); - }, - "name" => { - call = call.name(value.unwrap_or("")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -342,7 +452,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["filter", "name", "page-size", "page-token"].iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -525,6 +635,120 @@ where } } + async fn _organizations_locations_replays_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().locations_replays_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_locations_replays_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().locations_replays_operations_list(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_locations_replays_results_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().locations_replays_results_list(opt.value_of("parent").unwrap_or("")); @@ -535,7 +759,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -733,6 +957,120 @@ where } } + async fn _projects_locations_replays_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_replays_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { 
+ Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_replays_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_replays_operations_list(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + 
Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_replays_results_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_replays_results_list(opt.value_of("parent").unwrap_or("")); @@ -743,7 +1081,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -805,6 +1143,12 @@ where ("locations-replays-get", Some(opt)) => { call_result = self._folders_locations_replays_get(opt, dry_run, &mut err).await; }, + ("locations-replays-operations-get", Some(opt)) => { + call_result = self._folders_locations_replays_operations_get(opt, dry_run, &mut err).await; + }, + ("locations-replays-operations-list", Some(opt)) => { + call_result = self._folders_locations_replays_operations_list(opt, dry_run, &mut err).await; + }, ("locations-replays-results-list", Some(opt)) => { call_result = self._folders_locations_replays_results_list(opt, dry_run, &mut err).await; }, @@ -836,6 +1180,12 @@ where ("locations-replays-get", Some(opt)) => { call_result = self._organizations_locations_replays_get(opt, dry_run, &mut err).await; }, + ("locations-replays-operations-get", Some(opt)) => { + call_result = self._organizations_locations_replays_operations_get(opt, dry_run, &mut err).await; + }, + ("locations-replays-operations-list", Some(opt)) => { + call_result = self._organizations_locations_replays_operations_list(opt, dry_run, &mut err).await; + }, 
("locations-replays-results-list", Some(opt)) => { call_result = self._organizations_locations_replays_results_list(opt, dry_run, &mut err).await; }, @@ -853,6 +1203,12 @@ where ("locations-replays-get", Some(opt)) => { call_result = self._projects_locations_replays_get(opt, dry_run, &mut err).await; }, + ("locations-replays-operations-get", Some(opt)) => { + call_result = self._projects_locations_replays_operations_get(opt, dry_run, &mut err).await; + }, + ("locations-replays-operations-list", Some(opt)) => { + call_result = self._projects_locations_replays_operations_list(opt, dry_run, &mut err).await; + }, ("locations-replays-results-list", Some(opt)) => { call_result = self._projects_locations_replays_results_list(opt, dry_run, &mut err).await; }, @@ -935,7 +1291,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("folders", "methods: 'locations-replays-create', 'locations-replays-get' and 'locations-replays-results-list'", vec![ + ("folders", "methods: 'locations-replays-create', 'locations-replays-get', 'locations-replays-operations-get', 'locations-replays-operations-list' and 'locations-replays-results-list'", vec![ ("locations-replays-create", Some(r##"Creates and starts a Replay using the given ReplayConfig."##), "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/folders_locations-replays-create", @@ -980,6 +1336,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-replays-operations-get", + Some(r##"Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service."##), + "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/folders_locations-replays-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-replays-operations-list", + Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##), + "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/folders_locations-replays-operations-list", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation's parent resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1037,6 +1437,12 @@ async fn main() { Some(r##"Lists operations that match the specified filter in the request. 
If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##), "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/operations_list", vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation's parent resource."##), + Some(true), + Some(false)), + (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), @@ -1051,7 +1457,7 @@ async fn main() { ]), ]), - ("organizations", "methods: 'locations-replays-create', 'locations-replays-get' and 'locations-replays-results-list'", vec![ + ("organizations", "methods: 'locations-replays-create', 'locations-replays-get', 'locations-replays-operations-get', 'locations-replays-operations-list' and 'locations-replays-results-list'", vec![ ("locations-replays-create", Some(r##"Creates and starts a Replay using the given ReplayConfig."##), "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/organizations_locations-replays-create", @@ -1096,6 +1502,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-replays-operations-get", + Some(r##"Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service."##), + "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/organizations_locations-replays-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-replays-operations-list", + Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. 
For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##), + "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/organizations_locations-replays-operations-list", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation's parent resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1126,7 +1576,7 @@ async fn main() { ]), ]), - ("projects", "methods: 'locations-replays-create', 'locations-replays-get' and 'locations-replays-results-list'", vec![ + ("projects", "methods: 'locations-replays-create', 'locations-replays-get', 'locations-replays-operations-get', 'locations-replays-operations-list' and 'locations-replays-results-list'", vec![ ("locations-replays-create", Some(r##"Creates and starts a Replay using the given ReplayConfig."##), "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/projects_locations-replays-create", @@ -1171,6 +1621,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-replays-operations-get", + Some(r##"Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service."##), + "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/projects_locations-replays-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-replays-operations-list", + Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. 
For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##), + "Details at http://byron.github.io/google-apis-rs/google_policysimulator1_cli/projects_locations-replays-operations-list", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation's parent resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1205,7 +1699,7 @@ async fn main() { let mut app = App::new("policysimulator1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230115") .about(" Policy Simulator is a collection of endpoints for creating, running, and viewing a Replay. A `Replay` is a type of simulation that lets you see how your members' access to resources might change if you changed your IAM policy. 
During a `Replay`, Policy Simulator re-evaluates, or replays, past access attempts under both the current policy and your proposed policy, and compares those results to determine how your members' access might change under the proposed policy.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_policysimulator1_cli") .arg(Arg::with_name("url") diff --git a/gen/policysimulator1/Cargo.toml b/gen/policysimulator1/Cargo.toml index 42ad8797cb..cd231679c7 100644 --- a/gen/policysimulator1/Cargo.toml +++ b/gen/policysimulator1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-policysimulator1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Policy Simulator (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/policysimulator1" homepage = "https://cloud.google.com/iam/docs/simulating-access" -documentation = "https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-policysimulator1/5.0.2+20230115" license = "MIT" keywords = ["policysimulator", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/policysimulator1/README.md b/gen/policysimulator1/README.md index e4b23664db..6be137e099 100644 --- a/gen/policysimulator1/README.md +++ b/gen/policysimulator1/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-policysimulator1` library allows access to all features of the *Google Policy Simulator* service. -This documentation was generated from *Policy Simulator* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *policysimulator:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Policy Simulator* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *policysimulator:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Policy Simulator* *v1* API can be found at the [official documentation site](https://cloud.google.com/iam/docs/simulating-access). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/PolicySimulator) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/PolicySimulator) ... * folders - * [*locations replays create*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::FolderLocationReplayCreateCall), [*locations replays get*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::FolderLocationReplayGetCall), [*locations replays operations get*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::FolderLocationReplayOperationGetCall), [*locations replays operations list*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::FolderLocationReplayOperationListCall) and [*locations replays results list*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::FolderLocationReplayResultListCall) + * [*locations replays create*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::FolderLocationReplayCreateCall), [*locations replays get*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::FolderLocationReplayGetCall), [*locations replays operations 
get*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::FolderLocationReplayOperationGetCall), [*locations replays operations list*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::FolderLocationReplayOperationListCall) and [*locations replays results list*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::FolderLocationReplayResultListCall) * operations - * [*get*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::OperationGetCall) and [*list*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::OperationListCall) + * [*get*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::OperationGetCall) and [*list*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::OperationListCall) * organizations - * [*locations replays create*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::OrganizationLocationReplayCreateCall), [*locations replays get*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::OrganizationLocationReplayGetCall), [*locations replays operations get*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::OrganizationLocationReplayOperationGetCall), [*locations replays operations list*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::OrganizationLocationReplayOperationListCall) and [*locations replays results list*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::OrganizationLocationReplayResultListCall) + * [*locations replays create*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::OrganizationLocationReplayCreateCall), [*locations replays 
get*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::OrganizationLocationReplayGetCall), [*locations replays operations get*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::OrganizationLocationReplayOperationGetCall), [*locations replays operations list*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::OrganizationLocationReplayOperationListCall) and [*locations replays results list*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::OrganizationLocationReplayResultListCall) * projects - * [*locations replays create*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::ProjectLocationReplayCreateCall), [*locations replays get*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::ProjectLocationReplayGetCall), [*locations replays operations get*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::ProjectLocationReplayOperationGetCall), [*locations replays operations list*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::ProjectLocationReplayOperationListCall) and [*locations replays results list*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/api::ProjectLocationReplayResultListCall) + * [*locations replays create*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::ProjectLocationReplayCreateCall), [*locations replays get*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::ProjectLocationReplayGetCall), [*locations replays operations get*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::ProjectLocationReplayOperationGetCall), [*locations replays operations 
list*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::ProjectLocationReplayOperationListCall) and [*locations replays results list*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/api::ProjectLocationReplayResultListCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/PolicySimulator)** +* **[Hub](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/PolicySimulator)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::CallBuilder) -* **[Resources](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::CallBuilder) +* **[Resources](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::Part)** + * **[Parts](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -136,17 +136,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -156,29 +156,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::Delegate) to the -[Method Builder](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::Delegate) to the +[Method Builder](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::RequestValue) and -[decodable](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::RequestValue) and +[decodable](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-policysimulator1/5.0.2-beta-1+20230115/google_policysimulator1/client::RequestValue) are moved +* [request values](https://docs.rs/google-policysimulator1/5.0.2+20230115/google_policysimulator1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/policysimulator1/src/api.rs b/gen/policysimulator1/src/api.rs index 4434dbcfe8..85457bbeb7 100644 --- a/gen/policysimulator1/src/api.rs +++ b/gen/policysimulator1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> PolicySimulator { PolicySimulator { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://policysimulator.googleapis.com/".to_string(), _root_url: "https://policysimulator.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> PolicySimulator { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/policysimulator1/src/client.rs b/gen/policysimulator1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/policysimulator1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/policysimulator1/src/lib.rs b/gen/policysimulator1/src/lib.rs index d93c46174c..2cfc041d3e 100644 --- a/gen/policysimulator1/src/lib.rs +++ b/gen/policysimulator1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Policy Simulator* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *policysimulator:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Policy Simulator* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *policysimulator:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Policy Simulator* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/iam/docs/simulating-access). diff --git a/gen/policytroubleshooter1-cli/Cargo.toml b/gen/policytroubleshooter1-cli/Cargo.toml index 95ec474fa3..a7d60f3c42 100644 --- a/gen/policytroubleshooter1-cli/Cargo.toml +++ b/gen/policytroubleshooter1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-policytroubleshooter1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Policy Troubleshooter (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/policytroubleshooter1-cli" @@ -20,13 +20,13 @@ name = "policytroubleshooter1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-policytroubleshooter1] path = "../policytroubleshooter1" -version = "4.0.1+20220227" +version = "5.0.2+20230121" + diff --git a/gen/policytroubleshooter1-cli/README.md b/gen/policytroubleshooter1-cli/README.md index 660ed317da..5e05c766bc 100644 --- 
a/gen/policytroubleshooter1-cli/README.md +++ b/gen/policytroubleshooter1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Policy Troubleshooter* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *Policy Troubleshooter* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash policytroubleshooter1 [options] diff --git a/gen/policytroubleshooter1-cli/mkdocs.yml b/gen/policytroubleshooter1-cli/mkdocs.yml index 2b0118af0f..639bc1efbe 100644 --- a/gen/policytroubleshooter1-cli/mkdocs.yml +++ b/gen/policytroubleshooter1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Policy Troubleshooter v4.0.1+20220227 +site_name: Policy Troubleshooter v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-policytroubleshooter1-cli site_description: A complete library to interact with Policy Troubleshooter (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/policytroublesho docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['iam_troubleshoot.md', 'Iam', 'Troubleshoot'] +nav: +- Home: 'index.md' +- 'Iam': + - 'Troubleshoot': 'iam_troubleshoot.md' theme: readthedocs diff --git a/gen/policytroubleshooter1-cli/src/client.rs b/gen/policytroubleshooter1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/policytroubleshooter1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - 
-use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/policytroubleshooter1-cli/src/main.rs b/gen/policytroubleshooter1-cli/src/main.rs index c9b0413366..e726549dda 100644 --- a/gen/policytroubleshooter1-cli/src/main.rs +++ b/gen/policytroubleshooter1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_policytroubleshooter1::{api, Error, oauth2}; +use google_policytroubleshooter1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -256,7 +255,7 @@ async fn main() { let mut app = App::new("policytroubleshooter1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230121") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_policytroubleshooter1_cli") .arg(Arg::with_name("url") diff --git a/gen/policytroubleshooter1/Cargo.toml b/gen/policytroubleshooter1/Cargo.toml index 2162263129..4883cd2d70 100644 --- a/gen/policytroubleshooter1/Cargo.toml +++ b/gen/policytroubleshooter1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-policytroubleshooter1" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Policy Troubleshooter (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/policytroubleshooter1" homepage = "https://cloud.google.com/iam/" -documentation = "https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121" +documentation = 
"https://docs.rs/google-policytroubleshooter1/5.0.2+20230121" license = "MIT" keywords = ["policytroubleshooter", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/policytroubleshooter1/README.md b/gen/policytroubleshooter1/README.md index d8bf2f1480..b22aee08d5 100644 --- a/gen/policytroubleshooter1/README.md +++ b/gen/policytroubleshooter1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-policytroubleshooter1` library allows access to all features of the *Google Policy Troubleshooter* service. -This documentation was generated from *Policy Troubleshooter* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *policytroubleshooter:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Policy Troubleshooter* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *policytroubleshooter:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Policy Troubleshooter* *v1* API can be found at the [official documentation site](https://cloud.google.com/iam/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/PolicyTroubleshooter) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/PolicyTroubleshooter) ... 
* iam - * [*troubleshoot*](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/api::IamTroubleshootCall) + * [*troubleshoot*](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/api::IamTroubleshootCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/PolicyTroubleshooter)** +* **[Hub](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/PolicyTroubleshooter)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::CallBuilder) -* **[Resources](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::CallBuilder) +* **[Resources](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::Part)** + * 
**[Parts](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::Delegate) to the -[Method Builder](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::Delegate) to the +[Method Builder](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::RequestValue) and -[decodable](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::RequestValue) and +[decodable](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-policytroubleshooter1/5.0.2-beta-1+20230121/google_policytroubleshooter1/client::RequestValue) are moved +* [request values](https://docs.rs/google-policytroubleshooter1/5.0.2+20230121/google_policytroubleshooter1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/policytroubleshooter1/src/api.rs b/gen/policytroubleshooter1/src/api.rs index a1d4121164..9cc4c08de6 100644 --- a/gen/policytroubleshooter1/src/api.rs +++ b/gen/policytroubleshooter1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> PolicyTroubleshooter { PolicyTroubleshooter { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://policytroubleshooter.googleapis.com/".to_string(), _root_url: "https://policytroubleshooter.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> PolicyTroubleshooter { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/policytroubleshooter1/src/client.rs b/gen/policytroubleshooter1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/policytroubleshooter1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/policytroubleshooter1/src/lib.rs b/gen/policytroubleshooter1/src/lib.rs index d6f3426ad4..c91ecfa3f8 100644 --- a/gen/policytroubleshooter1/src/lib.rs +++ b/gen/policytroubleshooter1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Policy Troubleshooter* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *policytroubleshooter:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Policy Troubleshooter* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *policytroubleshooter:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Policy Troubleshooter* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/iam/). diff --git a/gen/prediction1d6-cli/Cargo.toml b/gen/prediction1d6-cli/Cargo.toml index c37173b0f8..88e431cd4a 100644 --- a/gen/prediction1d6-cli/Cargo.toml +++ b/gen/prediction1d6-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-prediction1d6-cli" -version = "4.0.1+20160511" +version = "5.0.2+20160511" authors = ["Sebastian Thiel "] description = "A complete library to interact with prediction (protocol v1.6)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/prediction1d6-cli" @@ -20,13 +20,13 @@ name = "prediction1d6" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-prediction1d6] path = "../prediction1d6" -version = "4.0.1+20160511" +version = "5.0.2+20160511" + diff --git a/gen/prediction1d6-cli/README.md b/gen/prediction1d6-cli/README.md index a1ad508c30..142babdd50 100644 --- a/gen/prediction1d6-cli/README.md +++ b/gen/prediction1d6-cli/README.md @@ -25,7 +25,7 @@ Find the 
source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *prediction* API at revision *20160511*. The CLI is at version *4.0.1*. +This documentation was generated from the *prediction* API at revision *20160511*. The CLI is at version *5.0.2*. ```bash prediction1d6 [options] diff --git a/gen/prediction1d6-cli/mkdocs.yml b/gen/prediction1d6-cli/mkdocs.yml index f1494a6013..2a24b0997c 100644 --- a/gen/prediction1d6-cli/mkdocs.yml +++ b/gen/prediction1d6-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: prediction v4.0.1+20160511 +site_name: prediction v5.0.2+20160511 site_url: http://byron.github.io/google-apis-rs/google-prediction1d6-cli site_description: A complete library to interact with prediction (protocol v1.6) @@ -7,16 +7,18 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/prediction1d6-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['hostedmodels_predict.md', 'Hostedmodels', 'Predict'] -- ['trainedmodels_analyze.md', 'Trainedmodels', 'Analyze'] -- ['trainedmodels_delete.md', 'Trainedmodels', 'Delete'] -- ['trainedmodels_get.md', 'Trainedmodels', 'Get'] -- ['trainedmodels_insert.md', 'Trainedmodels', 'Insert'] -- ['trainedmodels_list.md', 'Trainedmodels', 'List'] -- ['trainedmodels_predict.md', 'Trainedmodels', 'Predict'] -- ['trainedmodels_update.md', 'Trainedmodels', 'Update'] +nav: +- Home: 'index.md' +- 'Hostedmodels': + - 'Predict': 'hostedmodels_predict.md' +- 'Trainedmodels': + - 'Analyze': 'trainedmodels_analyze.md' + - 'Delete': 'trainedmodels_delete.md' + - 'Get': 'trainedmodels_get.md' + - 'Insert': 'trainedmodels_insert.md' + - 'List': 'trainedmodels_list.md' + - 'Predict': 'trainedmodels_predict.md' + - 'Update': 'trainedmodels_update.md' theme: readthedocs diff --git a/gen/prediction1d6-cli/src/client.rs b/gen/prediction1d6-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/prediction1d6-cli/src/client.rs +++ 
/dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - 
Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - 
num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, 
"int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - 
err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - 
DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref 
s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/prediction1d6-cli/src/main.rs b/gen/prediction1d6-cli/src/main.rs index c48eaf3a11..0db6b83277 100644 --- a/gen/prediction1d6-cli/src/main.rs +++ b/gen/prediction1d6-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_prediction1d6::{api, Error, oauth2}; +use google_prediction1d6::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -383,7 +382,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -956,7 +955,7 @@ async fn main() { let mut app = App::new("prediction1d6") .author("Sebastian Thiel ") - .version("4.0.1+20160511") + .version("5.0.2+20160511") .about("Lets you access a cloud hosted machine learning service that makes it easy to build smart apps") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_prediction1d6_cli") .arg(Arg::with_name("url") diff --git a/gen/prediction1d6/Cargo.toml b/gen/prediction1d6/Cargo.toml index 32c0f55bc9..fd3f079f85 100644 --- a/gen/prediction1d6/Cargo.toml +++ b/gen/prediction1d6/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-prediction1d6" -version = "5.0.2-beta-1+20160511" +version = "5.0.2+20160511" authors = ["Sebastian Thiel "] description = "A 
complete library to interact with prediction (protocol v1.6)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/prediction1d6" homepage = "https://developers.google.com/prediction/docs/developer-guide" -documentation = "https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511" +documentation = "https://docs.rs/google-prediction1d6/5.0.2+20160511" license = "MIT" keywords = ["prediction", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/prediction1d6/README.md b/gen/prediction1d6/README.md index 64efa20ca9..5e2cefeab0 100644 --- a/gen/prediction1d6/README.md +++ b/gen/prediction1d6/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-prediction1d6` library allows access to all features of the *Google prediction* service. -This documentation was generated from *prediction* crate version *5.0.2-beta-1+20160511*, where *20160511* is the exact revision of the *prediction:v1.6* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *prediction* crate version *5.0.2+20160511*, where *20160511* is the exact revision of the *prediction:v1.6* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *prediction* *v1d6* API can be found at the [official documentation site](https://developers.google.com/prediction/docs/developer-guide). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/Prediction) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/Prediction) ... 
* hostedmodels - * [*predict*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/api::HostedmodelPredictCall) + * [*predict*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/api::HostedmodelPredictCall) * trainedmodels - * [*analyze*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/api::TrainedmodelAnalyzeCall), [*delete*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/api::TrainedmodelDeleteCall), [*get*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/api::TrainedmodelGetCall), [*insert*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/api::TrainedmodelInsertCall), [*list*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/api::TrainedmodelListCall), [*predict*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/api::TrainedmodelPredictCall) and [*update*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/api::TrainedmodelUpdateCall) + * [*analyze*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/api::TrainedmodelAnalyzeCall), [*delete*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/api::TrainedmodelDeleteCall), [*get*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/api::TrainedmodelGetCall), [*insert*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/api::TrainedmodelInsertCall), [*list*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/api::TrainedmodelListCall), [*predict*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/api::TrainedmodelPredictCall) and [*update*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/api::TrainedmodelUpdateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central 
[hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/Prediction)** +* **[Hub](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/Prediction)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::CallBuilder) -* **[Resources](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::CallBuilder) +* **[Resources](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::Part)** + * **[Parts](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::CallBuilder)** +* **[Activities](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::Delegate) to the -[Method Builder](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::Delegate) to the +[Method Builder](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::RequestValue) and -[decodable](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::RequestValue) and +[decodable](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-prediction1d6/5.0.2-beta-1+20160511/google_prediction1d6/client::RequestValue) are moved +* [request values](https://docs.rs/google-prediction1d6/5.0.2+20160511/google_prediction1d6/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/prediction1d6/src/api.rs b/gen/prediction1d6/src/api.rs index 9e7b8be9a9..b0ae9edd20 100644 --- a/gen/prediction1d6/src/api.rs +++ b/gen/prediction1d6/src/api.rs @@ -141,7 +141,7 @@ impl<'a, S> Prediction { Prediction { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/prediction/v1.6/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -155,7 +155,7 @@ impl<'a, S> Prediction { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/prediction1d6/src/client.rs b/gen/prediction1d6/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/prediction1d6/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/prediction1d6/src/lib.rs b/gen/prediction1d6/src/lib.rs index e97c65bce1..e16b5d3187 100644 --- a/gen/prediction1d6/src/lib.rs +++ b/gen/prediction1d6/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *prediction* crate version *5.0.2-beta-1+20160511*, where *20160511* is the exact revision of the *prediction:v1.6* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *prediction* crate version *5.0.2+20160511*, where *20160511* is the exact revision of the *prediction:v1.6* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *prediction* *v1d6* API can be found at the //! [official documentation site](https://developers.google.com/prediction/docs/developer-guide). diff --git a/gen/privateca1-cli/Cargo.toml b/gen/privateca1-cli/Cargo.toml index f9f3315f50..ea46760e96 100644 --- a/gen/privateca1-cli/Cargo.toml +++ b/gen/privateca1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-privateca1-cli" -version = "4.0.1+20220209" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Certificate Authority Service (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/privateca1-cli" @@ -20,13 +20,13 @@ name = "privateca1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-privateca1] path = "../privateca1" -version = "4.0.1+20220209" +version = "5.0.2+20230105" + diff --git a/gen/privateca1-cli/README.md b/gen/privateca1-cli/README.md index 0fdadc613e..3940f6506c 100644 --- a/gen/privateca1-cli/README.md +++ b/gen/privateca1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Certificate Authority Service* API at revision *20220209*. The CLI is at version *4.0.1*. +This documentation was generated from the *Certificate Authority Service* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash privateca1 [options] diff --git a/gen/privateca1-cli/mkdocs.yml b/gen/privateca1-cli/mkdocs.yml index 784b846d62..5c7deb467f 100644 --- a/gen/privateca1-cli/mkdocs.yml +++ b/gen/privateca1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Certificate Authority Service v4.0.1+20220209 +site_name: Certificate Authority Service v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-privateca1-cli site_description: A complete library to interact with Certificate Authority Service (protocol v1) @@ -7,52 +7,53 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/privateca1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-ca-pools-certificate-authorities-activate.md', 'Projects', 'Locations Ca Pools Certificate Authorities Activate'] -- ['projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-get.md', 'Projects', 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Get'] -- ['projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-get-iam-policy.md', 'Projects', 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Get Iam Policy'] -- ['projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-list.md', 'Projects', 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists List'] -- ['projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-patch.md', 'Projects', 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Patch'] -- 
['projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-set-iam-policy.md', 'Projects', 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Set Iam Policy'] -- ['projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-test-iam-permissions.md', 'Projects', 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Test Iam Permissions'] -- ['projects_locations-ca-pools-certificate-authorities-create.md', 'Projects', 'Locations Ca Pools Certificate Authorities Create'] -- ['projects_locations-ca-pools-certificate-authorities-delete.md', 'Projects', 'Locations Ca Pools Certificate Authorities Delete'] -- ['projects_locations-ca-pools-certificate-authorities-disable.md', 'Projects', 'Locations Ca Pools Certificate Authorities Disable'] -- ['projects_locations-ca-pools-certificate-authorities-enable.md', 'Projects', 'Locations Ca Pools Certificate Authorities Enable'] -- ['projects_locations-ca-pools-certificate-authorities-fetch.md', 'Projects', 'Locations Ca Pools Certificate Authorities Fetch'] -- ['projects_locations-ca-pools-certificate-authorities-get.md', 'Projects', 'Locations Ca Pools Certificate Authorities Get'] -- ['projects_locations-ca-pools-certificate-authorities-list.md', 'Projects', 'Locations Ca Pools Certificate Authorities List'] -- ['projects_locations-ca-pools-certificate-authorities-patch.md', 'Projects', 'Locations Ca Pools Certificate Authorities Patch'] -- ['projects_locations-ca-pools-certificate-authorities-undelete.md', 'Projects', 'Locations Ca Pools Certificate Authorities Undelete'] -- ['projects_locations-ca-pools-certificates-create.md', 'Projects', 'Locations Ca Pools Certificates Create'] -- ['projects_locations-ca-pools-certificates-get.md', 'Projects', 'Locations Ca Pools Certificates Get'] -- ['projects_locations-ca-pools-certificates-list.md', 'Projects', 'Locations Ca Pools Certificates List'] -- 
['projects_locations-ca-pools-certificates-patch.md', 'Projects', 'Locations Ca Pools Certificates Patch'] -- ['projects_locations-ca-pools-certificates-revoke.md', 'Projects', 'Locations Ca Pools Certificates Revoke'] -- ['projects_locations-ca-pools-create.md', 'Projects', 'Locations Ca Pools Create'] -- ['projects_locations-ca-pools-delete.md', 'Projects', 'Locations Ca Pools Delete'] -- ['projects_locations-ca-pools-fetch-ca-certs.md', 'Projects', 'Locations Ca Pools Fetch Ca Certs'] -- ['projects_locations-ca-pools-get.md', 'Projects', 'Locations Ca Pools Get'] -- ['projects_locations-ca-pools-get-iam-policy.md', 'Projects', 'Locations Ca Pools Get Iam Policy'] -- ['projects_locations-ca-pools-list.md', 'Projects', 'Locations Ca Pools List'] -- ['projects_locations-ca-pools-patch.md', 'Projects', 'Locations Ca Pools Patch'] -- ['projects_locations-ca-pools-set-iam-policy.md', 'Projects', 'Locations Ca Pools Set Iam Policy'] -- ['projects_locations-ca-pools-test-iam-permissions.md', 'Projects', 'Locations Ca Pools Test Iam Permissions'] -- ['projects_locations-certificate-templates-create.md', 'Projects', 'Locations Certificate Templates Create'] -- ['projects_locations-certificate-templates-delete.md', 'Projects', 'Locations Certificate Templates Delete'] -- ['projects_locations-certificate-templates-get.md', 'Projects', 'Locations Certificate Templates Get'] -- ['projects_locations-certificate-templates-get-iam-policy.md', 'Projects', 'Locations Certificate Templates Get Iam Policy'] -- ['projects_locations-certificate-templates-list.md', 'Projects', 'Locations Certificate Templates List'] -- ['projects_locations-certificate-templates-patch.md', 'Projects', 'Locations Certificate Templates Patch'] -- ['projects_locations-certificate-templates-set-iam-policy.md', 'Projects', 'Locations Certificate Templates Set Iam Policy'] -- ['projects_locations-certificate-templates-test-iam-permissions.md', 'Projects', 'Locations Certificate Templates Test Iam 
Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Ca Pools Certificate Authorities Activate': 'projects_locations-ca-pools-certificate-authorities-activate.md' + - 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Get': 'projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-get.md' + - 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Get Iam Policy': 'projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-get-iam-policy.md' + - 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists List': 'projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-list.md' + - 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Patch': 'projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-patch.md' + - 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Set Iam Policy': 'projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-set-iam-policy.md' + - 'Locations Ca Pools Certificate Authorities Certificate Revocation Lists Test Iam Permissions': 'projects_locations-ca-pools-certificate-authorities-certificate-revocation-lists-test-iam-permissions.md' + - 'Locations Ca Pools Certificate Authorities Create': 'projects_locations-ca-pools-certificate-authorities-create.md' + - 'Locations Ca Pools Certificate Authorities Delete': 
'projects_locations-ca-pools-certificate-authorities-delete.md' + - 'Locations Ca Pools Certificate Authorities Disable': 'projects_locations-ca-pools-certificate-authorities-disable.md' + - 'Locations Ca Pools Certificate Authorities Enable': 'projects_locations-ca-pools-certificate-authorities-enable.md' + - 'Locations Ca Pools Certificate Authorities Fetch': 'projects_locations-ca-pools-certificate-authorities-fetch.md' + - 'Locations Ca Pools Certificate Authorities Get': 'projects_locations-ca-pools-certificate-authorities-get.md' + - 'Locations Ca Pools Certificate Authorities List': 'projects_locations-ca-pools-certificate-authorities-list.md' + - 'Locations Ca Pools Certificate Authorities Patch': 'projects_locations-ca-pools-certificate-authorities-patch.md' + - 'Locations Ca Pools Certificate Authorities Undelete': 'projects_locations-ca-pools-certificate-authorities-undelete.md' + - 'Locations Ca Pools Certificates Create': 'projects_locations-ca-pools-certificates-create.md' + - 'Locations Ca Pools Certificates Get': 'projects_locations-ca-pools-certificates-get.md' + - 'Locations Ca Pools Certificates List': 'projects_locations-ca-pools-certificates-list.md' + - 'Locations Ca Pools Certificates Patch': 'projects_locations-ca-pools-certificates-patch.md' + - 'Locations Ca Pools Certificates Revoke': 'projects_locations-ca-pools-certificates-revoke.md' + - 'Locations Ca Pools Create': 'projects_locations-ca-pools-create.md' + - 'Locations Ca Pools Delete': 'projects_locations-ca-pools-delete.md' + - 'Locations Ca Pools Fetch Ca Certs': 'projects_locations-ca-pools-fetch-ca-certs.md' + - 'Locations Ca Pools Get': 'projects_locations-ca-pools-get.md' + - 'Locations Ca Pools Get Iam Policy': 'projects_locations-ca-pools-get-iam-policy.md' + - 'Locations Ca Pools List': 'projects_locations-ca-pools-list.md' + - 'Locations Ca Pools Patch': 'projects_locations-ca-pools-patch.md' + - 'Locations Ca Pools Set Iam Policy': 
'projects_locations-ca-pools-set-iam-policy.md' + - 'Locations Ca Pools Test Iam Permissions': 'projects_locations-ca-pools-test-iam-permissions.md' + - 'Locations Certificate Templates Create': 'projects_locations-certificate-templates-create.md' + - 'Locations Certificate Templates Delete': 'projects_locations-certificate-templates-delete.md' + - 'Locations Certificate Templates Get': 'projects_locations-certificate-templates-get.md' + - 'Locations Certificate Templates Get Iam Policy': 'projects_locations-certificate-templates-get-iam-policy.md' + - 'Locations Certificate Templates List': 'projects_locations-certificate-templates-list.md' + - 'Locations Certificate Templates Patch': 'projects_locations-certificate-templates-patch.md' + - 'Locations Certificate Templates Set Iam Policy': 'projects_locations-certificate-templates-set-iam-policy.md' + - 'Locations Certificate Templates Test Iam Permissions': 'projects_locations-certificate-templates-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/privateca1-cli/src/client.rs b/gen/privateca1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/privateca1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, 
PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/privateca1-cli/src/main.rs b/gen/privateca1-cli/src/main.rs index 6549430e54..d629997f38 100644 --- a/gen/privateca1-cli/src/main.rs +++ b/gen/privateca1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_privateca1::{api, Error, oauth2}; +use google_privateca1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -198,7 +197,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -257,7 +256,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -360,7 +359,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = 
call.request_id(value.unwrap_or("")); @@ -731,11 +730,14 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "skip-grace-period" => { + call = call.skip_grace_period( value.map(|v| arg_from_str(v, err, "skip-grace-period", "boolean")).unwrap_or(false)); + }, "request-id" => { call = call.request_id(value.unwrap_or("")); }, "ignore-active-certificates" => { - call = call.ignore_active_certificates(arg_from_str(value.unwrap_or("false"), err, "ignore-active-certificates", "boolean")); + call = call.ignore_active_certificates( value.map(|v| arg_from_str(v, err, "ignore-active-certificates", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -750,7 +752,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["ignore-active-certificates", "request-id"].iter().map(|v|*v)); + v.extend(["ignore-active-certificates", "request-id", "skip-grace-period"].iter().map(|v|*v)); v } )); } } @@ -1068,7 +1070,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1212,7 +1214,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -1474,7 +1476,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, 
"validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -1594,7 +1596,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1774,7 +1776,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -2236,7 +2238,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2295,7 +2297,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2422,7 +2424,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, 
"request-id" => { call = call.request_id(value.unwrap_or("")); @@ -2882,7 +2884,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2941,7 +2943,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3065,7 +3067,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -3351,7 +3353,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3601,7 +3603,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3928,7 +3930,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The 
resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4000,7 +4002,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4028,7 +4030,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4512,7 +4514,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4584,7 +4586,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4612,7 +4614,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4712,7 +4714,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4784,7 +4786,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4812,7 +4814,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4978,7 +4980,7 @@ async fn main() { let mut app = App::new("privateca1") .author("Sebastian Thiel ") - .version("4.0.1+20220209") + .version("5.0.2+20230105") .about("The Certificate Authority Service API is a highly-available, scalable service that enables you to simplify and automate the management of private certificate authorities (CAs) while staying in control of your private keys. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_privateca1_cli") .arg(Arg::with_name("url") diff --git a/gen/privateca1/Cargo.toml b/gen/privateca1/Cargo.toml index 282d40e15c..70073cdbd3 100644 --- a/gen/privateca1/Cargo.toml +++ b/gen/privateca1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-privateca1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Certificate Authority Service (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/privateca1" homepage = "https://cloud.google.com/" -documentation = "https://docs.rs/google-privateca1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-privateca1/5.0.2+20230105" license = "MIT" keywords = ["privateca", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/privateca1/README.md b/gen/privateca1/README.md index 750bfa4758..f17e4aece3 100644 --- a/gen/privateca1/README.md +++ b/gen/privateca1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-privateca1` library allows access to all features of the *Google Certificate Authority Service* service. 
-This documentation was generated from *Certificate Authority Service* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *privateca:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Certificate Authority Service* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *privateca:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Certificate Authority Service* *v1* API can be found at the [official documentation site](https://cloud.google.com/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/CertificateAuthorityService) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/CertificateAuthorityService) ... 
* projects - * [*locations ca pools certificate authorities activate*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityActivateCall), [*locations ca pools certificate authorities certificate revocation lists get*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListGetCall), [*locations ca pools certificate authorities certificate revocation lists get iam policy*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListGetIamPolicyCall), [*locations ca pools certificate authorities certificate revocation lists list*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListListCall), [*locations ca pools certificate authorities certificate revocation lists patch*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListPatchCall), [*locations ca pools certificate authorities certificate revocation lists set iam policy*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListSetIamPolicyCall), [*locations ca pools certificate authorities certificate revocation lists test iam permissions*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListTestIamPermissionCall), [*locations ca pools certificate authorities create*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCreateCall), [*locations ca pools certificate authorities 
delete*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityDeleteCall), [*locations ca pools certificate authorities disable*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityDisableCall), [*locations ca pools certificate authorities enable*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityEnableCall), [*locations ca pools certificate authorities fetch*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityFetchCall), [*locations ca pools certificate authorities get*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityGetCall), [*locations ca pools certificate authorities list*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityListCall), [*locations ca pools certificate authorities patch*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityPatchCall), [*locations ca pools certificate authorities undelete*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityUndeleteCall), [*locations ca pools certificates create*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateCreateCall), [*locations ca pools certificates get*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateGetCall), [*locations ca pools certificates list*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateListCall), [*locations ca pools certificates 
patch*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificatePatchCall), [*locations ca pools certificates revoke*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateRevokeCall), [*locations ca pools create*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolCreateCall), [*locations ca pools delete*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolDeleteCall), [*locations ca pools fetch ca certs*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolFetchCaCertCall), [*locations ca pools get*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolGetCall), [*locations ca pools get iam policy*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolGetIamPolicyCall), [*locations ca pools list*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolListCall), [*locations ca pools patch*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolPatchCall), [*locations ca pools set iam policy*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolSetIamPolicyCall), [*locations ca pools test iam permissions*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCaPoolTestIamPermissionCall), [*locations certificate templates create*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCertificateTemplateCreateCall), [*locations certificate templates delete*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCertificateTemplateDeleteCall), [*locations 
certificate templates get*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCertificateTemplateGetCall), [*locations certificate templates get iam policy*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCertificateTemplateGetIamPolicyCall), [*locations certificate templates list*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCertificateTemplateListCall), [*locations certificate templates patch*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCertificateTemplatePatchCall), [*locations certificate templates set iam policy*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCertificateTemplateSetIamPolicyCall), [*locations certificate templates test iam permissions*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationCertificateTemplateTestIamPermissionCall), [*locations get*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/api::ProjectLocationOperationListCall) + * [*locations ca pools certificate authorities 
activate*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityActivateCall), [*locations ca pools certificate authorities certificate revocation lists get*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListGetCall), [*locations ca pools certificate authorities certificate revocation lists get iam policy*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListGetIamPolicyCall), [*locations ca pools certificate authorities certificate revocation lists list*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListListCall), [*locations ca pools certificate authorities certificate revocation lists patch*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListPatchCall), [*locations ca pools certificate authorities certificate revocation lists set iam policy*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListSetIamPolicyCall), [*locations ca pools certificate authorities certificate revocation lists test iam permissions*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCertificateRevocationListTestIamPermissionCall), [*locations ca pools certificate authorities create*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityCreateCall), [*locations ca pools certificate authorities delete*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityDeleteCall), [*locations ca pools certificate authorities 
disable*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityDisableCall), [*locations ca pools certificate authorities enable*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityEnableCall), [*locations ca pools certificate authorities fetch*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityFetchCall), [*locations ca pools certificate authorities get*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityGetCall), [*locations ca pools certificate authorities list*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityListCall), [*locations ca pools certificate authorities patch*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityPatchCall), [*locations ca pools certificate authorities undelete*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateAuthorityUndeleteCall), [*locations ca pools certificates create*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateCreateCall), [*locations ca pools certificates get*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateGetCall), [*locations ca pools certificates list*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateListCall), [*locations ca pools certificates patch*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificatePatchCall), [*locations ca pools certificates revoke*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCertificateRevokeCall), 
[*locations ca pools create*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolCreateCall), [*locations ca pools delete*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolDeleteCall), [*locations ca pools fetch ca certs*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolFetchCaCertCall), [*locations ca pools get*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolGetCall), [*locations ca pools get iam policy*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolGetIamPolicyCall), [*locations ca pools list*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolListCall), [*locations ca pools patch*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolPatchCall), [*locations ca pools set iam policy*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolSetIamPolicyCall), [*locations ca pools test iam permissions*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCaPoolTestIamPermissionCall), [*locations certificate templates create*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCertificateTemplateCreateCall), [*locations certificate templates delete*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCertificateTemplateDeleteCall), [*locations certificate templates get*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCertificateTemplateGetCall), [*locations certificate templates get iam policy*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCertificateTemplateGetIamPolicyCall), [*locations certificate templates 
list*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCertificateTemplateListCall), [*locations certificate templates patch*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCertificateTemplatePatchCall), [*locations certificate templates set iam policy*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCertificateTemplateSetIamPolicyCall), [*locations certificate templates test iam permissions*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationCertificateTemplateTestIamPermissionCall), [*locations get*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/CertificateAuthorityService)** +* **[Hub](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/CertificateAuthorityService)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::CallBuilder) -* **[Resources](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::CallBuilder) +* **[Resources](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::Part)** + * **[Parts](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -140,17 +140,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -160,29 +160,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::Delegate) to the -[Method Builder](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::Delegate) to the +[Method Builder](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::RequestValue) and -[decodable](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::RequestValue) and +[decodable](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-privateca1/5.0.2-beta-1+20230105/google_privateca1/client::RequestValue) are moved +* [request values](https://docs.rs/google-privateca1/5.0.2+20230105/google_privateca1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/privateca1/src/api.rs b/gen/privateca1/src/api.rs index 3f3f4511ec..3952ff920e 100644 --- a/gen/privateca1/src/api.rs +++ b/gen/privateca1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> CertificateAuthorityService { CertificateAuthorityService { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://privateca.googleapis.com/".to_string(), _root_url: "https://privateca.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> CertificateAuthorityService { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/privateca1/src/client.rs b/gen/privateca1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/privateca1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/privateca1/src/lib.rs b/gen/privateca1/src/lib.rs index bcd53bec68..4d930f579c 100644 --- a/gen/privateca1/src/lib.rs +++ b/gen/privateca1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Certificate Authority Service* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *privateca:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Certificate Authority Service* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *privateca:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Certificate Authority Service* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/). diff --git a/gen/privateca1_beta1-cli/Cargo.toml b/gen/privateca1_beta1-cli/Cargo.toml index e31eb43de6..6c3140c04d 100644 --- a/gen/privateca1_beta1-cli/Cargo.toml +++ b/gen/privateca1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-privateca1_beta1-cli" -version = "4.0.1+20220209" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Certificate Authority Service (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/privateca1_beta1-cli" @@ -20,13 +20,13 @@ name = "privateca1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-privateca1_beta1] path = "../privateca1_beta1" -version = "4.0.1+20220209" +version = "5.0.2+20230105" + diff --git a/gen/privateca1_beta1-cli/README.md b/gen/privateca1_beta1-cli/README.md index b8df220e80..de5b403381 100644 --- a/gen/privateca1_beta1-cli/README.md +++ 
b/gen/privateca1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Certificate Authority Service* API at revision *20220209*. The CLI is at version *4.0.1*. +This documentation was generated from the *Certificate Authority Service* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash privateca1-beta1 [options] diff --git a/gen/privateca1_beta1-cli/mkdocs.yml b/gen/privateca1_beta1-cli/mkdocs.yml index d9709652b0..37e1508663 100644 --- a/gen/privateca1_beta1-cli/mkdocs.yml +++ b/gen/privateca1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Certificate Authority Service v4.0.1+20220209 +site_name: Certificate Authority Service v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-privateca1_beta1-cli site_description: A complete library to interact with Certificate Authority Service (protocol v1beta1) @@ -7,43 +7,44 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/privateca1_beta1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-certificate-authorities-activate.md', 'Projects', 'Locations Certificate Authorities Activate'] -- ['projects_locations-certificate-authorities-certificate-revocation-lists-get.md', 'Projects', 'Locations Certificate Authorities Certificate Revocation Lists Get'] -- ['projects_locations-certificate-authorities-certificate-revocation-lists-get-iam-policy.md', 'Projects', 'Locations Certificate Authorities Certificate Revocation Lists Get Iam Policy'] -- ['projects_locations-certificate-authorities-certificate-revocation-lists-list.md', 'Projects', 'Locations Certificate Authorities Certificate Revocation Lists List'] -- ['projects_locations-certificate-authorities-certificate-revocation-lists-patch.md', 'Projects', 'Locations Certificate Authorities Certificate Revocation Lists Patch'] -- 
['projects_locations-certificate-authorities-certificate-revocation-lists-set-iam-policy.md', 'Projects', 'Locations Certificate Authorities Certificate Revocation Lists Set Iam Policy'] -- ['projects_locations-certificate-authorities-certificate-revocation-lists-test-iam-permissions.md', 'Projects', 'Locations Certificate Authorities Certificate Revocation Lists Test Iam Permissions'] -- ['projects_locations-certificate-authorities-certificates-create.md', 'Projects', 'Locations Certificate Authorities Certificates Create'] -- ['projects_locations-certificate-authorities-certificates-get.md', 'Projects', 'Locations Certificate Authorities Certificates Get'] -- ['projects_locations-certificate-authorities-certificates-list.md', 'Projects', 'Locations Certificate Authorities Certificates List'] -- ['projects_locations-certificate-authorities-certificates-patch.md', 'Projects', 'Locations Certificate Authorities Certificates Patch'] -- ['projects_locations-certificate-authorities-certificates-revoke.md', 'Projects', 'Locations Certificate Authorities Certificates Revoke'] -- ['projects_locations-certificate-authorities-create.md', 'Projects', 'Locations Certificate Authorities Create'] -- ['projects_locations-certificate-authorities-disable.md', 'Projects', 'Locations Certificate Authorities Disable'] -- ['projects_locations-certificate-authorities-enable.md', 'Projects', 'Locations Certificate Authorities Enable'] -- ['projects_locations-certificate-authorities-fetch.md', 'Projects', 'Locations Certificate Authorities Fetch'] -- ['projects_locations-certificate-authorities-get.md', 'Projects', 'Locations Certificate Authorities Get'] -- ['projects_locations-certificate-authorities-get-iam-policy.md', 'Projects', 'Locations Certificate Authorities Get Iam Policy'] -- ['projects_locations-certificate-authorities-list.md', 'Projects', 'Locations Certificate Authorities List'] -- ['projects_locations-certificate-authorities-patch.md', 'Projects', 'Locations Certificate 
Authorities Patch'] -- ['projects_locations-certificate-authorities-restore.md', 'Projects', 'Locations Certificate Authorities Restore'] -- ['projects_locations-certificate-authorities-schedule-delete.md', 'Projects', 'Locations Certificate Authorities Schedule Delete'] -- ['projects_locations-certificate-authorities-set-iam-policy.md', 'Projects', 'Locations Certificate Authorities Set Iam Policy'] -- ['projects_locations-certificate-authorities-test-iam-permissions.md', 'Projects', 'Locations Certificate Authorities Test Iam Permissions'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-reusable-configs-get.md', 'Projects', 'Locations Reusable Configs Get'] -- ['projects_locations-reusable-configs-get-iam-policy.md', 'Projects', 'Locations Reusable Configs Get Iam Policy'] -- ['projects_locations-reusable-configs-list.md', 'Projects', 'Locations Reusable Configs List'] -- ['projects_locations-reusable-configs-set-iam-policy.md', 'Projects', 'Locations Reusable Configs Set Iam Policy'] -- ['projects_locations-reusable-configs-test-iam-permissions.md', 'Projects', 'Locations Reusable Configs Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Certificate Authorities Activate': 'projects_locations-certificate-authorities-activate.md' + - 'Locations Certificate Authorities Certificate Revocation Lists Get': 'projects_locations-certificate-authorities-certificate-revocation-lists-get.md' + - 'Locations Certificate Authorities Certificate Revocation Lists Get Iam Policy': 
'projects_locations-certificate-authorities-certificate-revocation-lists-get-iam-policy.md' + - 'Locations Certificate Authorities Certificate Revocation Lists List': 'projects_locations-certificate-authorities-certificate-revocation-lists-list.md' + - 'Locations Certificate Authorities Certificate Revocation Lists Patch': 'projects_locations-certificate-authorities-certificate-revocation-lists-patch.md' + - 'Locations Certificate Authorities Certificate Revocation Lists Set Iam Policy': 'projects_locations-certificate-authorities-certificate-revocation-lists-set-iam-policy.md' + - 'Locations Certificate Authorities Certificate Revocation Lists Test Iam Permissions': 'projects_locations-certificate-authorities-certificate-revocation-lists-test-iam-permissions.md' + - 'Locations Certificate Authorities Certificates Create': 'projects_locations-certificate-authorities-certificates-create.md' + - 'Locations Certificate Authorities Certificates Get': 'projects_locations-certificate-authorities-certificates-get.md' + - 'Locations Certificate Authorities Certificates List': 'projects_locations-certificate-authorities-certificates-list.md' + - 'Locations Certificate Authorities Certificates Patch': 'projects_locations-certificate-authorities-certificates-patch.md' + - 'Locations Certificate Authorities Certificates Revoke': 'projects_locations-certificate-authorities-certificates-revoke.md' + - 'Locations Certificate Authorities Create': 'projects_locations-certificate-authorities-create.md' + - 'Locations Certificate Authorities Disable': 'projects_locations-certificate-authorities-disable.md' + - 'Locations Certificate Authorities Enable': 'projects_locations-certificate-authorities-enable.md' + - 'Locations Certificate Authorities Fetch': 'projects_locations-certificate-authorities-fetch.md' + - 'Locations Certificate Authorities Get': 'projects_locations-certificate-authorities-get.md' + - 'Locations Certificate Authorities Get Iam Policy': 
'projects_locations-certificate-authorities-get-iam-policy.md' + - 'Locations Certificate Authorities List': 'projects_locations-certificate-authorities-list.md' + - 'Locations Certificate Authorities Patch': 'projects_locations-certificate-authorities-patch.md' + - 'Locations Certificate Authorities Restore': 'projects_locations-certificate-authorities-restore.md' + - 'Locations Certificate Authorities Schedule Delete': 'projects_locations-certificate-authorities-schedule-delete.md' + - 'Locations Certificate Authorities Set Iam Policy': 'projects_locations-certificate-authorities-set-iam-policy.md' + - 'Locations Certificate Authorities Test Iam Permissions': 'projects_locations-certificate-authorities-test-iam-permissions.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Reusable Configs Get': 'projects_locations-reusable-configs-get.md' + - 'Locations Reusable Configs Get Iam Policy': 'projects_locations-reusable-configs-get-iam-policy.md' + - 'Locations Reusable Configs List': 'projects_locations-reusable-configs-list.md' + - 'Locations Reusable Configs Set Iam Policy': 'projects_locations-reusable-configs-set-iam-policy.md' + - 'Locations Reusable Configs Test Iam Permissions': 'projects_locations-reusable-configs-test-iam-permissions.md' theme: readthedocs diff --git a/gen/privateca1_beta1-cli/src/client.rs b/gen/privateca1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/privateca1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use 
crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/privateca1_beta1-cli/src/main.rs b/gen/privateca1_beta1-cli/src/main.rs index f5e81bc814..d1435a8f8a 100644 --- a/gen/privateca1_beta1-cli/src/main.rs +++ b/gen/privateca1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_privateca1_beta1::{api, Error, oauth2}; +use google_privateca1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -198,7 +197,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -257,7 +256,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -359,7 +358,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -820,7 +819,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -998,7 +997,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -1589,7 +1588,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1648,7 +1647,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1823,7 +1822,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -2280,7 +2279,7 @@ where 
call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2530,7 +2529,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2641,7 +2640,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2700,7 +2699,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3175,7 +3174,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3247,7 +3246,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3275,7 +3274,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3559,7 +3558,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3687,7 +3686,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3715,7 +3714,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3903,7 +3902,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3947,7 +3946,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3975,7 +3974,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4003,7 +4002,7 @@ async fn main() { let mut app = App::new("privateca1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220209") + .version("5.0.2+20230105") .about("The Certificate Authority Service API is a highly-available, scalable service that enables you to simplify and automate the management of private certificate authorities (CAs) while staying in control of your private keys. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_privateca1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/privateca1_beta1/Cargo.toml b/gen/privateca1_beta1/Cargo.toml index 45911113e2..25b97fa395 100644 --- a/gen/privateca1_beta1/Cargo.toml +++ b/gen/privateca1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-privateca1_beta1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Certificate Authority Service (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/privateca1_beta1" homepage = "https://cloud.google.com/" -documentation = "https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-privateca1_beta1/5.0.2+20230105" license = "MIT" keywords = ["privateca", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/privateca1_beta1/README.md b/gen/privateca1_beta1/README.md index d7496c31fc..7d542859d6 100644 --- a/gen/privateca1_beta1/README.md +++ b/gen/privateca1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-privateca1_beta1` library allows access to all features of the *Google Certificate Authority Service* service. 
-This documentation was generated from *Certificate Authority Service* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *privateca:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Certificate Authority Service* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *privateca:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Certificate Authority Service* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/CertificateAuthorityService) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/CertificateAuthorityService) ... 
* projects - * [*locations certificate authorities activate*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityActivateCall), [*locations certificate authorities certificate revocation lists get*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListGetCall), [*locations certificate authorities certificate revocation lists get iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListGetIamPolicyCall), [*locations certificate authorities certificate revocation lists list*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListListCall), [*locations certificate authorities certificate revocation lists patch*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListPatchCall), [*locations certificate authorities certificate revocation lists set iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListSetIamPolicyCall), [*locations certificate authorities certificate revocation lists test iam permissions*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListTestIamPermissionCall), [*locations certificate authorities certificates create*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateCreateCall), [*locations certificate authorities certificates 
get*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateGetCall), [*locations certificate authorities certificates list*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateListCall), [*locations certificate authorities certificates patch*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificatePatchCall), [*locations certificate authorities certificates revoke*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevokeCall), [*locations certificate authorities create*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCreateCall), [*locations certificate authorities disable*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityDisableCall), [*locations certificate authorities enable*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityEnableCall), [*locations certificate authorities fetch*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityFetchCall), [*locations certificate authorities get*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityGetCall), [*locations certificate authorities get iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityGetIamPolicyCall), [*locations certificate authorities 
list*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityListCall), [*locations certificate authorities patch*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityPatchCall), [*locations certificate authorities restore*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityRestoreCall), [*locations certificate authorities schedule delete*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityScheduleDeleteCall), [*locations certificate authorities set iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthoritySetIamPolicyCall), [*locations certificate authorities test iam permissions*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityTestIamPermissionCall), [*locations get*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationOperationGetCall), [*locations operations 
list*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationOperationListCall), [*locations reusable configs get*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigGetCall), [*locations reusable configs get iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigGetIamPolicyCall), [*locations reusable configs list*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigListCall), [*locations reusable configs set iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigSetIamPolicyCall) and [*locations reusable configs test iam permissions*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigTestIamPermissionCall) + * [*locations certificate authorities activate*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityActivateCall), [*locations certificate authorities certificate revocation lists get*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListGetCall), [*locations certificate authorities certificate revocation lists get iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListGetIamPolicyCall), [*locations certificate authorities certificate revocation lists list*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListListCall), [*locations certificate authorities certificate revocation lists 
patch*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListPatchCall), [*locations certificate authorities certificate revocation lists set iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListSetIamPolicyCall), [*locations certificate authorities certificate revocation lists test iam permissions*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevocationListTestIamPermissionCall), [*locations certificate authorities certificates create*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateCreateCall), [*locations certificate authorities certificates get*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateGetCall), [*locations certificate authorities certificates list*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateListCall), [*locations certificate authorities certificates patch*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificatePatchCall), [*locations certificate authorities certificates revoke*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCertificateRevokeCall), [*locations certificate authorities create*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityCreateCall), [*locations certificate authorities 
disable*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityDisableCall), [*locations certificate authorities enable*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityEnableCall), [*locations certificate authorities fetch*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityFetchCall), [*locations certificate authorities get*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityGetCall), [*locations certificate authorities get iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityGetIamPolicyCall), [*locations certificate authorities list*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityListCall), [*locations certificate authorities patch*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityPatchCall), [*locations certificate authorities restore*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityRestoreCall), [*locations certificate authorities schedule delete*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityScheduleDeleteCall), [*locations certificate authorities set iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthoritySetIamPolicyCall), [*locations certificate authorities test iam permissions*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationCertificateAuthorityTestIamPermissionCall), [*locations 
get*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationOperationListCall), [*locations reusable configs get*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigGetCall), [*locations reusable configs get iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigGetIamPolicyCall), [*locations reusable configs list*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigListCall), [*locations reusable configs set iam policy*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigSetIamPolicyCall) and [*locations reusable configs test iam permissions*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/api::ProjectLocationReusableConfigTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/CertificateAuthorityService)** +* 
**[Hub](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/CertificateAuthorityService)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-privateca1_beta1/5.0.2-beta-1+20230105/google_privateca1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-privateca1_beta1/5.0.2+20230105/google_privateca1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/privateca1_beta1/src/api.rs b/gen/privateca1_beta1/src/api.rs index f83ba32813..4a7e7bcb53 100644 --- a/gen/privateca1_beta1/src/api.rs +++ b/gen/privateca1_beta1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> CertificateAuthorityService { CertificateAuthorityService { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://privateca.googleapis.com/".to_string(), _root_url: "https://privateca.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> CertificateAuthorityService { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/privateca1_beta1/src/client.rs b/gen/privateca1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/privateca1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/privateca1_beta1/src/lib.rs b/gen/privateca1_beta1/src/lib.rs index 1cb833dd7e..c5bfdc6c5a 100644 --- a/gen/privateca1_beta1/src/lib.rs +++ b/gen/privateca1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Certificate Authority Service* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *privateca:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Certificate Authority Service* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *privateca:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Certificate Authority Service* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/). diff --git a/gen/prod_tt_sasportal1_alpha1-cli/Cargo.toml b/gen/prod_tt_sasportal1_alpha1-cli/Cargo.toml index de9f04c1fd..6fda706f09 100644 --- a/gen/prod_tt_sasportal1_alpha1-cli/Cargo.toml +++ b/gen/prod_tt_sasportal1_alpha1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-prod_tt_sasportal1_alpha1-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with SAS Portal Testing (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/prod_tt_sasportal1_alpha1-cli" @@ -20,13 +20,13 @@ name = "prod-tt-sasportal1-alpha1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-prod_tt_sasportal1_alpha1] path = "../prod_tt_sasportal1_alpha1" -version = "4.0.1+20220303" +version = "5.0.2+20230121" + diff --git a/gen/prod_tt_sasportal1_alpha1-cli/README.md 
b/gen/prod_tt_sasportal1_alpha1-cli/README.md index 88a827eeb3..fe87898d47 100644 --- a/gen/prod_tt_sasportal1_alpha1-cli/README.md +++ b/gen/prod_tt_sasportal1_alpha1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *SAS Portal Testing* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *SAS Portal Testing* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash prod-tt-sasportal1-alpha1 [options] diff --git a/gen/prod_tt_sasportal1_alpha1-cli/mkdocs.yml b/gen/prod_tt_sasportal1_alpha1-cli/mkdocs.yml index 4731e8436c..162f39720b 100644 --- a/gen/prod_tt_sasportal1_alpha1-cli/mkdocs.yml +++ b/gen/prod_tt_sasportal1_alpha1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: SAS Portal Testing v4.0.1+20220303 +site_name: SAS Portal Testing v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-prod_tt_sasportal1_alpha1-cli site_description: A complete library to interact with SAS Portal Testing (protocol v1alpha1) @@ -7,85 +7,90 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/prod_tt_sasporta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['customers_deployments-create.md', 'Customers', 'Deployments Create'] -- ['customers_deployments-delete.md', 'Customers', 'Deployments Delete'] -- ['customers_deployments-devices-create.md', 'Customers', 'Deployments Devices Create'] -- ['customers_deployments-devices-create-signed.md', 'Customers', 'Deployments Devices Create Signed'] -- ['customers_deployments-devices-list.md', 'Customers', 'Deployments Devices List'] -- ['customers_deployments-get.md', 'Customers', 'Deployments Get'] -- ['customers_deployments-list.md', 'Customers', 'Deployments List'] -- ['customers_deployments-move.md', 'Customers', 'Deployments Move'] -- ['customers_deployments-patch.md', 'Customers', 'Deployments Patch'] -- 
['customers_devices-create.md', 'Customers', 'Devices Create'] -- ['customers_devices-create-signed.md', 'Customers', 'Devices Create Signed'] -- ['customers_devices-delete.md', 'Customers', 'Devices Delete'] -- ['customers_devices-get.md', 'Customers', 'Devices Get'] -- ['customers_devices-list.md', 'Customers', 'Devices List'] -- ['customers_devices-move.md', 'Customers', 'Devices Move'] -- ['customers_devices-patch.md', 'Customers', 'Devices Patch'] -- ['customers_devices-sign-device.md', 'Customers', 'Devices Sign Device'] -- ['customers_devices-update-signed.md', 'Customers', 'Devices Update Signed'] -- ['customers_get.md', 'Customers', 'Get'] -- ['customers_list.md', 'Customers', 'List'] -- ['customers_nodes-create.md', 'Customers', 'Nodes Create'] -- ['customers_nodes-delete.md', 'Customers', 'Nodes Delete'] -- ['customers_nodes-deployments-create.md', 'Customers', 'Nodes Deployments Create'] -- ['customers_nodes-deployments-list.md', 'Customers', 'Nodes Deployments List'] -- ['customers_nodes-devices-create.md', 'Customers', 'Nodes Devices Create'] -- ['customers_nodes-devices-create-signed.md', 'Customers', 'Nodes Devices Create Signed'] -- ['customers_nodes-devices-list.md', 'Customers', 'Nodes Devices List'] -- ['customers_nodes-get.md', 'Customers', 'Nodes Get'] -- ['customers_nodes-list.md', 'Customers', 'Nodes List'] -- ['customers_nodes-move.md', 'Customers', 'Nodes Move'] -- ['customers_nodes-nodes-create.md', 'Customers', 'Nodes Nodes Create'] -- ['customers_nodes-nodes-list.md', 'Customers', 'Nodes Nodes List'] -- ['customers_nodes-patch.md', 'Customers', 'Nodes Patch'] -- ['customers_patch.md', 'Customers', 'Patch'] -- ['deployments_devices-delete.md', 'Deployments', 'Devices Delete'] -- ['deployments_devices-get.md', 'Deployments', 'Devices Get'] -- ['deployments_devices-move.md', 'Deployments', 'Devices Move'] -- ['deployments_devices-patch.md', 'Deployments', 'Devices Patch'] -- ['deployments_devices-sign-device.md', 'Deployments', 'Devices 
Sign Device'] -- ['deployments_devices-update-signed.md', 'Deployments', 'Devices Update Signed'] -- ['deployments_get.md', 'Deployments', 'Get'] -- ['installer_generate-secret.md', 'Installer', 'Generate Secret'] -- ['installer_validate.md', 'Installer', 'Validate'] -- ['nodes_deployments-delete.md', 'Nodes', 'Deployments Delete'] -- ['nodes_deployments-devices-create.md', 'Nodes', 'Deployments Devices Create'] -- ['nodes_deployments-devices-create-signed.md', 'Nodes', 'Deployments Devices Create Signed'] -- ['nodes_deployments-devices-list.md', 'Nodes', 'Deployments Devices List'] -- ['nodes_deployments-get.md', 'Nodes', 'Deployments Get'] -- ['nodes_deployments-list.md', 'Nodes', 'Deployments List'] -- ['nodes_deployments-move.md', 'Nodes', 'Deployments Move'] -- ['nodes_deployments-patch.md', 'Nodes', 'Deployments Patch'] -- ['nodes_devices-create.md', 'Nodes', 'Devices Create'] -- ['nodes_devices-create-signed.md', 'Nodes', 'Devices Create Signed'] -- ['nodes_devices-delete.md', 'Nodes', 'Devices Delete'] -- ['nodes_devices-get.md', 'Nodes', 'Devices Get'] -- ['nodes_devices-list.md', 'Nodes', 'Devices List'] -- ['nodes_devices-move.md', 'Nodes', 'Devices Move'] -- ['nodes_devices-patch.md', 'Nodes', 'Devices Patch'] -- ['nodes_devices-sign-device.md', 'Nodes', 'Devices Sign Device'] -- ['nodes_devices-update-signed.md', 'Nodes', 'Devices Update Signed'] -- ['nodes_get.md', 'Nodes', 'Get'] -- ['nodes_nodes-create.md', 'Nodes', 'Nodes Create'] -- ['nodes_nodes-delete.md', 'Nodes', 'Nodes Delete'] -- ['nodes_nodes-deployments-create.md', 'Nodes', 'Nodes Deployments Create'] -- ['nodes_nodes-deployments-list.md', 'Nodes', 'Nodes Deployments List'] -- ['nodes_nodes-devices-create.md', 'Nodes', 'Nodes Devices Create'] -- ['nodes_nodes-devices-create-signed.md', 'Nodes', 'Nodes Devices Create Signed'] -- ['nodes_nodes-devices-list.md', 'Nodes', 'Nodes Devices List'] -- ['nodes_nodes-get.md', 'Nodes', 'Nodes Get'] -- ['nodes_nodes-list.md', 'Nodes', 'Nodes List'] -- 
['nodes_nodes-move.md', 'Nodes', 'Nodes Move'] -- ['nodes_nodes-nodes-create.md', 'Nodes', 'Nodes Nodes Create'] -- ['nodes_nodes-nodes-list.md', 'Nodes', 'Nodes Nodes List'] -- ['nodes_nodes-patch.md', 'Nodes', 'Nodes Patch'] -- ['policies_get.md', 'Policies', 'Get'] -- ['policies_set.md', 'Policies', 'Set'] -- ['policies_test.md', 'Policies', 'Test'] +nav: +- Home: 'index.md' +- 'Customers': + - 'Deployments Create': 'customers_deployments-create.md' + - 'Deployments Delete': 'customers_deployments-delete.md' + - 'Deployments Devices Create': 'customers_deployments-devices-create.md' + - 'Deployments Devices Create Signed': 'customers_deployments-devices-create-signed.md' + - 'Deployments Devices List': 'customers_deployments-devices-list.md' + - 'Deployments Get': 'customers_deployments-get.md' + - 'Deployments List': 'customers_deployments-list.md' + - 'Deployments Move': 'customers_deployments-move.md' + - 'Deployments Patch': 'customers_deployments-patch.md' + - 'Devices Create': 'customers_devices-create.md' + - 'Devices Create Signed': 'customers_devices-create-signed.md' + - 'Devices Delete': 'customers_devices-delete.md' + - 'Devices Get': 'customers_devices-get.md' + - 'Devices List': 'customers_devices-list.md' + - 'Devices Move': 'customers_devices-move.md' + - 'Devices Patch': 'customers_devices-patch.md' + - 'Devices Sign Device': 'customers_devices-sign-device.md' + - 'Devices Update Signed': 'customers_devices-update-signed.md' + - 'Get': 'customers_get.md' + - 'List': 'customers_list.md' + - 'Nodes Create': 'customers_nodes-create.md' + - 'Nodes Delete': 'customers_nodes-delete.md' + - 'Nodes Deployments Create': 'customers_nodes-deployments-create.md' + - 'Nodes Deployments List': 'customers_nodes-deployments-list.md' + - 'Nodes Devices Create': 'customers_nodes-devices-create.md' + - 'Nodes Devices Create Signed': 'customers_nodes-devices-create-signed.md' + - 'Nodes Devices List': 'customers_nodes-devices-list.md' + - 'Nodes Get': 
'customers_nodes-get.md' + - 'Nodes List': 'customers_nodes-list.md' + - 'Nodes Move': 'customers_nodes-move.md' + - 'Nodes Nodes Create': 'customers_nodes-nodes-create.md' + - 'Nodes Nodes List': 'customers_nodes-nodes-list.md' + - 'Nodes Patch': 'customers_nodes-patch.md' + - 'Patch': 'customers_patch.md' +- 'Deployments': + - 'Devices Delete': 'deployments_devices-delete.md' + - 'Devices Get': 'deployments_devices-get.md' + - 'Devices Move': 'deployments_devices-move.md' + - 'Devices Patch': 'deployments_devices-patch.md' + - 'Devices Sign Device': 'deployments_devices-sign-device.md' + - 'Devices Update Signed': 'deployments_devices-update-signed.md' + - 'Get': 'deployments_get.md' +- 'Installer': + - 'Generate Secret': 'installer_generate-secret.md' + - 'Validate': 'installer_validate.md' +- 'Nodes': + - 'Deployments Delete': 'nodes_deployments-delete.md' + - 'Deployments Devices Create': 'nodes_deployments-devices-create.md' + - 'Deployments Devices Create Signed': 'nodes_deployments-devices-create-signed.md' + - 'Deployments Devices List': 'nodes_deployments-devices-list.md' + - 'Deployments Get': 'nodes_deployments-get.md' + - 'Deployments List': 'nodes_deployments-list.md' + - 'Deployments Move': 'nodes_deployments-move.md' + - 'Deployments Patch': 'nodes_deployments-patch.md' + - 'Devices Create': 'nodes_devices-create.md' + - 'Devices Create Signed': 'nodes_devices-create-signed.md' + - 'Devices Delete': 'nodes_devices-delete.md' + - 'Devices Get': 'nodes_devices-get.md' + - 'Devices List': 'nodes_devices-list.md' + - 'Devices Move': 'nodes_devices-move.md' + - 'Devices Patch': 'nodes_devices-patch.md' + - 'Devices Sign Device': 'nodes_devices-sign-device.md' + - 'Devices Update Signed': 'nodes_devices-update-signed.md' + - 'Get': 'nodes_get.md' + - 'Nodes Create': 'nodes_nodes-create.md' + - 'Nodes Delete': 'nodes_nodes-delete.md' + - 'Nodes Deployments Create': 'nodes_nodes-deployments-create.md' + - 'Nodes Deployments List': 
'nodes_nodes-deployments-list.md' + - 'Nodes Devices Create': 'nodes_nodes-devices-create.md' + - 'Nodes Devices Create Signed': 'nodes_nodes-devices-create-signed.md' + - 'Nodes Devices List': 'nodes_nodes-devices-list.md' + - 'Nodes Get': 'nodes_nodes-get.md' + - 'Nodes List': 'nodes_nodes-list.md' + - 'Nodes Move': 'nodes_nodes-move.md' + - 'Nodes Nodes Create': 'nodes_nodes-nodes-create.md' + - 'Nodes Nodes List': 'nodes_nodes-nodes-list.md' + - 'Nodes Patch': 'nodes_nodes-patch.md' +- 'Policies': + - 'Get': 'policies_get.md' + - 'Set': 'policies_set.md' + - 'Test': 'policies_test.md' theme: readthedocs diff --git a/gen/prod_tt_sasportal1_alpha1-cli/src/client.rs b/gen/prod_tt_sasportal1_alpha1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/prod_tt_sasportal1_alpha1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/prod_tt_sasportal1_alpha1-cli/src/main.rs b/gen/prod_tt_sasportal1_alpha1-cli/src/main.rs index 2f75cf8ea2..1227121d3c 100644 --- a/gen/prod_tt_sasportal1_alpha1-cli/src/main.rs +++ b/gen/prod_tt_sasportal1_alpha1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_prod_tt_sasportal1_alpha1::{api, Error, oauth2}; +use google_prod_tt_sasportal1_alpha1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -222,9 +221,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => 
Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -250,6 +251,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ 
-261,9 +263,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => 
Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -284,7 +288,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", 
"vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -440,7 +444,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -554,7 +558,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -734,7 +738,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -814,9 +818,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => 
Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -842,6 +848,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => 
Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -853,9 +860,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -876,7 +885,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", 
"radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1136,7 +1145,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1304,9 +1313,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => 
Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -1332,6 +1343,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1343,9 +1355,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: 
ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -1366,7 +1380,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, 
&vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1381,7 +1395,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1461,9 +1475,11 @@ where "device.active-config.installation-params.antenna-beamwidth" => Some(("device.activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-downtilt" => Some(("device.activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-gain" => Some(("device.activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.antenna-gain-new-field" => Some(("device.activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-model" => Some(("device.activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.cpe-cbsd-indication" => Some(("device.activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.active-config.installation-params.eirp-capability" => Some(("device.activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.eirp-capability-new-field" => Some(("device.activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height" => Some(("device.activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height-type" => Some(("device.activeConfig.installationParams.heightType", JsonTypeInfo 
{ jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.horizontal-accuracy" => Some(("device.activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -1489,6 +1505,7 @@ where "device.device-metadata.nrqz-validation.cpi-id" => Some(("device.deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.latitude" => Some(("device.deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.longitude" => Some(("device.deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device.device-metadata.nrqz-validation.state" => Some(("device.deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.display-name" => Some(("device.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.fcc-id" => Some(("device.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.name" => Some(("device.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1500,9 +1517,11 @@ where "device.preloaded-config.installation-params.antenna-beamwidth" => Some(("device.preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-downtilt" => Some(("device.preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-gain" => Some(("device.preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + 
"device.preloaded-config.installation-params.antenna-gain-new-field" => Some(("device.preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-model" => Some(("device.preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.cpe-cbsd-indication" => Some(("device.preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.eirp-capability" => Some(("device.preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.eirp-capability-new-field" => Some(("device.preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height" => Some(("device.preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height-type" => Some(("device.preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.horizontal-accuracy" => Some(("device.preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -1523,7 +1542,7 @@ where "device.serial-number" => Some(("device.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.state" => Some(("device.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", 
"antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1731,7 +1750,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ 
-2017,7 +2036,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2100,9 +2119,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), 
"active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -2128,6 +2149,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2139,9 +2161,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => 
Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -2162,7 +2186,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", 
"eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2318,7 +2342,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2432,7 +2456,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = 
call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2666,7 +2690,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2760,7 +2784,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2851,7 +2875,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3120,9 +3144,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3148,6 +3174,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3159,9 +3186,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3182,7 +3211,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", 
"software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3197,7 +3226,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3277,9 +3306,11 @@ where "device.active-config.installation-params.antenna-beamwidth" => Some(("device.activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-downtilt" => Some(("device.activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-gain" => Some(("device.activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.antenna-gain-new-field" => Some(("device.activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-model" => Some(("device.activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.cpe-cbsd-indication" => Some(("device.activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.active-config.installation-params.eirp-capability" => Some(("device.activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + 
"device.active-config.installation-params.eirp-capability-new-field" => Some(("device.activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height" => Some(("device.activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height-type" => Some(("device.activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.horizontal-accuracy" => Some(("device.activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3305,6 +3336,7 @@ where "device.device-metadata.nrqz-validation.cpi-id" => Some(("device.deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.latitude" => Some(("device.deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.longitude" => Some(("device.deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device.device-metadata.nrqz-validation.state" => Some(("device.deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.display-name" => Some(("device.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.fcc-id" => Some(("device.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.name" => Some(("device.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3316,9 +3348,11 @@ where "device.preloaded-config.installation-params.antenna-beamwidth" => 
Some(("device.preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-downtilt" => Some(("device.preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-gain" => Some(("device.preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.antenna-gain-new-field" => Some(("device.preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-model" => Some(("device.preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.cpe-cbsd-indication" => Some(("device.preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.eirp-capability" => Some(("device.preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.eirp-capability-new-field" => Some(("device.preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height" => Some(("device.preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height-type" => Some(("device.preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.horizontal-accuracy" => 
Some(("device.preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3339,7 +3373,7 @@ where "device.serial-number" => Some(("device.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.state" => Some(("device.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", 
"supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3791,9 +3825,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3819,6 +3855,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3830,9 +3867,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), 
"preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3853,7 +3892,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", 
"installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4009,7 +4048,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4123,7 +4162,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ 
-4303,7 +4342,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4383,9 +4422,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), 
"active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -4411,6 +4452,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4422,9 +4464,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => 
Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -4445,7 +4489,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", 
"eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4705,7 +4749,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4873,9 +4917,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod 
})), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -4901,6 +4947,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4912,9 +4959,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => 
Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -4935,7 +4984,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", 
"antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4950,7 +4999,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5030,9 +5079,11 @@ where "device.active-config.installation-params.antenna-beamwidth" => Some(("device.activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-downtilt" => Some(("device.activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-gain" => Some(("device.activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.antenna-gain-new-field" => Some(("device.activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: 
ComplexType::Pod })), "device.active-config.installation-params.antenna-model" => Some(("device.activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.cpe-cbsd-indication" => Some(("device.activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.active-config.installation-params.eirp-capability" => Some(("device.activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.eirp-capability-new-field" => Some(("device.activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height" => Some(("device.activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height-type" => Some(("device.activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.horizontal-accuracy" => Some(("device.activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -5058,6 +5109,7 @@ where "device.device-metadata.nrqz-validation.cpi-id" => Some(("device.deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.latitude" => Some(("device.deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.longitude" => Some(("device.deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device.device-metadata.nrqz-validation.state" => 
Some(("device.deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.display-name" => Some(("device.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.fcc-id" => Some(("device.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.name" => Some(("device.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5069,9 +5121,11 @@ where "device.preloaded-config.installation-params.antenna-beamwidth" => Some(("device.preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-downtilt" => Some(("device.preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-gain" => Some(("device.preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.antenna-gain-new-field" => Some(("device.preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-model" => Some(("device.preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.cpe-cbsd-indication" => Some(("device.preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.eirp-capability" => Some(("device.preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.eirp-capability-new-field" => 
Some(("device.preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height" => Some(("device.preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height-type" => Some(("device.preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.horizontal-accuracy" => Some(("device.preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -5092,7 +5146,7 @@ where "device.serial-number" => Some(("device.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.state" => Some(("device.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", 
"call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5527,7 +5581,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5610,9 +5664,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -5638,6 +5694,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5649,9 +5706,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -5672,7 +5731,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", 
"supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5828,7 +5887,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5942,7 +6001,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6176,7 +6235,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6270,7 +6329,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8873,7 +8932,7 @@ async fn main() { let mut app = App::new("prod-tt-sasportal1-alpha1") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230121") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_prod_tt_sasportal1_alpha1_cli") .arg(Arg::with_name("url") diff --git a/gen/prod_tt_sasportal1_alpha1/Cargo.toml b/gen/prod_tt_sasportal1_alpha1/Cargo.toml 
index 8545802774..64caeebb71 100644 --- a/gen/prod_tt_sasportal1_alpha1/Cargo.toml +++ b/gen/prod_tt_sasportal1_alpha1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-prod_tt_sasportal1_alpha1" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with SAS Portal Testing (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/prod_tt_sasportal1_alpha1" homepage = "https://developers.google.com/spectrum-access-system/" -documentation = "https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121" license = "MIT" keywords = ["prod_tt_sasportal", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/prod_tt_sasportal1_alpha1/README.md b/gen/prod_tt_sasportal1_alpha1/README.md index d297351120..ff8b89b049 100644 --- a/gen/prod_tt_sasportal1_alpha1/README.md +++ b/gen/prod_tt_sasportal1_alpha1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-prod_tt_sasportal1_alpha1` library allows access to all features of the *Google SAS Portal Testing* service. -This documentation was generated from *SAS Portal Testing* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *prod_tt_sasportal:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *SAS Portal Testing* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *prod_tt_sasportal:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *SAS Portal Testing* *v1_alpha1* API can be found at the [official documentation site](https://developers.google.com/spectrum-access-system/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/SASPortalTesting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/SASPortalTesting) ... * customers - * [*deployments create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentCreateCall), [*deployments delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentDeleteCall), [*deployments devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentDeviceCreateCall), [*deployments devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentDeviceCreateSignedCall), [*deployments devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentDeviceListCall), [*deployments get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentGetCall), [*deployments list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentListCall), [*deployments move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentMoveCall), [*deployments patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentPatchCall), [*devices 
create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceCreateCall), [*devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceCreateSignedCall), [*devices delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceDeleteCall), [*devices get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceGetCall), [*devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceListCall), [*devices move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceMoveCall), [*devices patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDevicePatchCall), [*devices sign device*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceUpdateSignedCall), [*get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerGetCall), [*list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerListCall), [*nodes create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeCreateCall), [*nodes delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeleteCall), [*nodes deployments 
create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeploymentCreateCall), [*nodes deployments list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeploymentListCall), [*nodes devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeviceCreateCall), [*nodes devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeviceCreateSignedCall), [*nodes devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeviceListCall), [*nodes get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeGetCall), [*nodes list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeListCall), [*nodes move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeMoveCall), [*nodes nodes create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeNodeCreateCall), [*nodes nodes list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeNodeListCall), [*nodes patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodePatchCall) and [*patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerPatchCall) + * [*deployments 
create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentCreateCall), [*deployments delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentDeleteCall), [*deployments devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentDeviceCreateCall), [*deployments devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentDeviceCreateSignedCall), [*deployments devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentDeviceListCall), [*deployments get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentGetCall), [*deployments list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentListCall), [*deployments move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentMoveCall), [*deployments patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeploymentPatchCall), [*devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceCreateCall), [*devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceCreateSignedCall), [*devices delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceDeleteCall), [*devices 
get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceGetCall), [*devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceListCall), [*devices move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceMoveCall), [*devices patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDevicePatchCall), [*devices sign device*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerDeviceUpdateSignedCall), [*get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerGetCall), [*list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerListCall), [*nodes create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeCreateCall), [*nodes delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeleteCall), [*nodes deployments create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeploymentCreateCall), [*nodes deployments list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeploymentListCall), [*nodes devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeviceCreateCall), [*nodes devices create 
signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeviceCreateSignedCall), [*nodes devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeDeviceListCall), [*nodes get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeGetCall), [*nodes list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeListCall), [*nodes move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeMoveCall), [*nodes nodes create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeNodeCreateCall), [*nodes nodes list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodeNodeListCall), [*nodes patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerNodePatchCall) and [*patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::CustomerPatchCall) * deployments - * [*devices delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceDeleteCall), [*devices get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceGetCall), [*devices move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceMoveCall), [*devices patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDevicePatchCall), [*devices sign 
device*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceUpdateSignedCall) and [*get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentGetCall) + * [*devices delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceDeleteCall), [*devices get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceGetCall), [*devices move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceMoveCall), [*devices patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDevicePatchCall), [*devices sign device*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentDeviceUpdateSignedCall) and [*get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::DeploymentGetCall) * installer - * [*generate secret*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::InstallerGenerateSecretCall) and [*validate*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::InstallerValidateCall) + * [*generate secret*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::InstallerGenerateSecretCall) and 
[*validate*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::InstallerValidateCall) * nodes - * [*deployments delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentDeleteCall), [*deployments devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentDeviceCreateCall), [*deployments devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentDeviceCreateSignedCall), [*deployments devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentDeviceListCall), [*deployments get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentGetCall), [*deployments list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentListCall), [*deployments move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentMoveCall), [*deployments patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentPatchCall), [*devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceCreateCall), [*devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceCreateSignedCall), [*devices delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceDeleteCall), [*devices 
get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceGetCall), [*devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceListCall), [*devices move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceMoveCall), [*devices patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDevicePatchCall), [*devices sign device*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceUpdateSignedCall), [*get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeGetCall), [*nodes create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeCreateCall), [*nodes delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeleteCall), [*nodes deployments create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeploymentCreateCall), [*nodes deployments list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeploymentListCall), [*nodes devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeviceCreateCall), [*nodes devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeviceCreateSignedCall), 
[*nodes devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeviceListCall), [*nodes get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeGetCall), [*nodes list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeListCall), [*nodes move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeMoveCall), [*nodes nodes create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeNodeCreateCall), [*nodes nodes list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeNodeListCall) and [*nodes patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodePatchCall) + * [*deployments delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentDeleteCall), [*deployments devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentDeviceCreateCall), [*deployments devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentDeviceCreateSignedCall), [*deployments devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentDeviceListCall), [*deployments get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentGetCall), [*deployments list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentListCall), 
[*deployments move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentMoveCall), [*deployments patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeploymentPatchCall), [*devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceCreateCall), [*devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceCreateSignedCall), [*devices delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceDeleteCall), [*devices get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceGetCall), [*devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceListCall), [*devices move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceMoveCall), [*devices patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDevicePatchCall), [*devices sign device*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeDeviceUpdateSignedCall), [*get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeGetCall), [*nodes create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeCreateCall), [*nodes 
delete*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeleteCall), [*nodes deployments create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeploymentCreateCall), [*nodes deployments list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeploymentListCall), [*nodes devices create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeviceCreateCall), [*nodes devices create signed*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeviceCreateSignedCall), [*nodes devices list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeDeviceListCall), [*nodes get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeGetCall), [*nodes list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeListCall), [*nodes move*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeMoveCall), [*nodes nodes create*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeNodeCreateCall), [*nodes nodes list*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodeNodeListCall) and [*nodes patch*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::NodeNodePatchCall) * policies - * [*get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::PolicyGetCall), 
[*set*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::PolicySetCall) and [*test*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/api::PolicyTestCall) + * [*get*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::PolicyGetCall), [*set*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::PolicySetCall) and [*test*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/api::PolicyTestCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/SASPortalTesting)** +* **[Hub](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/SASPortalTesting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::CallBuilder) -* **[Resources](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::Part)** + * **[Parts](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -153,17 +153,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -173,29 +173,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::Delegate) to the -[Method Builder](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::Delegate) to the +[Method Builder](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::RequestValue) and -[decodable](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::RequestValue) and +[decodable](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2-beta-1+20230121/google_prod_tt_sasportal1_alpha1/client::RequestValue) are moved +* [request values](https://docs.rs/google-prod_tt_sasportal1_alpha1/5.0.2+20230121/google_prod_tt_sasportal1_alpha1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/prod_tt_sasportal1_alpha1/src/api.rs b/gen/prod_tt_sasportal1_alpha1/src/api.rs index 4a0fbea10f..f33d4fe613 100644 --- a/gen/prod_tt_sasportal1_alpha1/src/api.rs +++ b/gen/prod_tt_sasportal1_alpha1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> SASPortalTesting { SASPortalTesting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://prod-tt-sasportal.googleapis.com/".to_string(), _root_url: "https://prod-tt-sasportal.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> SASPortalTesting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/prod_tt_sasportal1_alpha1/src/client.rs b/gen/prod_tt_sasportal1_alpha1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/prod_tt_sasportal1_alpha1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - 
After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/prod_tt_sasportal1_alpha1/src/lib.rs b/gen/prod_tt_sasportal1_alpha1/src/lib.rs index 64522084cf..7692e40077 100644 --- a/gen/prod_tt_sasportal1_alpha1/src/lib.rs +++ b/gen/prod_tt_sasportal1_alpha1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *SAS Portal Testing* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *prod_tt_sasportal:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *SAS Portal Testing* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *prod_tt_sasportal:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *SAS Portal Testing* *v1_alpha1* API can be found at the //! [official documentation site](https://developers.google.com/spectrum-access-system/). diff --git a/gen/proximitybeacon1_beta1-cli/Cargo.toml b/gen/proximitybeacon1_beta1-cli/Cargo.toml index b60c0c1bbe..10cf99d334 100644 --- a/gen/proximitybeacon1_beta1-cli/Cargo.toml +++ b/gen/proximitybeacon1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-proximitybeacon1_beta1-cli" -version = "4.0.1+20200127" +version = "5.0.2+20200127" authors = ["Sebastian Thiel "] description = "A complete library to interact with proximitybeacon (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/proximitybeacon1_beta1-cli" @@ -20,13 +20,13 @@ name = "proximitybeacon1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-proximitybeacon1_beta1] path = "../proximitybeacon1_beta1" -version = "4.0.1+20200127" +version = "5.0.2+20200127" + diff --git a/gen/proximitybeacon1_beta1-cli/README.md b/gen/proximitybeacon1_beta1-cli/README.md index 
1a7a9b11e0..fa6177d2c4 100644 --- a/gen/proximitybeacon1_beta1-cli/README.md +++ b/gen/proximitybeacon1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *proximitybeacon* API at revision *20200127*. The CLI is at version *4.0.1*. +This documentation was generated from the *proximitybeacon* API at revision *20200127*. The CLI is at version *5.0.2*. ```bash proximitybeacon1-beta1 [options] diff --git a/gen/proximitybeacon1_beta1-cli/mkdocs.yml b/gen/proximitybeacon1_beta1-cli/mkdocs.yml index 2659f5786c..baf07d8c32 100644 --- a/gen/proximitybeacon1_beta1-cli/mkdocs.yml +++ b/gen/proximitybeacon1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: proximitybeacon v4.0.1+20200127 +site_name: proximitybeacon v5.0.2+20200127 site_url: http://byron.github.io/google-apis-rs/google-proximitybeacon1_beta1-cli site_description: A complete library to interact with proximitybeacon (protocol v1beta1) @@ -7,25 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/proximitybeacon1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['beaconinfo_getforobserved.md', 'Beaconinfo', 'Getforobserved'] -- ['beacons_activate.md', 'Beacons', 'Activate'] -- ['beacons_attachments-batch-delete.md', 'Beacons', 'Attachments Batch Delete'] -- ['beacons_attachments-create.md', 'Beacons', 'Attachments Create'] -- ['beacons_attachments-delete.md', 'Beacons', 'Attachments Delete'] -- ['beacons_attachments-list.md', 'Beacons', 'Attachments List'] -- ['beacons_deactivate.md', 'Beacons', 'Deactivate'] -- ['beacons_decommission.md', 'Beacons', 'Decommission'] -- ['beacons_delete.md', 'Beacons', 'Delete'] -- ['beacons_diagnostics-list.md', 'Beacons', 'Diagnostics List'] -- ['beacons_get.md', 'Beacons', 'Get'] -- ['beacons_list.md', 'Beacons', 'List'] -- ['beacons_register.md', 'Beacons', 'Register'] -- ['beacons_update.md', 'Beacons', 'Update'] -- 
['methods_get-eidparams.md', 'Methods', 'Get Eidparams'] -- ['namespaces_list.md', 'Namespaces', 'List'] -- ['namespaces_update.md', 'Namespaces', 'Update'] +nav: +- Home: 'index.md' +- 'Beaconinfo': + - 'Getforobserved': 'beaconinfo_getforobserved.md' +- 'Beacons': + - 'Activate': 'beacons_activate.md' + - 'Attachments Batch Delete': 'beacons_attachments-batch-delete.md' + - 'Attachments Create': 'beacons_attachments-create.md' + - 'Attachments Delete': 'beacons_attachments-delete.md' + - 'Attachments List': 'beacons_attachments-list.md' + - 'Deactivate': 'beacons_deactivate.md' + - 'Decommission': 'beacons_decommission.md' + - 'Delete': 'beacons_delete.md' + - 'Diagnostics List': 'beacons_diagnostics-list.md' + - 'Get': 'beacons_get.md' + - 'List': 'beacons_list.md' + - 'Register': 'beacons_register.md' + - 'Update': 'beacons_update.md' +- 'Methods': + - 'Get Eidparams': 'methods_get-eidparams.md' +- 'Namespaces': + - 'List': 'namespaces_list.md' + - 'Update': 'namespaces_update.md' theme: readthedocs diff --git a/gen/proximitybeacon1_beta1-cli/src/client.rs b/gen/proximitybeacon1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/proximitybeacon1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct 
JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/proximitybeacon1_beta1-cli/src/main.rs b/gen/proximitybeacon1_beta1-cli/src/main.rs index 509b5bea0c..772cb0156d 100644 --- a/gen/proximitybeacon1_beta1-cli/src/main.rs +++ b/gen/proximitybeacon1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_proximitybeacon1_beta1::{api, Error, oauth2}; +use google_proximitybeacon1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -637,7 +636,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "alert-filter" => { call = call.alert_filter(value.unwrap_or("")); @@ -761,7 +760,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1969,7 +1968,7 @@ async fn main() { let mut app = App::new("proximitybeacon1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20200127") + .version("5.0.2+20200127") .about("Registers, manages, indexes, and searches beacons.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_proximitybeacon1_beta1_cli") 
.arg(Arg::with_name("url") diff --git a/gen/proximitybeacon1_beta1/Cargo.toml b/gen/proximitybeacon1_beta1/Cargo.toml index deaa4a4182..c6c5511346 100644 --- a/gen/proximitybeacon1_beta1/Cargo.toml +++ b/gen/proximitybeacon1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-proximitybeacon1_beta1" -version = "5.0.2-beta-1+20200127" +version = "5.0.2+20200127" authors = ["Sebastian Thiel "] description = "A complete library to interact with proximitybeacon (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/proximitybeacon1_beta1" homepage = "https://developers.google.com/beacons/proximity/" -documentation = "https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127" +documentation = "https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127" license = "MIT" keywords = ["proximitybeacon", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/proximitybeacon1_beta1/README.md b/gen/proximitybeacon1_beta1/README.md index 31d812e139..1d4c4f9315 100644 --- a/gen/proximitybeacon1_beta1/README.md +++ b/gen/proximitybeacon1_beta1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-proximitybeacon1_beta1` library allows access to all features of the *Google proximitybeacon* service. -This documentation was generated from *proximitybeacon* crate version *5.0.2-beta-1+20200127*, where *20200127* is the exact revision of the *proximitybeacon:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *proximitybeacon* crate version *5.0.2+20200127*, where *20200127* is the exact revision of the *proximitybeacon:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *proximitybeacon* *v1_beta1* API can be found at the [official documentation site](https://developers.google.com/beacons/proximity/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/Proximitybeacon) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/Proximitybeacon) ... * beaconinfo - * [*getforobserved*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconinfoGetforobservedCall) -* [beacons](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::Beacon) - * [*activate*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconActivateCall), [*attachments batch delete*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconAttachmentBatchDeleteCall), [*attachments create*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconAttachmentCreateCall), [*attachments delete*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconAttachmentDeleteCall), [*attachments list*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconAttachmentListCall), [*deactivate*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconDeactivateCall), [*decommission*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconDecommissionCall), [*delete*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconDeleteCall), [*diagnostics 
list*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconDiagnosticListCall), [*get*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconGetCall), [*list*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconListCall), [*register*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconRegisterCall) and [*update*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::BeaconUpdateCall) -* [namespaces](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::Namespace) - * [*list*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::NamespaceListCall) and [*update*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::NamespaceUpdateCall) + * [*getforobserved*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconinfoGetforobservedCall) +* [beacons](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::Beacon) + * [*activate*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconActivateCall), [*attachments batch delete*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconAttachmentBatchDeleteCall), [*attachments create*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconAttachmentCreateCall), [*attachments delete*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconAttachmentDeleteCall), [*attachments 
list*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconAttachmentListCall), [*deactivate*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconDeactivateCall), [*decommission*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconDecommissionCall), [*delete*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconDeleteCall), [*diagnostics list*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconDiagnosticListCall), [*get*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconGetCall), [*list*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconListCall), [*register*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconRegisterCall) and [*update*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::BeaconUpdateCall) +* [namespaces](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::Namespace) + * [*list*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::NamespaceListCall) and [*update*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::NamespaceUpdateCall) Other activities are ... -* [get eidparams](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/api::MethodGetEidparamCall) +* [get eidparams](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/api::MethodGetEidparamCall) @@ -30,17 +30,17 @@ Other activities are ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/Proximitybeacon)** +* **[Hub](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/Proximitybeacon)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::CallBuilder)** * operations to apply to 
*Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -141,17 +141,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -161,29 +161,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-proximitybeacon1_beta1/5.0.2-beta-1+20200127/google_proximitybeacon1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-proximitybeacon1_beta1/5.0.2+20200127/google_proximitybeacon1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/proximitybeacon1_beta1/src/api.rs b/gen/proximitybeacon1_beta1/src/api.rs index 438ee30b17..cf26464993 100644 --- a/gen/proximitybeacon1_beta1/src/api.rs +++ b/gen/proximitybeacon1_beta1/src/api.rs @@ -123,7 +123,7 @@ impl<'a, S> Proximitybeacon { Proximitybeacon { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://proximitybeacon.googleapis.com/".to_string(), _root_url: "https://proximitybeacon.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> Proximitybeacon { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/proximitybeacon1_beta1/src/client.rs b/gen/proximitybeacon1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/proximitybeacon1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/proximitybeacon1_beta1/src/lib.rs b/gen/proximitybeacon1_beta1/src/lib.rs index d19b6e6b0f..0396288e55 100644 --- a/gen/proximitybeacon1_beta1/src/lib.rs +++ b/gen/proximitybeacon1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *proximitybeacon* crate version *5.0.2-beta-1+20200127*, where *20200127* is the exact revision of the *proximitybeacon:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *proximitybeacon* crate version *5.0.2+20200127*, where *20200127* is the exact revision of the *proximitybeacon:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *proximitybeacon* *v1_beta1* API can be found at the //! [official documentation site](https://developers.google.com/beacons/proximity/). diff --git a/gen/pubsub1-cli/Cargo.toml b/gen/pubsub1-cli/Cargo.toml index ba50c7ec80..cae7fa395c 100644 --- a/gen/pubsub1-cli/Cargo.toml +++ b/gen/pubsub1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-pubsub1-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Pubsub (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pubsub1-cli" @@ -20,13 +20,13 @@ name = "pubsub1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-pubsub1] path = "../pubsub1" -version = "4.0.1+20220221" +version = "5.0.2+20230119" + diff --git a/gen/pubsub1-cli/README.md b/gen/pubsub1-cli/README.md index e86a48df87..50fc22db00 100644 --- a/gen/pubsub1-cli/README.md +++ b/gen/pubsub1-cli/README.md @@ -25,16 +25,20 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # 
Usage -This documentation was generated from the *Pubsub* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *Pubsub* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash pubsub1 [options] projects + schemas-commit (-r )... [-p ]... [-o ] schemas-create (-r )... [-p ]... [-o ] schemas-delete [-p ]... [-o ] + schemas-delete-revision [-p ]... [-o ] schemas-get [-p ]... [-o ] schemas-get-iam-policy [-p ]... [-o ] schemas-list [-p ]... [-o ] + schemas-list-revisions [-p ]... [-o ] + schemas-rollback (-r )... [-p ]... [-o ] schemas-set-iam-policy (-r )... [-p ]... [-o ] schemas-test-iam-permissions (-r )... [-p ]... [-o ] schemas-validate (-r )... [-p ]... [-o ] diff --git a/gen/pubsub1-cli/mkdocs.yml b/gen/pubsub1-cli/mkdocs.yml index 97d75d6a1b..27cba0a009 100644 --- a/gen/pubsub1-cli/mkdocs.yml +++ b/gen/pubsub1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Pubsub v4.0.1+20220221 +site_name: Pubsub v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-pubsub1-cli site_description: A complete library to interact with Pubsub (protocol v1) @@ -7,50 +7,55 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/pubsub1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_schemas-create.md', 'Projects', 'Schemas Create'] -- ['projects_schemas-delete.md', 'Projects', 'Schemas Delete'] -- ['projects_schemas-get.md', 'Projects', 'Schemas Get'] -- ['projects_schemas-get-iam-policy.md', 'Projects', 'Schemas Get Iam Policy'] -- ['projects_schemas-list.md', 'Projects', 'Schemas List'] -- ['projects_schemas-set-iam-policy.md', 'Projects', 'Schemas Set Iam Policy'] -- ['projects_schemas-test-iam-permissions.md', 'Projects', 'Schemas Test Iam Permissions'] -- ['projects_schemas-validate.md', 'Projects', 'Schemas Validate'] -- ['projects_schemas-validate-message.md', 'Projects', 'Schemas Validate Message'] -- ['projects_snapshots-create.md', 'Projects', 
'Snapshots Create'] -- ['projects_snapshots-delete.md', 'Projects', 'Snapshots Delete'] -- ['projects_snapshots-get.md', 'Projects', 'Snapshots Get'] -- ['projects_snapshots-get-iam-policy.md', 'Projects', 'Snapshots Get Iam Policy'] -- ['projects_snapshots-list.md', 'Projects', 'Snapshots List'] -- ['projects_snapshots-patch.md', 'Projects', 'Snapshots Patch'] -- ['projects_snapshots-set-iam-policy.md', 'Projects', 'Snapshots Set Iam Policy'] -- ['projects_snapshots-test-iam-permissions.md', 'Projects', 'Snapshots Test Iam Permissions'] -- ['projects_subscriptions-acknowledge.md', 'Projects', 'Subscriptions Acknowledge'] -- ['projects_subscriptions-create.md', 'Projects', 'Subscriptions Create'] -- ['projects_subscriptions-delete.md', 'Projects', 'Subscriptions Delete'] -- ['projects_subscriptions-detach.md', 'Projects', 'Subscriptions Detach'] -- ['projects_subscriptions-get.md', 'Projects', 'Subscriptions Get'] -- ['projects_subscriptions-get-iam-policy.md', 'Projects', 'Subscriptions Get Iam Policy'] -- ['projects_subscriptions-list.md', 'Projects', 'Subscriptions List'] -- ['projects_subscriptions-modify-ack-deadline.md', 'Projects', 'Subscriptions Modify Ack Deadline'] -- ['projects_subscriptions-modify-push-config.md', 'Projects', 'Subscriptions Modify Push Config'] -- ['projects_subscriptions-patch.md', 'Projects', 'Subscriptions Patch'] -- ['projects_subscriptions-pull.md', 'Projects', 'Subscriptions Pull'] -- ['projects_subscriptions-seek.md', 'Projects', 'Subscriptions Seek'] -- ['projects_subscriptions-set-iam-policy.md', 'Projects', 'Subscriptions Set Iam Policy'] -- ['projects_subscriptions-test-iam-permissions.md', 'Projects', 'Subscriptions Test Iam Permissions'] -- ['projects_topics-create.md', 'Projects', 'Topics Create'] -- ['projects_topics-delete.md', 'Projects', 'Topics Delete'] -- ['projects_topics-get.md', 'Projects', 'Topics Get'] -- ['projects_topics-get-iam-policy.md', 'Projects', 'Topics Get Iam Policy'] -- ['projects_topics-list.md', 
'Projects', 'Topics List'] -- ['projects_topics-patch.md', 'Projects', 'Topics Patch'] -- ['projects_topics-publish.md', 'Projects', 'Topics Publish'] -- ['projects_topics-set-iam-policy.md', 'Projects', 'Topics Set Iam Policy'] -- ['projects_topics-snapshots-list.md', 'Projects', 'Topics Snapshots List'] -- ['projects_topics-subscriptions-list.md', 'Projects', 'Topics Subscriptions List'] -- ['projects_topics-test-iam-permissions.md', 'Projects', 'Topics Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Schemas Commit': 'projects_schemas-commit.md' + - 'Schemas Create': 'projects_schemas-create.md' + - 'Schemas Delete': 'projects_schemas-delete.md' + - 'Schemas Delete Revision': 'projects_schemas-delete-revision.md' + - 'Schemas Get': 'projects_schemas-get.md' + - 'Schemas Get Iam Policy': 'projects_schemas-get-iam-policy.md' + - 'Schemas List': 'projects_schemas-list.md' + - 'Schemas List Revisions': 'projects_schemas-list-revisions.md' + - 'Schemas Rollback': 'projects_schemas-rollback.md' + - 'Schemas Set Iam Policy': 'projects_schemas-set-iam-policy.md' + - 'Schemas Test Iam Permissions': 'projects_schemas-test-iam-permissions.md' + - 'Schemas Validate': 'projects_schemas-validate.md' + - 'Schemas Validate Message': 'projects_schemas-validate-message.md' + - 'Snapshots Create': 'projects_snapshots-create.md' + - 'Snapshots Delete': 'projects_snapshots-delete.md' + - 'Snapshots Get': 'projects_snapshots-get.md' + - 'Snapshots Get Iam Policy': 'projects_snapshots-get-iam-policy.md' + - 'Snapshots List': 'projects_snapshots-list.md' + - 'Snapshots Patch': 'projects_snapshots-patch.md' + - 'Snapshots Set Iam Policy': 'projects_snapshots-set-iam-policy.md' + - 'Snapshots Test Iam Permissions': 'projects_snapshots-test-iam-permissions.md' + - 'Subscriptions Acknowledge': 'projects_subscriptions-acknowledge.md' + - 'Subscriptions Create': 'projects_subscriptions-create.md' + - 'Subscriptions Delete': 'projects_subscriptions-delete.md' + - 
'Subscriptions Detach': 'projects_subscriptions-detach.md' + - 'Subscriptions Get': 'projects_subscriptions-get.md' + - 'Subscriptions Get Iam Policy': 'projects_subscriptions-get-iam-policy.md' + - 'Subscriptions List': 'projects_subscriptions-list.md' + - 'Subscriptions Modify Ack Deadline': 'projects_subscriptions-modify-ack-deadline.md' + - 'Subscriptions Modify Push Config': 'projects_subscriptions-modify-push-config.md' + - 'Subscriptions Patch': 'projects_subscriptions-patch.md' + - 'Subscriptions Pull': 'projects_subscriptions-pull.md' + - 'Subscriptions Seek': 'projects_subscriptions-seek.md' + - 'Subscriptions Set Iam Policy': 'projects_subscriptions-set-iam-policy.md' + - 'Subscriptions Test Iam Permissions': 'projects_subscriptions-test-iam-permissions.md' + - 'Topics Create': 'projects_topics-create.md' + - 'Topics Delete': 'projects_topics-delete.md' + - 'Topics Get': 'projects_topics-get.md' + - 'Topics Get Iam Policy': 'projects_topics-get-iam-policy.md' + - 'Topics List': 'projects_topics-list.md' + - 'Topics Patch': 'projects_topics-patch.md' + - 'Topics Publish': 'projects_topics-publish.md' + - 'Topics Set Iam Policy': 'projects_topics-set-iam-policy.md' + - 'Topics Snapshots List': 'projects_topics-snapshots-list.md' + - 'Topics Subscriptions List': 'projects_topics-subscriptions-list.md' + - 'Topics Test Iam Permissions': 'projects_topics-test-iam-permissions.md' theme: readthedocs diff --git a/gen/pubsub1-cli/src/client.rs b/gen/pubsub1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/pubsub1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use 
std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/pubsub1-cli/src/main.rs b/gen/pubsub1-cli/src/main.rs index e0a3e059b2..ec18c606ce 100644 --- a/gen/pubsub1-cli/src/main.rs +++ b/gen/pubsub1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_pubsub1::{api, Error, oauth2}; +use google_pubsub1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,95 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_schemas_commit(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "schema.definition" => Some(("schema.definition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema.name" => Some(("schema.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema.revision-create-time" => Some(("schema.revisionCreateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema.revision-id" => Some(("schema.revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema.type" => Some(("schema.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["definition", "name", "revision-create-time", "revision-id", "schema", "type"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CommitSchemaRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().schemas_commit(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + 
call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_schemas_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -76,9 +164,11 @@ where match &temp_cursor.to_string()[..] { "definition" => Some(("definition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-create-time" => Some(("revisionCreateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["definition", "name", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["definition", "name", "revision-create-time", "revision-id", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -194,6 +284,62 @@ where } } + async fn _projects_schemas_delete_revision(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.projects().schemas_delete_revision(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "revision-id" => { + call = call.revision_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["revision-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_schemas_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().schemas_get(opt.value_of("name").unwrap_or("")); @@ -257,7 +403,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = 
call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -319,7 +465,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -368,6 +514,153 @@ where } } + async fn _projects_schemas_list_revisions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().schemas_list_revisions(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "view" => { + call = call.view(value.unwrap_or("")); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "view"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + 
let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_schemas_rollback(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["revision-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::RollbackSchemaRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().schemas_rollback(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + 
json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_schemas_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -564,9 +857,11 @@ where match &temp_cursor.to_string()[..] { "schema.definition" => Some(("schema.definition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "schema.name" => Some(("schema.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema.revision-create-time" => Some(("schema.revisionCreateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema.revision-id" => Some(("schema.revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "schema.type" => Some(("schema.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["definition", "name", "schema", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["definition", "name", "revision-create-time", "revision-id", "schema", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -654,9 +949,11 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "schema.definition" => Some(("schema.definition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "schema.name" => Some(("schema.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema.revision-create-time" => Some(("schema.revisionCreateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema.revision-id" => Some(("schema.revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "schema.type" => Some(("schema.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ 
=> { - let suggestion = FieldCursor::did_you_mean(key, &vec!["definition", "encoding", "message", "name", "schema", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["definition", "encoding", "message", "name", "revision-create-time", "revision-id", "schema", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -913,7 +1210,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -972,7 +1269,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1390,6 +1687,11 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "ack-deadline-seconds" => Some(("ackDeadlineSeconds", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "bigquery-config.drop-unknown-fields" => Some(("bigqueryConfig.dropUnknownFields", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "bigquery-config.state" => Some(("bigqueryConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "bigquery-config.table" => Some(("bigqueryConfig.table", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "bigquery-config.use-topic-schema" => Some(("bigqueryConfig.useTopicSchema", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "bigquery-config.write-metadata" => Some(("bigqueryConfig.writeMetadata", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "dead-letter-policy.dead-letter-topic" => Some(("deadLetterPolicy.deadLetterTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "dead-letter-policy.max-delivery-attempts" => Some(("deadLetterPolicy.maxDeliveryAttempts", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "detached" => Some(("detached", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1411,7 +1713,7 @@ where "topic" => Some(("topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "topic-message-retention-duration" => Some(("topicMessageRetentionDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ack-deadline-seconds", "attributes", "audience", "dead-letter-policy", "dead-letter-topic", "detached", "enable-exactly-once-delivery", "enable-message-ordering", "expiration-policy", "filter", "labels", "max-delivery-attempts", "maximum-backoff", "message-retention-duration", "minimum-backoff", "name", "oidc-token", "push-config", "push-endpoint", "retain-acked-messages", "retry-policy", "service-account-email", "state", 
"topic", "topic-message-retention-duration", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ack-deadline-seconds", "attributes", "audience", "bigquery-config", "dead-letter-policy", "dead-letter-topic", "detached", "drop-unknown-fields", "enable-exactly-once-delivery", "enable-message-ordering", "expiration-policy", "filter", "labels", "max-delivery-attempts", "maximum-backoff", "message-retention-duration", "minimum-backoff", "name", "oidc-token", "push-config", "push-endpoint", "retain-acked-messages", "retry-policy", "service-account-email", "state", "table", "topic", "topic-message-retention-duration", "ttl", "use-topic-schema", "write-metadata"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1634,7 +1936,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1693,7 +1995,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1940,6 +2242,11 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "subscription.ack-deadline-seconds" => Some(("subscription.ackDeadlineSeconds", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "subscription.bigquery-config.drop-unknown-fields" => Some(("subscription.bigqueryConfig.dropUnknownFields", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "subscription.bigquery-config.state" => Some(("subscription.bigqueryConfig.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "subscription.bigquery-config.table" => Some(("subscription.bigqueryConfig.table", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "subscription.bigquery-config.use-topic-schema" => Some(("subscription.bigqueryConfig.useTopicSchema", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "subscription.bigquery-config.write-metadata" => Some(("subscription.bigqueryConfig.writeMetadata", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "subscription.dead-letter-policy.dead-letter-topic" => Some(("subscription.deadLetterPolicy.deadLetterTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "subscription.dead-letter-policy.max-delivery-attempts" => Some(("subscription.deadLetterPolicy.maxDeliveryAttempts", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "subscription.detached" => Some(("subscription.detached", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1962,7 +2269,7 @@ where "subscription.topic-message-retention-duration" => Some(("subscription.topicMessageRetentionDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["ack-deadline-seconds", "attributes", "audience", "dead-letter-policy", "dead-letter-topic", "detached", "enable-exactly-once-delivery", 
"enable-message-ordering", "expiration-policy", "filter", "labels", "max-delivery-attempts", "maximum-backoff", "message-retention-duration", "minimum-backoff", "name", "oidc-token", "push-config", "push-endpoint", "retain-acked-messages", "retry-policy", "service-account-email", "state", "subscription", "topic", "topic-message-retention-duration", "ttl", "update-mask"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["ack-deadline-seconds", "attributes", "audience", "bigquery-config", "dead-letter-policy", "dead-letter-topic", "detached", "drop-unknown-fields", "enable-exactly-once-delivery", "enable-message-ordering", "expiration-policy", "filter", "labels", "max-delivery-attempts", "maximum-backoff", "message-retention-duration", "minimum-backoff", "name", "oidc-token", "push-config", "push-endpoint", "retain-acked-messages", "retry-policy", "service-account-email", "state", "subscription", "table", "topic", "topic-message-retention-duration", "ttl", "update-mask", "use-topic-schema", "write-metadata"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2395,9 +2702,11 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "satisfies-pzs" => Some(("satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "schema-settings.encoding" => Some(("schemaSettings.encoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema-settings.first-revision-id" => Some(("schemaSettings.firstRevisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "schema-settings.last-revision-id" => Some(("schemaSettings.lastRevisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "schema-settings.schema" => Some(("schemaSettings.schema", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = 
FieldCursor::did_you_mean(key, &vec!["allowed-persistence-regions", "encoding", "kms-key-name", "labels", "message-retention-duration", "message-storage-policy", "name", "satisfies-pzs", "schema", "schema-settings"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-persistence-regions", "encoding", "first-revision-id", "kms-key-name", "labels", "last-revision-id", "message-retention-duration", "message-storage-policy", "name", "satisfies-pzs", "schema", "schema-settings"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2568,7 +2877,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2627,7 +2936,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2706,10 +3015,12 @@ where "topic.name" => Some(("topic.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "topic.satisfies-pzs" => Some(("topic.satisfiesPzs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "topic.schema-settings.encoding" => Some(("topic.schemaSettings.encoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "topic.schema-settings.first-revision-id" => Some(("topic.schemaSettings.firstRevisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "topic.schema-settings.last-revision-id" => 
Some(("topic.schemaSettings.lastRevisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "topic.schema-settings.schema" => Some(("topic.schemaSettings.schema", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-persistence-regions", "encoding", "kms-key-name", "labels", "message-retention-duration", "message-storage-policy", "name", "satisfies-pzs", "schema", "schema-settings", "topic", "update-mask"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-persistence-regions", "encoding", "first-revision-id", "kms-key-name", "labels", "last-revision-id", "message-retention-duration", "message-storage-policy", "name", "satisfies-pzs", "schema", "schema-settings", "topic", "update-mask"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2949,7 +3260,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3008,7 +3319,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3149,12 +3460,18 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { + ("schemas-commit", Some(opt)) => { + call_result = self._projects_schemas_commit(opt, dry_run, &mut err).await; + }, ("schemas-create", Some(opt)) => { call_result = 
self._projects_schemas_create(opt, dry_run, &mut err).await; }, ("schemas-delete", Some(opt)) => { call_result = self._projects_schemas_delete(opt, dry_run, &mut err).await; }, + ("schemas-delete-revision", Some(opt)) => { + call_result = self._projects_schemas_delete_revision(opt, dry_run, &mut err).await; + }, ("schemas-get", Some(opt)) => { call_result = self._projects_schemas_get(opt, dry_run, &mut err).await; }, @@ -3164,6 +3481,12 @@ where ("schemas-list", Some(opt)) => { call_result = self._projects_schemas_list(opt, dry_run, &mut err).await; }, + ("schemas-list-revisions", Some(opt)) => { + call_result = self._projects_schemas_list_revisions(opt, dry_run, &mut err).await; + }, + ("schemas-rollback", Some(opt)) => { + call_result = self._projects_schemas_rollback(opt, dry_run, &mut err).await; + }, ("schemas-set-iam-policy", Some(opt)) => { call_result = self._projects_schemas_set_iam_policy(opt, dry_run, &mut err).await; }, @@ -3354,7 +3677,35 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'schemas-create', 'schemas-delete', 'schemas-get', 'schemas-get-iam-policy', 'schemas-list', 'schemas-set-iam-policy', 'schemas-test-iam-permissions', 'schemas-validate', 'schemas-validate-message', 'snapshots-create', 'snapshots-delete', 'snapshots-get', 'snapshots-get-iam-policy', 'snapshots-list', 'snapshots-patch', 'snapshots-set-iam-policy', 'snapshots-test-iam-permissions', 'subscriptions-acknowledge', 'subscriptions-create', 'subscriptions-delete', 'subscriptions-detach', 'subscriptions-get', 'subscriptions-get-iam-policy', 'subscriptions-list', 'subscriptions-modify-ack-deadline', 'subscriptions-modify-push-config', 'subscriptions-patch', 'subscriptions-pull', 'subscriptions-seek', 'subscriptions-set-iam-policy', 'subscriptions-test-iam-permissions', 'topics-create', 'topics-delete', 'topics-get', 'topics-get-iam-policy', 'topics-list', 'topics-patch', 'topics-publish', 'topics-set-iam-policy', 
'topics-snapshots-list', 'topics-subscriptions-list' and 'topics-test-iam-permissions'", vec![ + ("projects", "methods: 'schemas-commit', 'schemas-create', 'schemas-delete', 'schemas-delete-revision', 'schemas-get', 'schemas-get-iam-policy', 'schemas-list', 'schemas-list-revisions', 'schemas-rollback', 'schemas-set-iam-policy', 'schemas-test-iam-permissions', 'schemas-validate', 'schemas-validate-message', 'snapshots-create', 'snapshots-delete', 'snapshots-get', 'snapshots-get-iam-policy', 'snapshots-list', 'snapshots-patch', 'snapshots-set-iam-policy', 'snapshots-test-iam-permissions', 'subscriptions-acknowledge', 'subscriptions-create', 'subscriptions-delete', 'subscriptions-detach', 'subscriptions-get', 'subscriptions-get-iam-policy', 'subscriptions-list', 'subscriptions-modify-ack-deadline', 'subscriptions-modify-push-config', 'subscriptions-patch', 'subscriptions-pull', 'subscriptions-seek', 'subscriptions-set-iam-policy', 'subscriptions-test-iam-permissions', 'topics-create', 'topics-delete', 'topics-get', 'topics-get-iam-policy', 'topics-list', 'topics-patch', 'topics-publish', 'topics-set-iam-policy', 'topics-snapshots-list', 'topics-subscriptions-list' and 'topics-test-iam-permissions'", vec![ + ("schemas-commit", + Some(r##"Commits a new schema revision to an existing schema."##), + "Details at http://byron.github.io/google-apis-rs/google_pubsub1_cli/projects_schemas-commit", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the schema we are revising. 
Format is `projects/{project}/schemas/{schema}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("schemas-create", Some(r##"Creates a schema."##), "Details at http://byron.github.io/google-apis-rs/google_pubsub1_cli/projects_schemas-create", @@ -3399,6 +3750,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("schemas-delete-revision", + Some(r##"Deletes a specific schema revision."##), + "Details at http://byron.github.io/google-apis-rs/google_pubsub1_cli/projects_schemas-delete-revision", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the schema revision to be deleted, with a revision ID explicitly included. Example: projects/123/schemas/my-schema@c7cfa2a8"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3433,7 +3806,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3465,6 +3838,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("schemas-list-revisions", + Some(r##"Lists all schema revisions for the named schema."##), + "Details at http://byron.github.io/google-apis-rs/google_pubsub1_cli/projects_schemas-list-revisions", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the schema to list revisions for."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("schemas-rollback", + Some(r##"Creates a new schema revision that is a copy of the provided revision_id."##), + "Details at http://byron.github.io/google-apis-rs/google_pubsub1_cli/projects_schemas-rollback", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The schema being rolled back with revision id."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3477,7 +3900,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3505,7 +3928,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3589,7 +4012,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. User-provided name for this snapshot. If the name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription. Note that for REST API requests, you must specify a name. See the resource name rules. Format is `projects/{project}/snapshots/{snap}`."##), + Some(r##"Required. User-provided name for this snapshot. If the name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription. Note that for REST API requests, you must specify a name. See the [resource name rules](https://cloud.google.com/pubsub/docs/admin#resource_names). Format is `projects/{project}/snapshots/{snap}`."##), Some(true), Some(false)), @@ -3634,7 +4057,7 @@ async fn main() { Some(false)), ]), ("snapshots-get", - Some(r##"Gets the configuration details of a snapshot. Snapshots are used in Seek operations, which allow you to manage message acknowledgments in bulk. 
That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot."##), + Some(r##"Gets the configuration details of a snapshot. Snapshots are used in [Seek](https://cloud.google.com/pubsub/docs/replay-overview) operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot."##), "Details at http://byron.github.io/google-apis-rs/google_pubsub1_cli/projects_snapshots-get", vec![ (Some(r##"snapshot"##), @@ -3661,7 +4084,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3700,7 +4123,7 @@ async fn main() { Some(false)), ]), ("snapshots-patch", - Some(r##"Updates an existing snapshot. Snapshots are used in Seek operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot."##), + Some(r##"Updates an existing snapshot. Snapshots are used in [Seek](https://cloud.google.com/pubsub/docs/replay-overview) operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot."##), "Details at http://byron.github.io/google-apis-rs/google_pubsub1_cli/projects_snapshots-patch", vec![ (Some(r##"name"##), @@ -3733,7 +4156,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3761,7 +4184,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -3911,7 +4334,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4034,7 +4457,7 @@ async fn main() { Some(false)), ]), ("subscriptions-pull", - Some(r##"Pulls messages from the server. The server may return `UNAVAILABLE` if there are too many concurrent pull requests pending for the given subscription."##), + Some(r##"Pulls messages from the server."##), "Details at http://byron.github.io/google-apis-rs/google_pubsub1_cli/projects_subscriptions-pull", vec![ (Some(r##"subscription"##), @@ -4095,7 +4518,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4123,7 +4546,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4223,7 +4646,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4323,7 +4746,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4395,7 +4818,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -4423,7 +4846,7 @@ async fn main() { let mut app = App::new("pubsub1") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20230119") .about("Provides reliable, many-to-many, asynchronous messaging between applications. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_pubsub1_cli") .arg(Arg::with_name("url") diff --git a/gen/pubsub1/Cargo.toml b/gen/pubsub1/Cargo.toml index 84de793eaa..70022fda95 100644 --- a/gen/pubsub1/Cargo.toml +++ b/gen/pubsub1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-pubsub1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Pubsub (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pubsub1" homepage = "https://cloud.google.com/pubsub/docs" -documentation = "https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-pubsub1/5.0.2+20230119" license = "MIT" keywords = ["pubsub", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/pubsub1/README.md b/gen/pubsub1/README.md index 0ea1ccc0ac..4c1c48bb74 100644 --- a/gen/pubsub1/README.md +++ b/gen/pubsub1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-pubsub1` library allows access to all features of the *Google Pubsub* service. -This documentation was generated from *Pubsub* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *pubsub:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Pubsub* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *pubsub:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Pubsub* *v1* API can be found at the [official documentation site](https://cloud.google.com/pubsub/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/Pubsub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/Pubsub) ... * projects - * [*schemas commit*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaCommitCall), [*schemas create*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaCreateCall), [*schemas delete*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaDeleteCall), [*schemas delete revision*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaDeleteRevisionCall), [*schemas get*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaGetCall), [*schemas get iam policy*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaGetIamPolicyCall), [*schemas list*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaListCall), [*schemas list revisions*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaListRevisionCall), [*schemas rollback*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaRollbackCall), [*schemas set iam policy*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaSetIamPolicyCall), [*schemas test iam permissions*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaTestIamPermissionCall), [*schemas validate*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaValidateCall), [*schemas validate 
message*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSchemaValidateMessageCall), [*snapshots create*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSnapshotCreateCall), [*snapshots delete*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSnapshotDeleteCall), [*snapshots get*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSnapshotGetCall), [*snapshots get iam policy*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSnapshotGetIamPolicyCall), [*snapshots list*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSnapshotListCall), [*snapshots patch*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSnapshotPatchCall), [*snapshots set iam policy*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSnapshotSetIamPolicyCall), [*snapshots test iam permissions*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSnapshotTestIamPermissionCall), [*subscriptions acknowledge*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionAcknowledgeCall), [*subscriptions create*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionCreateCall), [*subscriptions delete*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionDeleteCall), [*subscriptions detach*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionDetachCall), [*subscriptions get*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionGetCall), [*subscriptions get iam policy*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionGetIamPolicyCall), [*subscriptions 
list*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionListCall), [*subscriptions modify ack deadline*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionModifyAckDeadlineCall), [*subscriptions modify push config*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionModifyPushConfigCall), [*subscriptions patch*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionPatchCall), [*subscriptions pull*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionPullCall), [*subscriptions seek*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionSeekCall), [*subscriptions set iam policy*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionSetIamPolicyCall), [*subscriptions test iam permissions*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectSubscriptionTestIamPermissionCall), [*topics create*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicCreateCall), [*topics delete*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicDeleteCall), [*topics get*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicGetCall), [*topics get iam policy*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicGetIamPolicyCall), [*topics list*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicListCall), [*topics patch*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicPatchCall), [*topics publish*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicPublishCall), [*topics set iam 
policy*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicSetIamPolicyCall), [*topics snapshots list*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicSnapshotListCall), [*topics subscriptions list*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicSubscriptionListCall) and [*topics test iam permissions*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/api::ProjectTopicTestIamPermissionCall) + * [*schemas commit*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaCommitCall), [*schemas create*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaCreateCall), [*schemas delete*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaDeleteCall), [*schemas delete revision*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaDeleteRevisionCall), [*schemas get*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaGetCall), [*schemas get iam policy*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaGetIamPolicyCall), [*schemas list*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaListCall), [*schemas list revisions*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaListRevisionCall), [*schemas rollback*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaRollbackCall), [*schemas set iam policy*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaSetIamPolicyCall), [*schemas test iam permissions*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaTestIamPermissionCall), [*schemas validate*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaValidateCall), [*schemas validate 
message*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSchemaValidateMessageCall), [*snapshots create*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSnapshotCreateCall), [*snapshots delete*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSnapshotDeleteCall), [*snapshots get*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSnapshotGetCall), [*snapshots get iam policy*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSnapshotGetIamPolicyCall), [*snapshots list*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSnapshotListCall), [*snapshots patch*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSnapshotPatchCall), [*snapshots set iam policy*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSnapshotSetIamPolicyCall), [*snapshots test iam permissions*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSnapshotTestIamPermissionCall), [*subscriptions acknowledge*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionAcknowledgeCall), [*subscriptions create*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionCreateCall), [*subscriptions delete*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionDeleteCall), [*subscriptions detach*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionDetachCall), [*subscriptions get*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionGetCall), [*subscriptions get iam policy*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionGetIamPolicyCall), [*subscriptions list*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionListCall), [*subscriptions modify ack 
deadline*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionModifyAckDeadlineCall), [*subscriptions modify push config*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionModifyPushConfigCall), [*subscriptions patch*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionPatchCall), [*subscriptions pull*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionPullCall), [*subscriptions seek*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionSeekCall), [*subscriptions set iam policy*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionSetIamPolicyCall), [*subscriptions test iam permissions*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectSubscriptionTestIamPermissionCall), [*topics create*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicCreateCall), [*topics delete*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicDeleteCall), [*topics get*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicGetCall), [*topics get iam policy*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicGetIamPolicyCall), [*topics list*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicListCall), [*topics patch*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicPatchCall), [*topics publish*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicPublishCall), [*topics set iam policy*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicSetIamPolicyCall), [*topics snapshots list*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicSnapshotListCall), [*topics subscriptions 
list*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicSubscriptionListCall) and [*topics test iam permissions*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/api::ProjectTopicTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/Pubsub)** +* **[Hub](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/Pubsub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::CallBuilder) -* **[Resources](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::CallBuilder) +* **[Resources](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::Part)** + * **[Parts](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are 
marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::Delegate) to the -[Method Builder](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::Delegate) to the +[Method Builder](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::RequestValue) and -[decodable](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::RequestValue) and +[decodable](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-pubsub1/5.0.2-beta-1+20230119/google_pubsub1/client::RequestValue) are moved +* [request values](https://docs.rs/google-pubsub1/5.0.2+20230119/google_pubsub1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/pubsub1/src/api.rs b/gen/pubsub1/src/api.rs index 77e2be54bb..735e2731a6 100644 --- a/gen/pubsub1/src/api.rs +++ b/gen/pubsub1/src/api.rs @@ -124,7 +124,7 @@ impl<'a, S> Pubsub { Pubsub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://pubsub.googleapis.com/".to_string(), _root_url: "https://pubsub.googleapis.com/".to_string(), } @@ -135,7 +135,7 @@ impl<'a, S> Pubsub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/pubsub1/src/client.rs b/gen/pubsub1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/pubsub1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/pubsub1/src/lib.rs b/gen/pubsub1/src/lib.rs index e80c036451..1501dcd23d 100644 --- a/gen/pubsub1/src/lib.rs +++ b/gen/pubsub1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Pubsub* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *pubsub:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Pubsub* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *pubsub:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Pubsub* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/pubsub/docs). diff --git a/gen/pubsub1_beta2-cli/Cargo.toml b/gen/pubsub1_beta2-cli/Cargo.toml index f2c9836d1d..a400692dba 100644 --- a/gen/pubsub1_beta2-cli/Cargo.toml +++ b/gen/pubsub1_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-pubsub1_beta2-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Pubsub (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pubsub1_beta2-cli" @@ -20,13 +20,13 @@ name = "pubsub1-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-pubsub1_beta2] path = "../pubsub1_beta2" -version = "4.0.1+20220221" +version = "5.0.2+20230119" + diff --git a/gen/pubsub1_beta2-cli/README.md b/gen/pubsub1_beta2-cli/README.md index ad4197ae4d..e20b1aba1a 100644 --- a/gen/pubsub1_beta2-cli/README.md +++ b/gen/pubsub1_beta2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Pubsub* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *Pubsub* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash pubsub1-beta2 [options] diff --git a/gen/pubsub1_beta2-cli/mkdocs.yml b/gen/pubsub1_beta2-cli/mkdocs.yml index d2365ede95..2ab0ed52bc 100644 --- a/gen/pubsub1_beta2-cli/mkdocs.yml +++ b/gen/pubsub1_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Pubsub v4.0.1+20220221 +site_name: Pubsub v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-pubsub1_beta2-cli site_description: A complete library to interact with Pubsub (protocol v1beta2) @@ -7,28 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/pubsub1_beta2-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_subscriptions-acknowledge.md', 'Projects', 'Subscriptions Acknowledge'] -- ['projects_subscriptions-create.md', 'Projects', 'Subscriptions Create'] -- ['projects_subscriptions-delete.md', 'Projects', 'Subscriptions Delete'] -- ['projects_subscriptions-get.md', 'Projects', 'Subscriptions Get'] -- ['projects_subscriptions-get-iam-policy.md', 'Projects', 'Subscriptions Get Iam Policy'] -- ['projects_subscriptions-list.md', 'Projects', 'Subscriptions List'] -- ['projects_subscriptions-modify-ack-deadline.md', 'Projects', 'Subscriptions Modify Ack Deadline'] -- ['projects_subscriptions-modify-push-config.md', 'Projects', 'Subscriptions Modify Push Config'] -- ['projects_subscriptions-pull.md', 'Projects', 'Subscriptions Pull'] -- ['projects_subscriptions-set-iam-policy.md', 'Projects', 'Subscriptions Set Iam Policy'] -- ['projects_subscriptions-test-iam-permissions.md', 'Projects', 'Subscriptions Test Iam Permissions'] -- ['projects_topics-create.md', 'Projects', 'Topics Create'] -- ['projects_topics-delete.md', 'Projects', 'Topics Delete'] -- 
['projects_topics-get.md', 'Projects', 'Topics Get'] -- ['projects_topics-get-iam-policy.md', 'Projects', 'Topics Get Iam Policy'] -- ['projects_topics-list.md', 'Projects', 'Topics List'] -- ['projects_topics-publish.md', 'Projects', 'Topics Publish'] -- ['projects_topics-set-iam-policy.md', 'Projects', 'Topics Set Iam Policy'] -- ['projects_topics-subscriptions-list.md', 'Projects', 'Topics Subscriptions List'] -- ['projects_topics-test-iam-permissions.md', 'Projects', 'Topics Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Subscriptions Acknowledge': 'projects_subscriptions-acknowledge.md' + - 'Subscriptions Create': 'projects_subscriptions-create.md' + - 'Subscriptions Delete': 'projects_subscriptions-delete.md' + - 'Subscriptions Get': 'projects_subscriptions-get.md' + - 'Subscriptions Get Iam Policy': 'projects_subscriptions-get-iam-policy.md' + - 'Subscriptions List': 'projects_subscriptions-list.md' + - 'Subscriptions Modify Ack Deadline': 'projects_subscriptions-modify-ack-deadline.md' + - 'Subscriptions Modify Push Config': 'projects_subscriptions-modify-push-config.md' + - 'Subscriptions Pull': 'projects_subscriptions-pull.md' + - 'Subscriptions Set Iam Policy': 'projects_subscriptions-set-iam-policy.md' + - 'Subscriptions Test Iam Permissions': 'projects_subscriptions-test-iam-permissions.md' + - 'Topics Create': 'projects_topics-create.md' + - 'Topics Delete': 'projects_topics-delete.md' + - 'Topics Get': 'projects_topics-get.md' + - 'Topics Get Iam Policy': 'projects_topics-get-iam-policy.md' + - 'Topics List': 'projects_topics-list.md' + - 'Topics Publish': 'projects_topics-publish.md' + - 'Topics Set Iam Policy': 'projects_topics-set-iam-policy.md' + - 'Topics Subscriptions List': 'projects_topics-subscriptions-list.md' + - 'Topics Test Iam Permissions': 'projects_topics-test-iam-permissions.md' theme: readthedocs diff --git a/gen/pubsub1_beta2-cli/src/client.rs b/gen/pubsub1_beta2-cli/src/client.rs deleted file mode 100644 
index 0ece418e7d..0000000000 --- a/gen/pubsub1_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, 
- Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - 
} else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => 
Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match 
UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - 
-#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) 
=> { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/pubsub1_beta2-cli/src/main.rs b/gen/pubsub1_beta2-cli/src/main.rs index d97a94de6a..6b8d5d8efc 100644 --- a/gen/pubsub1_beta2-cli/src/main.rs +++ b/gen/pubsub1_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_pubsub1_beta2::{api, Error, oauth2}; +use google_pubsub1_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -338,7 +337,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -397,7 +396,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1074,7 +1073,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( 
value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1133,7 +1132,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1362,7 +1361,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1749,7 +1748,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1877,7 +1876,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1905,7 +1904,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2005,7 +2004,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2077,7 +2076,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2127,7 +2126,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2155,7 +2154,7 @@ async fn main() { let mut app = App::new("pubsub1-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20230119") .about("Provides reliable, many-to-many, asynchronous messaging between applications. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_pubsub1_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/pubsub1_beta2/Cargo.toml b/gen/pubsub1_beta2/Cargo.toml index 2055316ac7..e392194a26 100644 --- a/gen/pubsub1_beta2/Cargo.toml +++ b/gen/pubsub1_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-pubsub1_beta2" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Pubsub (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pubsub1_beta2" homepage = "https://cloud.google.com/pubsub/docs" -documentation = "https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-pubsub1_beta2/5.0.2+20230119" license = "MIT" keywords = ["pubsub", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/pubsub1_beta2/README.md b/gen/pubsub1_beta2/README.md index 0c160d576d..8f512ad830 100644 --- a/gen/pubsub1_beta2/README.md +++ b/gen/pubsub1_beta2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-pubsub1_beta2` library allows access to all features of the *Google Pubsub* service. -This documentation was generated from *Pubsub* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *pubsub:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Pubsub* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *pubsub:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Pubsub* *v1_beta2* API can be found at the [official documentation site](https://cloud.google.com/pubsub/docs). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/Pubsub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/Pubsub) ... * projects - * [*subscriptions acknowledge*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionAcknowledgeCall), [*subscriptions create*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionCreateCall), [*subscriptions delete*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionDeleteCall), [*subscriptions get*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionGetCall), [*subscriptions get iam policy*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionGetIamPolicyCall), [*subscriptions list*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionListCall), [*subscriptions modify ack deadline*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionModifyAckDeadlineCall), [*subscriptions modify push config*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionModifyPushConfigCall), [*subscriptions pull*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionPullCall), [*subscriptions set iam policy*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionSetIamPolicyCall), [*subscriptions test iam permissions*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectSubscriptionTestIamPermissionCall), 
[*topics create*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicCreateCall), [*topics delete*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicDeleteCall), [*topics get*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicGetCall), [*topics get iam policy*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicGetIamPolicyCall), [*topics list*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicListCall), [*topics publish*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicPublishCall), [*topics set iam policy*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicSetIamPolicyCall), [*topics subscriptions list*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicSubscriptionListCall) and [*topics test iam permissions*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/api::ProjectTopicTestIamPermissionCall) + * [*subscriptions acknowledge*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionAcknowledgeCall), [*subscriptions create*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionCreateCall), [*subscriptions delete*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionDeleteCall), [*subscriptions get*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionGetCall), [*subscriptions get iam policy*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionGetIamPolicyCall), [*subscriptions 
list*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionListCall), [*subscriptions modify ack deadline*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionModifyAckDeadlineCall), [*subscriptions modify push config*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionModifyPushConfigCall), [*subscriptions pull*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionPullCall), [*subscriptions set iam policy*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionSetIamPolicyCall), [*subscriptions test iam permissions*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectSubscriptionTestIamPermissionCall), [*topics create*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicCreateCall), [*topics delete*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicDeleteCall), [*topics get*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicGetCall), [*topics get iam policy*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicGetIamPolicyCall), [*topics list*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicListCall), [*topics publish*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicPublishCall), [*topics set iam policy*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicSetIamPolicyCall), [*topics subscriptions list*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicSubscriptionListCall) and [*topics test iam 
permissions*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/api::ProjectTopicTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/Pubsub)** +* **[Hub](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/Pubsub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::Part)** + * **[Parts](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::CallBuilder)** * operations to apply to 
*Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-pubsub1_beta2/5.0.2-beta-1+20230119/google_pubsub1_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-pubsub1_beta2/5.0.2+20230119/google_pubsub1_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/pubsub1_beta2/src/api.rs b/gen/pubsub1_beta2/src/api.rs index a74625ba06..cff42e5dc6 100644 --- a/gen/pubsub1_beta2/src/api.rs +++ b/gen/pubsub1_beta2/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Pubsub { Pubsub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://pubsub.googleapis.com/".to_string(), _root_url: "https://pubsub.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Pubsub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/pubsub1_beta2/src/client.rs b/gen/pubsub1_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/pubsub1_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/pubsub1_beta2/src/lib.rs b/gen/pubsub1_beta2/src/lib.rs index dc33bedb61..cbd742c117 100644 --- a/gen/pubsub1_beta2/src/lib.rs +++ b/gen/pubsub1_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Pubsub* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *pubsub:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Pubsub* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *pubsub:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Pubsub* *v1_beta2* API can be found at the //! [official documentation site](https://cloud.google.com/pubsub/docs). diff --git a/gen/pubsublite1-cli/Cargo.toml b/gen/pubsublite1-cli/Cargo.toml index 57921ad2e0..0caf9fdd58 100644 --- a/gen/pubsublite1-cli/Cargo.toml +++ b/gen/pubsublite1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-pubsublite1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with PubsubLite (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pubsublite1-cli" @@ -20,13 +20,13 @@ name = "pubsublite1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-pubsublite1] path = "../pubsublite1" -version = "4.0.1+20220301" +version = "5.0.2+20230114" + diff --git a/gen/pubsublite1-cli/README.md b/gen/pubsublite1-cli/README.md index d2978736c7..536c2f114d 100644 --- a/gen/pubsublite1-cli/README.md +++ b/gen/pubsublite1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage 
-This documentation was generated from the *PubsubLite* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *PubsubLite* API at revision *20230114*. The CLI is at version *5.0.2*. ```bash pubsublite1 [options] diff --git a/gen/pubsublite1-cli/mkdocs.yml b/gen/pubsublite1-cli/mkdocs.yml index 5e5ab11049..916674d8f2 100644 --- a/gen/pubsublite1-cli/mkdocs.yml +++ b/gen/pubsublite1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: PubsubLite v4.0.1+20220301 +site_name: PubsubLite v5.0.2+20230114 site_url: http://byron.github.io/google-apis-rs/google-pubsublite1-cli site_description: A complete library to interact with PubsubLite (protocol v1) @@ -7,36 +7,39 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/pubsublite1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['admin_projects-locations-operations-cancel.md', 'Admin', 'Projects Locations Operations Cancel'] -- ['admin_projects-locations-operations-delete.md', 'Admin', 'Projects Locations Operations Delete'] -- ['admin_projects-locations-operations-get.md', 'Admin', 'Projects Locations Operations Get'] -- ['admin_projects-locations-operations-list.md', 'Admin', 'Projects Locations Operations List'] -- ['admin_projects-locations-reservations-create.md', 'Admin', 'Projects Locations Reservations Create'] -- ['admin_projects-locations-reservations-delete.md', 'Admin', 'Projects Locations Reservations Delete'] -- ['admin_projects-locations-reservations-get.md', 'Admin', 'Projects Locations Reservations Get'] -- ['admin_projects-locations-reservations-list.md', 'Admin', 'Projects Locations Reservations List'] -- ['admin_projects-locations-reservations-patch.md', 'Admin', 'Projects Locations Reservations Patch'] -- ['admin_projects-locations-reservations-topics-list.md', 'Admin', 'Projects Locations Reservations Topics List'] -- ['admin_projects-locations-subscriptions-create.md', 'Admin', 'Projects Locations Subscriptions Create'] -- 
['admin_projects-locations-subscriptions-delete.md', 'Admin', 'Projects Locations Subscriptions Delete'] -- ['admin_projects-locations-subscriptions-get.md', 'Admin', 'Projects Locations Subscriptions Get'] -- ['admin_projects-locations-subscriptions-list.md', 'Admin', 'Projects Locations Subscriptions List'] -- ['admin_projects-locations-subscriptions-patch.md', 'Admin', 'Projects Locations Subscriptions Patch'] -- ['admin_projects-locations-subscriptions-seek.md', 'Admin', 'Projects Locations Subscriptions Seek'] -- ['admin_projects-locations-topics-create.md', 'Admin', 'Projects Locations Topics Create'] -- ['admin_projects-locations-topics-delete.md', 'Admin', 'Projects Locations Topics Delete'] -- ['admin_projects-locations-topics-get.md', 'Admin', 'Projects Locations Topics Get'] -- ['admin_projects-locations-topics-get-partitions.md', 'Admin', 'Projects Locations Topics Get Partitions'] -- ['admin_projects-locations-topics-list.md', 'Admin', 'Projects Locations Topics List'] -- ['admin_projects-locations-topics-patch.md', 'Admin', 'Projects Locations Topics Patch'] -- ['admin_projects-locations-topics-subscriptions-list.md', 'Admin', 'Projects Locations Topics Subscriptions List'] -- ['cursor_projects-locations-subscriptions-commit-cursor.md', 'Cursor', 'Projects Locations Subscriptions Commit Cursor'] -- ['cursor_projects-locations-subscriptions-cursors-list.md', 'Cursor', 'Projects Locations Subscriptions Cursors List'] -- ['topic-stats_projects-locations-topics-compute-head-cursor.md', 'Topic Stats', 'Projects Locations Topics Compute Head Cursor'] -- ['topic-stats_projects-locations-topics-compute-message-stats.md', 'Topic Stats', 'Projects Locations Topics Compute Message Stats'] -- ['topic-stats_projects-locations-topics-compute-time-cursor.md', 'Topic Stats', 'Projects Locations Topics Compute Time Cursor'] +nav: +- Home: 'index.md' +- 'Admin': + - 'Projects Locations Operations Cancel': 'admin_projects-locations-operations-cancel.md' + - 'Projects 
Locations Operations Delete': 'admin_projects-locations-operations-delete.md' + - 'Projects Locations Operations Get': 'admin_projects-locations-operations-get.md' + - 'Projects Locations Operations List': 'admin_projects-locations-operations-list.md' + - 'Projects Locations Reservations Create': 'admin_projects-locations-reservations-create.md' + - 'Projects Locations Reservations Delete': 'admin_projects-locations-reservations-delete.md' + - 'Projects Locations Reservations Get': 'admin_projects-locations-reservations-get.md' + - 'Projects Locations Reservations List': 'admin_projects-locations-reservations-list.md' + - 'Projects Locations Reservations Patch': 'admin_projects-locations-reservations-patch.md' + - 'Projects Locations Reservations Topics List': 'admin_projects-locations-reservations-topics-list.md' + - 'Projects Locations Subscriptions Create': 'admin_projects-locations-subscriptions-create.md' + - 'Projects Locations Subscriptions Delete': 'admin_projects-locations-subscriptions-delete.md' + - 'Projects Locations Subscriptions Get': 'admin_projects-locations-subscriptions-get.md' + - 'Projects Locations Subscriptions List': 'admin_projects-locations-subscriptions-list.md' + - 'Projects Locations Subscriptions Patch': 'admin_projects-locations-subscriptions-patch.md' + - 'Projects Locations Subscriptions Seek': 'admin_projects-locations-subscriptions-seek.md' + - 'Projects Locations Topics Create': 'admin_projects-locations-topics-create.md' + - 'Projects Locations Topics Delete': 'admin_projects-locations-topics-delete.md' + - 'Projects Locations Topics Get': 'admin_projects-locations-topics-get.md' + - 'Projects Locations Topics Get Partitions': 'admin_projects-locations-topics-get-partitions.md' + - 'Projects Locations Topics List': 'admin_projects-locations-topics-list.md' + - 'Projects Locations Topics Patch': 'admin_projects-locations-topics-patch.md' + - 'Projects Locations Topics Subscriptions List': 
'admin_projects-locations-topics-subscriptions-list.md' +- 'Cursor': + - 'Projects Locations Subscriptions Commit Cursor': 'cursor_projects-locations-subscriptions-commit-cursor.md' + - 'Projects Locations Subscriptions Cursors List': 'cursor_projects-locations-subscriptions-cursors-list.md' +- 'Topic Stats': + - 'Projects Locations Topics Compute Head Cursor': 'topic-stats_projects-locations-topics-compute-head-cursor.md' + - 'Projects Locations Topics Compute Message Stats': 'topic-stats_projects-locations-topics-compute-message-stats.md' + - 'Projects Locations Topics Compute Time Cursor': 'topic-stats_projects-locations-topics-compute-time-cursor.md' theme: readthedocs diff --git a/gen/pubsublite1-cli/src/client.rs b/gen/pubsublite1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/pubsublite1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/pubsublite1-cli/src/main.rs b/gen/pubsublite1-cli/src/main.rs index c488111e16..046cfed1fd 100644 --- a/gen/pubsublite1-cli/src/main.rs +++ b/gen/pubsublite1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_pubsublite1::{api, Error, oauth2}; +use google_pubsublite1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -249,7 +248,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -505,7 +504,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -595,7 +594,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -654,7 +653,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -727,10 +726,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "delivery-config.delivery-requirement" => Some(("deliveryConfig.deliveryRequirement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "export-config.current-state" => Some(("exportConfig.currentState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "export-config.dead-letter-topic" => Some(("exportConfig.deadLetterTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "export-config.desired-state" => Some(("exportConfig.desiredState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "export-config.pubsub-config.topic" => Some(("exportConfig.pubsubConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "topic" => Some(("topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["delivery-config", "delivery-requirement", "name", "topic"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["current-state", "dead-letter-topic", "delivery-config", "delivery-requirement", "desired-state", "export-config", "name", "pubsub-config", "topic"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -748,7 +751,7 @@ where call = call.subscription_id(value.unwrap_or("")); }, "skip-backlog" => { - call = call.skip_backlog(arg_from_str(value.unwrap_or("false"), err, "skip-backlog", "boolean")); + call = call.skip_backlog( value.map(|v| arg_from_str(v, err, "skip-backlog", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -911,7 +914,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -984,10 +987,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "delivery-config.delivery-requirement" => Some(("deliveryConfig.deliveryRequirement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "export-config.current-state" => Some(("exportConfig.currentState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "export-config.dead-letter-topic" => Some(("exportConfig.deadLetterTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "export-config.desired-state" => Some(("exportConfig.desiredState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "export-config.pubsub-config.topic" => Some(("exportConfig.pubsubConfig.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "topic" => Some(("topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["delivery-config", "delivery-requirement", "name", "topic"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["current-state", "dead-letter-topic", "delivery-config", "delivery-requirement", "desired-state", "export-config", "name", "pubsub-config", "topic"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1002,7 +1009,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1400,7 +1407,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1496,7 +1503,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1555,7 +1562,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1700,7 +1707,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2895,7 +2902,7 @@ async fn main() { let mut app = App::new("pubsublite1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230114") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_pubsublite1_cli") .arg(Arg::with_name("url") diff --git a/gen/pubsublite1/Cargo.toml b/gen/pubsublite1/Cargo.toml index 1fd67cf591..f69b5d5923 100644 --- a/gen/pubsublite1/Cargo.toml +++ b/gen/pubsublite1/Cargo.toml @@ -4,12 +4,12 @@ [package] 
name = "google-pubsublite1" -version = "5.0.2-beta-1+20230114" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with PubsubLite (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/pubsublite1" homepage = "https://cloud.google.com/pubsub/lite/docs" -documentation = "https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114" +documentation = "https://docs.rs/google-pubsublite1/5.0.2+20230114" license = "MIT" keywords = ["pubsublite", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/pubsublite1/README.md b/gen/pubsublite1/README.md index b919d45c41..356f8db39b 100644 --- a/gen/pubsublite1/README.md +++ b/gen/pubsublite1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-pubsublite1` library allows access to all features of the *Google PubsubLite* service. -This documentation was generated from *PubsubLite* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *pubsublite:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *PubsubLite* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *pubsublite:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *PubsubLite* *v1* API can be found at the [official documentation site](https://cloud.google.com/pubsub/lite/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/PubsubLite) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/PubsubLite) ... 
* admin - * [*projects locations operations cancel*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationOperationCancelCall), [*projects locations operations delete*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationOperationDeleteCall), [*projects locations operations get*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationOperationGetCall), [*projects locations operations list*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationOperationListCall), [*projects locations reservations create*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationReservationCreateCall), [*projects locations reservations delete*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationReservationDeleteCall), [*projects locations reservations get*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationReservationGetCall), [*projects locations reservations list*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationReservationListCall), [*projects locations reservations patch*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationReservationPatchCall), [*projects locations reservations topics list*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationReservationTopicListCall), [*projects locations subscriptions create*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionCreateCall), [*projects locations subscriptions 
delete*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionDeleteCall), [*projects locations subscriptions get*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionGetCall), [*projects locations subscriptions list*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionListCall), [*projects locations subscriptions patch*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionPatchCall), [*projects locations subscriptions seek*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionSeekCall), [*projects locations topics create*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationTopicCreateCall), [*projects locations topics delete*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationTopicDeleteCall), [*projects locations topics get*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationTopicGetCall), [*projects locations topics get partitions*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationTopicGetPartitionCall), [*projects locations topics list*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationTopicListCall), [*projects locations topics patch*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationTopicPatchCall) and [*projects locations topics subscriptions list*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::AdminProjectLocationTopicSubscriptionListCall) -* 
[cursor](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::Cursor) - * [*projects locations subscriptions commit cursor*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::CursorProjectLocationSubscriptionCommitCursorCall) and [*projects locations subscriptions cursors list*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::CursorProjectLocationSubscriptionCursorListCall) + * [*projects locations operations cancel*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationOperationCancelCall), [*projects locations operations delete*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationOperationDeleteCall), [*projects locations operations get*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationOperationGetCall), [*projects locations operations list*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationOperationListCall), [*projects locations reservations create*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationReservationCreateCall), [*projects locations reservations delete*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationReservationDeleteCall), [*projects locations reservations get*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationReservationGetCall), [*projects locations reservations list*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationReservationListCall), [*projects locations reservations patch*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationReservationPatchCall), [*projects locations reservations topics 
list*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationReservationTopicListCall), [*projects locations subscriptions create*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionCreateCall), [*projects locations subscriptions delete*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionDeleteCall), [*projects locations subscriptions get*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionGetCall), [*projects locations subscriptions list*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionListCall), [*projects locations subscriptions patch*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionPatchCall), [*projects locations subscriptions seek*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationSubscriptionSeekCall), [*projects locations topics create*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationTopicCreateCall), [*projects locations topics delete*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationTopicDeleteCall), [*projects locations topics get*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationTopicGetCall), [*projects locations topics get partitions*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationTopicGetPartitionCall), [*projects locations topics list*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationTopicListCall), [*projects locations topics patch*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationTopicPatchCall) and [*projects 
locations topics subscriptions list*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::AdminProjectLocationTopicSubscriptionListCall) +* [cursor](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::Cursor) + * [*projects locations subscriptions commit cursor*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::CursorProjectLocationSubscriptionCommitCursorCall) and [*projects locations subscriptions cursors list*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::CursorProjectLocationSubscriptionCursorListCall) * topic stats - * [*projects locations topics compute head cursor*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::TopicStatProjectLocationTopicComputeHeadCursorCall), [*projects locations topics compute message stats*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::TopicStatProjectLocationTopicComputeMessageStatCall) and [*projects locations topics compute time cursor*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/api::TopicStatProjectLocationTopicComputeTimeCursorCall) + * [*projects locations topics compute head cursor*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::TopicStatProjectLocationTopicComputeHeadCursorCall), [*projects locations topics compute message stats*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::TopicStatProjectLocationTopicComputeMessageStatCall) and [*projects locations topics compute time cursor*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/api::TopicStatProjectLocationTopicComputeTimeCursorCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/PubsubLite)** +* 
**[Hub](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/PubsubLite)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::CallBuilder) -* **[Resources](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::CallBuilder) +* **[Resources](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::Part)** + * **[Parts](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::Delegate) to the -[Method Builder](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::Delegate) to the +[Method Builder](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::RequestValue) and -[decodable](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::RequestValue) and +[decodable](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-pubsublite1/5.0.2-beta-1+20230114/google_pubsublite1/client::RequestValue) are moved +* [request values](https://docs.rs/google-pubsublite1/5.0.2+20230114/google_pubsublite1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/pubsublite1/src/api.rs b/gen/pubsublite1/src/api.rs index be3e358f3f..ff7acfaa70 100644 --- a/gen/pubsublite1/src/api.rs +++ b/gen/pubsublite1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> PubsubLite { PubsubLite { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://pubsublite.googleapis.com/".to_string(), _root_url: "https://pubsublite.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> PubsubLite { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/pubsublite1/src/client.rs b/gen/pubsublite1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/pubsublite1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/pubsublite1/src/lib.rs b/gen/pubsublite1/src/lib.rs index f58941e7c4..cc3e5b8bee 100644 --- a/gen/pubsublite1/src/lib.rs +++ b/gen/pubsublite1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *PubsubLite* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *pubsublite:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *PubsubLite* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *pubsublite:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *PubsubLite* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/pubsub/lite/docs). diff --git a/gen/qpxexpress1-cli/Cargo.toml b/gen/qpxexpress1-cli/Cargo.toml index 40c683b20c..0cff445352 100644 --- a/gen/qpxexpress1-cli/Cargo.toml +++ b/gen/qpxexpress1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-qpxexpress1-cli" -version = "4.0.1+20160708" +version = "5.0.2+20160708" authors = ["Sebastian Thiel "] description = "A complete library to interact with QPX Express (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/qpxexpress1-cli" @@ -20,13 +20,13 @@ name = "qpxexpress1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-qpxexpress1] path = "../qpxexpress1" -version = "4.0.1+20160708" +version = "5.0.2+20160708" + diff --git a/gen/qpxexpress1-cli/README.md b/gen/qpxexpress1-cli/README.md index 6b57f8ad2d..fc4b95a460 100644 --- a/gen/qpxexpress1-cli/README.md +++ b/gen/qpxexpress1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *QPX Express* API at revision *20160708*. The CLI is at version *4.0.1*. +This documentation was generated from the *QPX Express* API at revision *20160708*. The CLI is at version *5.0.2*. ```bash qpxexpress1 [options] diff --git a/gen/qpxexpress1-cli/mkdocs.yml b/gen/qpxexpress1-cli/mkdocs.yml index 3bf0e6b102..846ac3d13b 100644 --- a/gen/qpxexpress1-cli/mkdocs.yml +++ b/gen/qpxexpress1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: QPX Express v4.0.1+20160708 +site_name: QPX Express v5.0.2+20160708 site_url: http://byron.github.io/google-apis-rs/google-qpxexpress1-cli site_description: A complete library to interact with QPX Express (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/qpxexpress1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['trips_search.md', 'Trips', 'Search'] +nav: +- Home: 'index.md' +- 'Trips': + - 'Search': 'trips_search.md' theme: readthedocs diff --git a/gen/qpxexpress1-cli/src/client.rs b/gen/qpxexpress1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/qpxexpress1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct 
JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/qpxexpress1-cli/src/main.rs b/gen/qpxexpress1-cli/src/main.rs index 646d23fc5a..aa04f1b483 100644 --- a/gen/qpxexpress1-cli/src/main.rs +++ b/gen/qpxexpress1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_qpxexpress1::{api, Error, oauth2}; +use google_qpxexpress1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -258,7 +257,7 @@ async fn main() { let mut app = App::new("qpxexpress1") .author("Sebastian Thiel ") - .version("4.0.1+20160708") + .version("5.0.2+20160708") .about("Finds the least expensive flights between an origin and a destination.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_qpxexpress1_cli") .arg(Arg::with_name("folder") diff --git a/gen/qpxexpress1/Cargo.toml b/gen/qpxexpress1/Cargo.toml index 0fbcd7b983..f9cd8f4cb2 100644 --- a/gen/qpxexpress1/Cargo.toml +++ b/gen/qpxexpress1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-qpxexpress1" -version = "5.0.2-beta-1+20160708" +version = "5.0.2+20160708" authors = ["Sebastian Thiel "] description = "A complete library to interact with QPX Express (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/qpxexpress1" homepage = "http://developers.google.com/qpx-express" -documentation = "https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708" +documentation = "https://docs.rs/google-qpxexpress1/5.0.2+20160708" license = "MIT" keywords = 
["qpxExpress", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/qpxexpress1/README.md b/gen/qpxexpress1/README.md index 0759768630..fea92e8a27 100644 --- a/gen/qpxexpress1/README.md +++ b/gen/qpxexpress1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-qpxexpress1` library allows access to all features of the *Google QPX Express* service. -This documentation was generated from *QPX Express* crate version *5.0.2-beta-1+20160708*, where *20160708* is the exact revision of the *qpxExpress:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *QPX Express* crate version *5.0.2+20160708*, where *20160708* is the exact revision of the *qpxExpress:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *QPX Express* *v1* API can be found at the [official documentation site](http://developers.google.com/qpx-express). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/QPXExpress) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/QPXExpress) ... 
* trips - * [*search*](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/api::TripSearchCall) + * [*search*](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/api::TripSearchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/QPXExpress)** +* **[Hub](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/QPXExpress)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::CallBuilder) -* **[Resources](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::CallBuilder) +* **[Resources](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::Part)** + * **[Parts](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::Delegate) to the -[Method Builder](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::Delegate) to the +[Method Builder](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::RequestValue) and -[decodable](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::RequestValue) and +[decodable](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-qpxexpress1/5.0.2-beta-1+20160708/google_qpxexpress1/client::RequestValue) are moved +* [request values](https://docs.rs/google-qpxexpress1/5.0.2+20160708/google_qpxexpress1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/qpxexpress1/src/api.rs b/gen/qpxexpress1/src/api.rs index 4d5b4763eb..f0a9bdde5d 100644 --- a/gen/qpxexpress1/src/api.rs +++ b/gen/qpxexpress1/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> QPXExpress { QPXExpress { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/qpxExpress/v1/trips/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -114,7 +114,7 @@ impl<'a, S> QPXExpress { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/qpxexpress1/src/client.rs b/gen/qpxexpress1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/qpxexpress1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/qpxexpress1/src/lib.rs b/gen/qpxexpress1/src/lib.rs index 348d2f48fc..e79a36a4ab 100644 --- a/gen/qpxexpress1/src/lib.rs +++ b/gen/qpxexpress1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *QPX Express* crate version *5.0.2-beta-1+20160708*, where *20160708* is the exact revision of the *qpxExpress:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *QPX Express* crate version *5.0.2+20160708*, where *20160708* is the exact revision of the *qpxExpress:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *QPX Express* *v1* API can be found at the //! [official documentation site](http://developers.google.com/qpx-express). diff --git a/gen/realtimebidding1-cli/Cargo.toml b/gen/realtimebidding1-cli/Cargo.toml index 1a7f3e95e6..cdce44b705 100644 --- a/gen/realtimebidding1-cli/Cargo.toml +++ b/gen/realtimebidding1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-realtimebidding1-cli" -version = "4.0.1+20220307" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Real-time Bidding (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/realtimebidding1-cli" @@ -20,13 +20,13 @@ name = "realtimebidding1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-realtimebidding1] path = "../realtimebidding1" -version = "4.0.1+20220307" +version = "5.0.2+20230124" + diff --git a/gen/realtimebidding1-cli/README.md b/gen/realtimebidding1-cli/README.md index b351cba5ab..dc7d8519a4 100644 --- a/gen/realtimebidding1-cli/README.md +++ b/gen/realtimebidding1-cli/README.md @@ -25,7 +25,7 @@ Find 
the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Real-time Bidding* API at revision *20220307*. The CLI is at version *4.0.1*. +This documentation was generated from the *Real-time Bidding* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash realtimebidding1 [options] @@ -50,6 +50,10 @@ realtimebidding1 [options] pretargeting-configs-remove-targeted-publishers (-r )... [-p ]... [-o ] pretargeting-configs-remove-targeted-sites (-r )... [-p ]... [-o ] pretargeting-configs-suspend (-r )... [-p ]... [-o ] + publisher-connections-batch-approve (-r )... [-p ]... [-o ] + publisher-connections-batch-reject (-r )... [-p ]... [-o ] + publisher-connections-get [-p ]... [-o ] + publisher-connections-list [-p ]... [-o ] buyers creatives-create (-r )... [-p ]... [-o ] creatives-get [-p ]... [-o ] diff --git a/gen/realtimebidding1-cli/mkdocs.yml b/gen/realtimebidding1-cli/mkdocs.yml index 042161b15b..8c59168e9f 100644 --- a/gen/realtimebidding1-cli/mkdocs.yml +++ b/gen/realtimebidding1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Real-time Bidding v4.0.1+20220307 +site_name: Real-time Bidding v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-realtimebidding1-cli site_description: A complete library to interact with Real-time Bidding (protocol v1) @@ -7,42 +7,48 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/realtimebidding1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['bidders_creatives-list.md', 'Bidders', 'Creatives List'] -- ['bidders_creatives-watch.md', 'Bidders', 'Creatives Watch'] -- ['bidders_endpoints-get.md', 'Bidders', 'Endpoints Get'] -- ['bidders_endpoints-list.md', 'Bidders', 'Endpoints List'] -- ['bidders_endpoints-patch.md', 'Bidders', 'Endpoints Patch'] -- ['bidders_get.md', 'Bidders', 'Get'] -- ['bidders_list.md', 'Bidders', 'List'] -- ['bidders_pretargeting-configs-activate.md', 'Bidders', 'Pretargeting 
Configs Activate'] -- ['bidders_pretargeting-configs-add-targeted-apps.md', 'Bidders', 'Pretargeting Configs Add Targeted Apps'] -- ['bidders_pretargeting-configs-add-targeted-publishers.md', 'Bidders', 'Pretargeting Configs Add Targeted Publishers'] -- ['bidders_pretargeting-configs-add-targeted-sites.md', 'Bidders', 'Pretargeting Configs Add Targeted Sites'] -- ['bidders_pretargeting-configs-create.md', 'Bidders', 'Pretargeting Configs Create'] -- ['bidders_pretargeting-configs-delete.md', 'Bidders', 'Pretargeting Configs Delete'] -- ['bidders_pretargeting-configs-get.md', 'Bidders', 'Pretargeting Configs Get'] -- ['bidders_pretargeting-configs-list.md', 'Bidders', 'Pretargeting Configs List'] -- ['bidders_pretargeting-configs-patch.md', 'Bidders', 'Pretargeting Configs Patch'] -- ['bidders_pretargeting-configs-remove-targeted-apps.md', 'Bidders', 'Pretargeting Configs Remove Targeted Apps'] -- ['bidders_pretargeting-configs-remove-targeted-publishers.md', 'Bidders', 'Pretargeting Configs Remove Targeted Publishers'] -- ['bidders_pretargeting-configs-remove-targeted-sites.md', 'Bidders', 'Pretargeting Configs Remove Targeted Sites'] -- ['bidders_pretargeting-configs-suspend.md', 'Bidders', 'Pretargeting Configs Suspend'] -- ['buyers_creatives-create.md', 'Buyers', 'Creatives Create'] -- ['buyers_creatives-get.md', 'Buyers', 'Creatives Get'] -- ['buyers_creatives-list.md', 'Buyers', 'Creatives List'] -- ['buyers_creatives-patch.md', 'Buyers', 'Creatives Patch'] -- ['buyers_get.md', 'Buyers', 'Get'] -- ['buyers_get-remarketing-tag.md', 'Buyers', 'Get Remarketing Tag'] -- ['buyers_list.md', 'Buyers', 'List'] -- ['buyers_user-lists-close.md', 'Buyers', 'User Lists Close'] -- ['buyers_user-lists-create.md', 'Buyers', 'User Lists Create'] -- ['buyers_user-lists-get.md', 'Buyers', 'User Lists Get'] -- ['buyers_user-lists-get-remarketing-tag.md', 'Buyers', 'User Lists Get Remarketing Tag'] -- ['buyers_user-lists-list.md', 'Buyers', 'User Lists List'] -- 
['buyers_user-lists-open.md', 'Buyers', 'User Lists Open'] -- ['buyers_user-lists-update.md', 'Buyers', 'User Lists Update'] +nav: +- Home: 'index.md' +- 'Bidders': + - 'Creatives List': 'bidders_creatives-list.md' + - 'Creatives Watch': 'bidders_creatives-watch.md' + - 'Endpoints Get': 'bidders_endpoints-get.md' + - 'Endpoints List': 'bidders_endpoints-list.md' + - 'Endpoints Patch': 'bidders_endpoints-patch.md' + - 'Get': 'bidders_get.md' + - 'List': 'bidders_list.md' + - 'Pretargeting Configs Activate': 'bidders_pretargeting-configs-activate.md' + - 'Pretargeting Configs Add Targeted Apps': 'bidders_pretargeting-configs-add-targeted-apps.md' + - 'Pretargeting Configs Add Targeted Publishers': 'bidders_pretargeting-configs-add-targeted-publishers.md' + - 'Pretargeting Configs Add Targeted Sites': 'bidders_pretargeting-configs-add-targeted-sites.md' + - 'Pretargeting Configs Create': 'bidders_pretargeting-configs-create.md' + - 'Pretargeting Configs Delete': 'bidders_pretargeting-configs-delete.md' + - 'Pretargeting Configs Get': 'bidders_pretargeting-configs-get.md' + - 'Pretargeting Configs List': 'bidders_pretargeting-configs-list.md' + - 'Pretargeting Configs Patch': 'bidders_pretargeting-configs-patch.md' + - 'Pretargeting Configs Remove Targeted Apps': 'bidders_pretargeting-configs-remove-targeted-apps.md' + - 'Pretargeting Configs Remove Targeted Publishers': 'bidders_pretargeting-configs-remove-targeted-publishers.md' + - 'Pretargeting Configs Remove Targeted Sites': 'bidders_pretargeting-configs-remove-targeted-sites.md' + - 'Pretargeting Configs Suspend': 'bidders_pretargeting-configs-suspend.md' + - 'Publisher Connections Batch Approve': 'bidders_publisher-connections-batch-approve.md' + - 'Publisher Connections Batch Reject': 'bidders_publisher-connections-batch-reject.md' + - 'Publisher Connections Get': 'bidders_publisher-connections-get.md' + - 'Publisher Connections List': 'bidders_publisher-connections-list.md' +- 'Buyers': + - 'Creatives Create': 
'buyers_creatives-create.md' + - 'Creatives Get': 'buyers_creatives-get.md' + - 'Creatives List': 'buyers_creatives-list.md' + - 'Creatives Patch': 'buyers_creatives-patch.md' + - 'Get': 'buyers_get.md' + - 'Get Remarketing Tag': 'buyers_get-remarketing-tag.md' + - 'List': 'buyers_list.md' + - 'User Lists Close': 'buyers_user-lists-close.md' + - 'User Lists Create': 'buyers_user-lists-create.md' + - 'User Lists Get': 'buyers_user-lists-get.md' + - 'User Lists Get Remarketing Tag': 'buyers_user-lists-get-remarketing-tag.md' + - 'User Lists List': 'buyers_user-lists-list.md' + - 'User Lists Open': 'buyers_user-lists-open.md' + - 'User Lists Update': 'buyers_user-lists-update.md' theme: readthedocs diff --git a/gen/realtimebidding1-cli/src/client.rs b/gen/realtimebidding1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/realtimebidding1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/realtimebidding1-cli/src/main.rs b/gen/realtimebidding1-cli/src/main.rs index fa756270fc..c6dcae17aa 100644 --- a/gen/realtimebidding1-cli/src/main.rs +++ b/gen/realtimebidding1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_realtimebidding1::{api, Error, oauth2}; +use google_realtimebidding1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -64,7 +63,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -262,7 +261,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -355,7 +354,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -466,7 +465,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call 
= call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1085,7 +1084,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1203,7 +1202,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1591,6 +1590,293 @@ where } } + async fn _bidders_publisher_connections_batch_approve(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "names" => Some(("names", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["names"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::BatchApprovePublisherConnectionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.bidders().publisher_connections_batch_approve(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut 
value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _bidders_publisher_connections_batch_reject(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "names" => Some(("names", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["names"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::BatchRejectPublisherConnectionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.bidders().publisher_connections_batch_reject(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", 
key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _bidders_publisher_connections_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.bidders().publisher_connections_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _bidders_publisher_connections_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.bidders().publisher_connections_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } 
else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _buyers_creatives_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1666,6 +1952,7 @@ where "native.star-rating" => Some(("native.starRating", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "native.video-url" => Some(("native.videoUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native.video-vast-xml" => Some(("native.videoVastXml", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "render-url" => Some(("renderUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "restricted-categories" => Some(("restrictedCategories", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "video.video-metadata.duration" => Some(("video.videoMetadata.duration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1676,7 +1963,7 @@ where "video.video-url" => Some(("video.videoUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "video.video-vast-xml" => Some(("video.videoVastXml", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "ad-choices-destination-url", "ad-technology-providers", "advertiser-name", "agency-id", "api-update-time", "app-icon", "body", "call-to-action", "china-policy-compliance", "click-link-url", "click-tracking-url", "creative-format", "creative-id", "creative-serving-decision", "deal-ids", "deals-policy-compliance", "declared-attributes", "declared-click-through-urls", "declared-restricted-categories", "declared-vendor-ids", "detected-attributes", "detected-click-through-urls", "detected-domains", "detected-gvl-ids", "detected-languages", "detected-product-categories", "detected-provider-ids", "detected-sensitive-categories", "detected-vendor-ids", "duration", "headline", "height", "html", "image", "impression-tracking-urls", "is-valid-vast", "is-vpaid", "last-status-update", "logo", "name", "native", "network-policy-compliance", "platform-policy-compliance", "price-display-text", "restricted-categories", "russia-policy-compliance", "skip-offset", "snippet", "star-rating", "status", "unidentified-provider-domains", "url", "vast-version", "version", "video", "video-metadata", "video-url", "video-vast-xml", "width"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "ad-choices-destination-url", "ad-technology-providers", "advertiser-name", "agency-id", "api-update-time", "app-icon", "body", "call-to-action", "china-policy-compliance", "click-link-url", "click-tracking-url", "creative-format", "creative-id", "creative-serving-decision", "deal-ids", "deals-policy-compliance", "declared-attributes", "declared-click-through-urls", "declared-restricted-categories", "declared-vendor-ids", "detected-attributes", "detected-click-through-urls", "detected-domains", "detected-gvl-ids", "detected-languages", "detected-product-categories", "detected-provider-ids", "detected-sensitive-categories", "detected-vendor-ids", "duration", 
"headline", "height", "html", "image", "impression-tracking-urls", "is-valid-vast", "is-vpaid", "last-status-update", "logo", "name", "native", "network-policy-compliance", "platform-policy-compliance", "price-display-text", "render-url", "restricted-categories", "russia-policy-compliance", "skip-offset", "snippet", "star-rating", "status", "unidentified-provider-domains", "url", "vast-version", "version", "video", "video-metadata", "video-url", "video-vast-xml", "width"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1805,7 +2092,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1932,6 +2219,7 @@ where "native.star-rating" => Some(("native.starRating", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "native.video-url" => Some(("native.videoUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native.video-vast-xml" => Some(("native.videoVastXml", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "render-url" => Some(("renderUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "restricted-categories" => Some(("restrictedCategories", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "video.video-metadata.duration" => Some(("video.videoMetadata.duration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1942,7 +2230,7 @@ where "video.video-url" => Some(("video.videoUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "video.video-vast-xml" => 
Some(("video.videoVastXml", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "ad-choices-destination-url", "ad-technology-providers", "advertiser-name", "agency-id", "api-update-time", "app-icon", "body", "call-to-action", "china-policy-compliance", "click-link-url", "click-tracking-url", "creative-format", "creative-id", "creative-serving-decision", "deal-ids", "deals-policy-compliance", "declared-attributes", "declared-click-through-urls", "declared-restricted-categories", "declared-vendor-ids", "detected-attributes", "detected-click-through-urls", "detected-domains", "detected-gvl-ids", "detected-languages", "detected-product-categories", "detected-provider-ids", "detected-sensitive-categories", "detected-vendor-ids", "duration", "headline", "height", "html", "image", "impression-tracking-urls", "is-valid-vast", "is-vpaid", "last-status-update", "logo", "name", "native", "network-policy-compliance", "platform-policy-compliance", "price-display-text", "restricted-categories", "russia-policy-compliance", "skip-offset", "snippet", "star-rating", "status", "unidentified-provider-domains", "url", "vast-version", "version", "video", "video-metadata", "video-url", "video-vast-xml", "width"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "ad-choices-destination-url", "ad-technology-providers", "advertiser-name", "agency-id", "api-update-time", "app-icon", "body", "call-to-action", "china-policy-compliance", "click-link-url", "click-tracking-url", "creative-format", "creative-id", "creative-serving-decision", "deal-ids", "deals-policy-compliance", "declared-attributes", "declared-click-through-urls", "declared-restricted-categories", "declared-vendor-ids", "detected-attributes", "detected-click-through-urls", "detected-domains", "detected-gvl-ids", "detected-languages", "detected-product-categories", "detected-provider-ids", 
"detected-sensitive-categories", "detected-vendor-ids", "duration", "headline", "height", "html", "image", "impression-tracking-urls", "is-valid-vast", "is-vpaid", "last-status-update", "logo", "name", "native", "network-policy-compliance", "platform-policy-compliance", "price-display-text", "render-url", "restricted-categories", "russia-policy-compliance", "skip-offset", "snippet", "star-rating", "status", "unidentified-provider-domains", "url", "vast-version", "version", "video", "video-metadata", "video-url", "video-vast-xml", "width"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1957,7 +2245,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2120,7 +2408,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2464,7 +2752,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2761,6 +3049,18 @@ where ("pretargeting-configs-suspend", Some(opt)) => { call_result = self._bidders_pretargeting_configs_suspend(opt, dry_run, &mut err).await; }, + ("publisher-connections-batch-approve", Some(opt)) => { + call_result = self._bidders_publisher_connections_batch_approve(opt, dry_run, &mut err).await; + }, + 
("publisher-connections-batch-reject", Some(opt)) => { + call_result = self._bidders_publisher_connections_batch_reject(opt, dry_run, &mut err).await; + }, + ("publisher-connections-get", Some(opt)) => { + call_result = self._bidders_publisher_connections_get(opt, dry_run, &mut err).await; + }, + ("publisher-connections-list", Some(opt)) => { + call_result = self._bidders_publisher_connections_list(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("bidders".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2890,14 +3190,14 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("bidders", "methods: 'creatives-list', 'creatives-watch', 'endpoints-get', 'endpoints-list', 'endpoints-patch', 'get', 'list', 'pretargeting-configs-activate', 'pretargeting-configs-add-targeted-apps', 'pretargeting-configs-add-targeted-publishers', 'pretargeting-configs-add-targeted-sites', 'pretargeting-configs-create', 'pretargeting-configs-delete', 'pretargeting-configs-get', 'pretargeting-configs-list', 'pretargeting-configs-patch', 'pretargeting-configs-remove-targeted-apps', 'pretargeting-configs-remove-targeted-publishers', 'pretargeting-configs-remove-targeted-sites' and 'pretargeting-configs-suspend'", vec![ + ("bidders", "methods: 'creatives-list', 'creatives-watch', 'endpoints-get', 'endpoints-list', 'endpoints-patch', 'get', 'list', 'pretargeting-configs-activate', 'pretargeting-configs-add-targeted-apps', 'pretargeting-configs-add-targeted-publishers', 'pretargeting-configs-add-targeted-sites', 'pretargeting-configs-create', 'pretargeting-configs-delete', 'pretargeting-configs-get', 'pretargeting-configs-list', 'pretargeting-configs-patch', 'pretargeting-configs-remove-targeted-apps', 'pretargeting-configs-remove-targeted-publishers', 'pretargeting-configs-remove-targeted-sites', 'pretargeting-configs-suspend', 'publisher-connections-batch-approve', 'publisher-connections-batch-reject', 
'publisher-connections-get' and 'publisher-connections-list'", vec![ ("creatives-list", - Some(r##"Lists creatives."##), + Some(r##"Lists creatives as they are at the time of the initial request. This call may take multiple hours to complete. For large, paginated requests, this method returns a snapshot of creatives at the time of request for the first page. `lastStatusUpdate` and `creativeServingDecision` may be outdated for creatives on sequential pages. We recommend [Google Cloud Pub/Sub](//cloud.google.com/pubsub/docs/overview) to view the latest status."##), "Details at http://byron.github.io/google-apis-rs/google_realtimebidding1_cli/bidders_creatives-list", vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the parent buyer that owns the creatives. The pattern for this resource is either `buyers/{buyerAccountId}` or `bidders/{bidderAccountId}`. For `buyers/{buyerAccountId}`, the `buyerAccountId` can be one of the following: 1. The ID of the buyer that is accessing their own creatives. 2. The ID of the child seat buyer under a bidder account. So for listing creatives pertaining to the child seat buyer (`456`) under bidder account (`123`), you would use the pattern: `buyers/456`. 3. The ID of the bidder itself. So for listing creatives pertaining to bidder (`123`), you would use `buyers/123`. If you want to access all creatives pertaining to both the bidder and all of its child seat accounts, you would use `bidders/{bidderAccountId}`, e.g., for all creatives pertaining to bidder (`123`), use `bidders/123`."##), + Some(r##"Required. Name of the parent buyer that owns the creatives. The pattern for this resource is either `buyers/{buyerAccountId}` or `bidders/{bidderAccountId}`. For `buyers/{buyerAccountId}`, the `buyerAccountId` can be one of the following: 1. The ID of the buyer that is accessing their own creatives. 2. The ID of the child seat buyer under a bidder account. 
So for listing creatives pertaining to the child seat buyer (`456`) under bidder account (`123`), you would use the pattern: `buyers/456`. 3. The ID of the bidder itself. So for listing creatives pertaining to bidder (`123`), you would use `buyers/123`. If you want to access all creatives pertaining to both the bidder and all of its child seat accounts, you would use `bidders/{bidderAccountId}`, for example, for all creatives pertaining to bidder (`123`), use `bidders/123`."##), Some(true), Some(false)), @@ -3391,6 +3691,106 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("publisher-connections-batch-approve", + Some(r##"Batch approves multiple publisher connections."##), + "Details at http://byron.github.io/google-apis-rs/google_realtimebidding1_cli/bidders_publisher-connections-batch-approve", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The bidder for whom publisher connections will be approved. Format: `bidders/{bidder}` where `{bidder}` is the account ID of the bidder."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("publisher-connections-batch-reject", + Some(r##"Batch rejects multiple publisher connections."##), + "Details at http://byron.github.io/google-apis-rs/google_realtimebidding1_cli/bidders_publisher-connections-batch-reject", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The bidder for whom publisher connections will be rejected. 
Format: `bidders/{bidder}` where `{bidder}` is the account ID of the bidder."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("publisher-connections-get", + Some(r##"Gets a publisher connection."##), + "Details at http://byron.github.io/google-apis-rs/google_realtimebidding1_cli/bidders_publisher-connections-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the publisher whose connection information is to be retrieved. In the pattern `bidders/{bidder}/publisherConnections/{publisher}` where `{bidder}` is the account ID of the bidder, and `{publisher}` is the ads.txt/app-ads.txt publisher ID. See publisherConnection.name."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("publisher-connections-list", + Some(r##"Lists publisher connections for a given bidder."##), + "Details at http://byron.github.io/google-apis-rs/google_realtimebidding1_cli/bidders_publisher-connections-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Name of the bidder for which publishers have initiated connections. 
The pattern for this resource is `bidders/{bidder}` where `{bidder}` represents the account ID of the bidder."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3451,12 +3851,12 @@ async fn main() { Some(false)), ]), ("creatives-list", - Some(r##"Lists creatives."##), + Some(r##"Lists creatives as they are at the time of the initial request. This call may take multiple hours to complete. For large, paginated requests, this method returns a snapshot of creatives at the time of request for the first page. `lastStatusUpdate` and `creativeServingDecision` may be outdated for creatives on sequential pages. We recommend [Google Cloud Pub/Sub](//cloud.google.com/pubsub/docs/overview) to view the latest status."##), "Details at http://byron.github.io/google-apis-rs/google_realtimebidding1_cli/buyers_creatives-list", vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the parent buyer that owns the creatives. The pattern for this resource is either `buyers/{buyerAccountId}` or `bidders/{bidderAccountId}`. For `buyers/{buyerAccountId}`, the `buyerAccountId` can be one of the following: 1. The ID of the buyer that is accessing their own creatives. 2. The ID of the child seat buyer under a bidder account. So for listing creatives pertaining to the child seat buyer (`456`) under bidder account (`123`), you would use the pattern: `buyers/456`. 3. The ID of the bidder itself. So for listing creatives pertaining to bidder (`123`), you would use `buyers/123`. If you want to access all creatives pertaining to both the bidder and all of its child seat accounts, you would use `bidders/{bidderAccountId}`, e.g., for all creatives pertaining to bidder (`123`), use `bidders/123`."##), + Some(r##"Required. 
Name of the parent buyer that owns the creatives. The pattern for this resource is either `buyers/{buyerAccountId}` or `bidders/{bidderAccountId}`. For `buyers/{buyerAccountId}`, the `buyerAccountId` can be one of the following: 1. The ID of the buyer that is accessing their own creatives. 2. The ID of the child seat buyer under a bidder account. So for listing creatives pertaining to the child seat buyer (`456`) under bidder account (`123`), you would use the pattern: `buyers/456`. 3. The ID of the bidder itself. So for listing creatives pertaining to bidder (`123`), you would use `buyers/123`. If you want to access all creatives pertaining to both the bidder and all of its child seat accounts, you would use `bidders/{bidderAccountId}`, for example, for all creatives pertaining to bidder (`123`), use `bidders/123`."##), Some(true), Some(false)), @@ -3744,7 +4144,7 @@ async fn main() { let mut app = App::new("realtimebidding1") .author("Sebastian Thiel ") - .version("4.0.1+20220307") + .version("5.0.2+20230124") .about("Allows external bidders to manage their RTB integration with Google. 
This includes managing bidder endpoints, QPS quotas, configuring what ad inventory to receive via pretargeting, submitting creatives for verification, and accessing creative metadata such as approval status.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_realtimebidding1_cli") .arg(Arg::with_name("url") diff --git a/gen/realtimebidding1/Cargo.toml b/gen/realtimebidding1/Cargo.toml index 7331703dc5..967e86006f 100644 --- a/gen/realtimebidding1/Cargo.toml +++ b/gen/realtimebidding1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-realtimebidding1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Real-time Bidding (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/realtimebidding1" homepage = "https://developers.google.com/authorized-buyers/apis/realtimebidding/reference/rest/" -documentation = "https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-realtimebidding1/5.0.2+20230124" license = "MIT" keywords = ["realtimebidding", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/realtimebidding1/README.md b/gen/realtimebidding1/README.md index 034ddcb90d..e89d8c1cf0 100644 --- a/gen/realtimebidding1/README.md +++ b/gen/realtimebidding1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-realtimebidding1` library allows access to all features of the *Google Real-time Bidding* service. -This documentation was generated from *Real-time Bidding* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *realtimebidding:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Real-time Bidding* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *realtimebidding:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Real-time Bidding* *v1* API can be found at the [official documentation site](https://developers.google.com/authorized-buyers/apis/realtimebidding/reference/rest/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/RealTimeBidding) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/RealTimeBidding) ... -* [bidders](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::Bidder) - * [*creatives list*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderCreativeListCall), [*creatives watch*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderCreativeWatchCall), [*endpoints get*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderEndpointGetCall), [*endpoints list*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderEndpointListCall), [*endpoints patch*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderEndpointPatchCall), [*get*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderGetCall), [*list*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderListCall), [*pretargeting configs activate*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigActivateCall), [*pretargeting configs 
add targeted apps*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigAddTargetedAppCall), [*pretargeting configs add targeted publishers*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigAddTargetedPublisherCall), [*pretargeting configs add targeted sites*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigAddTargetedSiteCall), [*pretargeting configs create*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigCreateCall), [*pretargeting configs delete*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigDeleteCall), [*pretargeting configs get*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigGetCall), [*pretargeting configs list*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigListCall), [*pretargeting configs patch*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigPatchCall), [*pretargeting configs remove targeted apps*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigRemoveTargetedAppCall), [*pretargeting configs remove targeted publishers*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigRemoveTargetedPublisherCall), [*pretargeting configs remove targeted sites*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigRemoveTargetedSiteCall), [*pretargeting configs 
suspend*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPretargetingConfigSuspendCall), [*publisher connections batch approve*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPublisherConnectionBatchApproveCall), [*publisher connections batch reject*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPublisherConnectionBatchRejectCall), [*publisher connections get*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPublisherConnectionGetCall) and [*publisher connections list*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BidderPublisherConnectionListCall) -* [buyers](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::Buyer) - * [*creatives create*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerCreativeCreateCall), [*creatives get*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerCreativeGetCall), [*creatives list*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerCreativeListCall), [*creatives patch*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerCreativePatchCall), [*get*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerGetCall), [*get remarketing tag*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerGetRemarketingTagCall), [*list*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerListCall), [*user lists close*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerUserListCloseCall), [*user lists 
create*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerUserListCreateCall), [*user lists get*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerUserListGetCall), [*user lists get remarketing tag*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerUserListGetRemarketingTagCall), [*user lists list*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerUserListListCall), [*user lists open*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerUserListOpenCall) and [*user lists update*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/api::BuyerUserListUpdateCall) +* [bidders](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::Bidder) + * [*creatives list*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderCreativeListCall), [*creatives watch*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderCreativeWatchCall), [*endpoints get*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderEndpointGetCall), [*endpoints list*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderEndpointListCall), [*endpoints patch*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderEndpointPatchCall), [*get*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderGetCall), [*list*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderListCall), [*pretargeting configs activate*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigActivateCall), [*pretargeting configs add targeted 
apps*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigAddTargetedAppCall), [*pretargeting configs add targeted publishers*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigAddTargetedPublisherCall), [*pretargeting configs add targeted sites*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigAddTargetedSiteCall), [*pretargeting configs create*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigCreateCall), [*pretargeting configs delete*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigDeleteCall), [*pretargeting configs get*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigGetCall), [*pretargeting configs list*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigListCall), [*pretargeting configs patch*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigPatchCall), [*pretargeting configs remove targeted apps*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigRemoveTargetedAppCall), [*pretargeting configs remove targeted publishers*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigRemoveTargetedPublisherCall), [*pretargeting configs remove targeted sites*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigRemoveTargetedSiteCall), [*pretargeting configs suspend*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPretargetingConfigSuspendCall), [*publisher connections batch 
approve*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPublisherConnectionBatchApproveCall), [*publisher connections batch reject*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPublisherConnectionBatchRejectCall), [*publisher connections get*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPublisherConnectionGetCall) and [*publisher connections list*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BidderPublisherConnectionListCall) +* [buyers](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::Buyer) + * [*creatives create*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerCreativeCreateCall), [*creatives get*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerCreativeGetCall), [*creatives list*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerCreativeListCall), [*creatives patch*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerCreativePatchCall), [*get*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerGetCall), [*get remarketing tag*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerGetRemarketingTagCall), [*list*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerListCall), [*user lists close*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerUserListCloseCall), [*user lists create*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerUserListCreateCall), [*user lists get*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerUserListGetCall), [*user lists get remarketing 
tag*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerUserListGetRemarketingTagCall), [*user lists list*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerUserListListCall), [*user lists open*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerUserListOpenCall) and [*user lists update*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/api::BuyerUserListUpdateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/RealTimeBidding)** +* **[Hub](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/RealTimeBidding)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::CallBuilder) -* **[Resources](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::CallBuilder) +* **[Resources](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::Part)** + * **[Parts](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -147,17 +147,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -167,29 +167,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::Delegate) to the -[Method Builder](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::Delegate) to the +[Method Builder](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::RequestValue) and -[decodable](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::RequestValue) and +[decodable](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-realtimebidding1/5.0.2-beta-1+20230124/google_realtimebidding1/client::RequestValue) are moved +* [request values](https://docs.rs/google-realtimebidding1/5.0.2+20230124/google_realtimebidding1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/realtimebidding1/src/api.rs b/gen/realtimebidding1/src/api.rs index b7b7d59f2c..2114ef59bd 100644 --- a/gen/realtimebidding1/src/api.rs +++ b/gen/realtimebidding1/src/api.rs @@ -123,7 +123,7 @@ impl<'a, S> RealTimeBidding { RealTimeBidding { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://realtimebidding.googleapis.com/".to_string(), _root_url: "https://realtimebidding.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> RealTimeBidding { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/realtimebidding1/src/client.rs b/gen/realtimebidding1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/realtimebidding1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/realtimebidding1/src/lib.rs b/gen/realtimebidding1/src/lib.rs index 85bd1e9816..78795d7e3e 100644 --- a/gen/realtimebidding1/src/lib.rs +++ b/gen/realtimebidding1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Real-time Bidding* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *realtimebidding:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Real-time Bidding* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *realtimebidding:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Real-time Bidding* *v1* API can be found at the //! [official documentation site](https://developers.google.com/authorized-buyers/apis/realtimebidding/reference/rest/). diff --git a/gen/recaptchaenterprise1-cli/Cargo.toml b/gen/recaptchaenterprise1-cli/Cargo.toml index 314a6198a4..83bf7bfb96 100644 --- a/gen/recaptchaenterprise1-cli/Cargo.toml +++ b/gen/recaptchaenterprise1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-recaptchaenterprise1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Recaptcha Enterprise (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/recaptchaenterprise1-cli" @@ -20,13 +20,13 @@ name = "recaptchaenterprise1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-recaptchaenterprise1] path = "../recaptchaenterprise1" -version = "4.0.1+20220226" +version = "5.0.2+20230115" + diff --git a/gen/recaptchaenterprise1-cli/README.md b/gen/recaptchaenterprise1-cli/README.md index 
940678422f..f33eae1d0e 100644 --- a/gen/recaptchaenterprise1-cli/README.md +++ b/gen/recaptchaenterprise1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Recaptcha Enterprise* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *Recaptcha Enterprise* API at revision *20230115*. The CLI is at version *5.0.2*. ```bash recaptchaenterprise1 [options] @@ -39,6 +39,7 @@ recaptchaenterprise1 [options] keys-list [-p ]... [-o ] keys-migrate (-r )... [-p ]... [-o ] keys-patch (-r )... [-p ]... [-o ] + keys-retrieve-legacy-secret-key [-p ]... [-o ] relatedaccountgroupmemberships-search (-r )... [-p ]... [-o ] relatedaccountgroups-list [-p ]... [-o ] relatedaccountgroups-memberships-list [-p ]... [-o ] diff --git a/gen/recaptchaenterprise1-cli/mkdocs.yml b/gen/recaptchaenterprise1-cli/mkdocs.yml index a87d28ff47..96d1d3a305 100644 --- a/gen/recaptchaenterprise1-cli/mkdocs.yml +++ b/gen/recaptchaenterprise1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Recaptcha Enterprise v4.0.1+20220226 +site_name: Recaptcha Enterprise v5.0.2+20230115 site_url: http://byron.github.io/google-apis-rs/google-recaptchaenterprise1-cli site_description: A complete library to interact with Recaptcha Enterprise (protocol v1) @@ -7,20 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/recaptchaenterpr docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_assessments-annotate.md', 'Projects', 'Assessments Annotate'] -- ['projects_assessments-create.md', 'Projects', 'Assessments Create'] -- ['projects_keys-create.md', 'Projects', 'Keys Create'] -- ['projects_keys-delete.md', 'Projects', 'Keys Delete'] -- ['projects_keys-get.md', 'Projects', 'Keys Get'] -- ['projects_keys-get-metrics.md', 'Projects', 'Keys Get Metrics'] -- ['projects_keys-list.md', 'Projects', 'Keys List'] -- 
['projects_keys-migrate.md', 'Projects', 'Keys Migrate'] -- ['projects_keys-patch.md', 'Projects', 'Keys Patch'] -- ['projects_relatedaccountgroupmemberships-search.md', 'Projects', 'Relatedaccountgroupmemberships Search'] -- ['projects_relatedaccountgroups-list.md', 'Projects', 'Relatedaccountgroups List'] -- ['projects_relatedaccountgroups-memberships-list.md', 'Projects', 'Relatedaccountgroups Memberships List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Assessments Annotate': 'projects_assessments-annotate.md' + - 'Assessments Create': 'projects_assessments-create.md' + - 'Keys Create': 'projects_keys-create.md' + - 'Keys Delete': 'projects_keys-delete.md' + - 'Keys Get': 'projects_keys-get.md' + - 'Keys Get Metrics': 'projects_keys-get-metrics.md' + - 'Keys List': 'projects_keys-list.md' + - 'Keys Migrate': 'projects_keys-migrate.md' + - 'Keys Patch': 'projects_keys-patch.md' + - 'Keys Retrieve Legacy Secret Key': 'projects_keys-retrieve-legacy-secret-key.md' + - 'Relatedaccountgroupmemberships Search': 'projects_relatedaccountgroupmemberships-search.md' + - 'Relatedaccountgroups List': 'projects_relatedaccountgroups-list.md' + - 'Relatedaccountgroups Memberships List': 'projects_relatedaccountgroups-memberships-list.md' theme: readthedocs diff --git a/gen/recaptchaenterprise1-cli/src/client.rs b/gen/recaptchaenterprise1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/recaptchaenterprise1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char 
= '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/recaptchaenterprise1-cli/src/main.rs b/gen/recaptchaenterprise1-cli/src/main.rs index d918354816..9cb5cb6d58 100644 --- a/gen/recaptchaenterprise1-cli/src/main.rs +++ b/gen/recaptchaenterprise1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_recaptchaenterprise1::{api, Error, oauth2}; +use google_recaptchaenterprise1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -162,6 +161,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "account-defender-assessment.labels" => Some(("accountDefenderAssessment.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "account-verification.language-code" => Some(("accountVerification.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "account-verification.latest-verification-result" => Some(("accountVerification.latestVerificationResult", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "account-verification.username" => Some(("accountVerification.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event.expected-action" => Some(("event.expectedAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event.hashed-account-id" => Some(("event.hashedAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event.site-key" => Some(("event.siteKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -169,15 +171,21 @@ where "event.user-agent" => Some(("event.userAgent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event.user-ip-address" => Some(("event.userIpAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "private-password-leak-verification.encrypted-leak-match-prefixes" => Some(("privatePasswordLeakVerification.encryptedLeakMatchPrefixes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "private-password-leak-verification.encrypted-user-credentials-hash" => Some(("privatePasswordLeakVerification.encryptedUserCredentialsHash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "private-password-leak-verification.lookup-hash-prefix" => Some(("privatePasswordLeakVerification.lookupHashPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"private-password-leak-verification.reencrypted-user-credentials-hash" => Some(("privatePasswordLeakVerification.reencryptedUserCredentialsHash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "risk-analysis.reasons" => Some(("riskAnalysis.reasons", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "risk-analysis.score" => Some(("riskAnalysis.score", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "token-properties.action" => Some(("tokenProperties.action", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "token-properties.android-package-name" => Some(("tokenProperties.androidPackageName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "token-properties.create-time" => Some(("tokenProperties.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "token-properties.hostname" => Some(("tokenProperties.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "token-properties.invalid-reason" => Some(("tokenProperties.invalidReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "token-properties.ios-bundle-id" => Some(("tokenProperties.iosBundleId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "token-properties.valid" => Some(("tokenProperties.valid", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-defender-assessment", "action", "create-time", "event", "expected-action", "hashed-account-id", "hostname", "invalid-reason", "labels", "name", "reasons", "risk-analysis", "score", "site-key", "token", "token-properties", "user-agent", "user-ip-address", "valid"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-defender-assessment", "account-verification", "action", "android-package-name", "create-time", "encrypted-leak-match-prefixes", 
"encrypted-user-credentials-hash", "event", "expected-action", "hashed-account-id", "hostname", "invalid-reason", "ios-bundle-id", "labels", "language-code", "latest-verification-result", "lookup-hash-prefix", "name", "private-password-leak-verification", "reasons", "reencrypted-user-credentials-hash", "risk-analysis", "score", "site-key", "token", "token-properties", "user-agent", "user-ip-address", "username", "valid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -504,7 +512,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -576,8 +584,9 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "skip-billing-check" => Some(("skipBillingCheck", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec![]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["skip-billing-check"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -693,7 +702,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -742,6 +751,58 @@ where } } + async fn _projects_keys_retrieve_legacy_secret_key(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.projects().keys_retrieve_legacy_secret_key(opt.value_of("key").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_relatedaccountgroupmemberships_search(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -839,7 +900,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -898,7 +959,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -981,6 +1042,9 @@ where ("keys-patch", Some(opt)) => { call_result = self._projects_keys_patch(opt, dry_run, &mut err).await; }, + ("keys-retrieve-legacy-secret-key", Some(opt)) => { + call_result = self._projects_keys_retrieve_legacy_secret_key(opt, dry_run, &mut err).await; + }, ("relatedaccountgroupmemberships-search", Some(opt)) => { call_result = self._projects_relatedaccountgroupmemberships_search(opt, dry_run, &mut err).await; }, @@ -1069,7 +1133,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'assessments-annotate', 'assessments-create', 'keys-create', 'keys-delete', 'keys-get', 'keys-get-metrics', 'keys-list', 'keys-migrate', 'keys-patch', 'relatedaccountgroupmemberships-search', 'relatedaccountgroups-list' and 'relatedaccountgroups-memberships-list'", vec![ + ("projects", "methods: 'assessments-annotate', 'assessments-create', 'keys-create', 'keys-delete', 'keys-get', 'keys-get-metrics', 'keys-list', 'keys-migrate', 'keys-patch', 'keys-retrieve-legacy-secret-key', 'relatedaccountgroupmemberships-search', 'relatedaccountgroups-list' and 'relatedaccountgroups-memberships-list'", vec![ ("assessments-annotate", Some(r##"Annotates a previously created Assessment to provide additional information on whether the event turned out to be authentic or fraudulent."##), "Details at http://byron.github.io/google-apis-rs/google_recaptchaenterprise1_cli/projects_assessments-annotate", @@ -1292,6 +1356,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("keys-retrieve-legacy-secret-key", + 
Some(r##"Returns the secret key related to the specified public key. You must use the legacy secret key only in a 3rd party integration with legacy reCAPTCHA."##), + "Details at http://byron.github.io/google-apis-rs/google_recaptchaenterprise1_cli/projects_keys-retrieve-legacy-secret-key", + vec![ + (Some(r##"key"##), + None, + Some(r##"Required. The public key name linked to the requested secret key in the format "projects/{project}/keys/{key}"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1304,7 +1390,7 @@ async fn main() { vec![ (Some(r##"project"##), None, - Some(r##"Required. The name of the project to search related account group memberships from, in the format "projects/{project}"."##), + Some(r##"Required. The name of the project to search related account group memberships from. 
Specify the project name in the following format: "projects/{project}"."##), Some(true), Some(false)), @@ -1349,7 +1435,7 @@ async fn main() { Some(false)), ]), ("relatedaccountgroups-memberships-list", - Some(r##"Get the memberships in a group of related accounts."##), + Some(r##"Get memberships in a group of related accounts."##), "Details at http://byron.github.io/google-apis-rs/google_recaptchaenterprise1_cli/projects_relatedaccountgroups-memberships-list", vec![ (Some(r##"parent"##), @@ -1376,8 +1462,8 @@ async fn main() { let mut app = App::new("recaptchaenterprise1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") - .about("") + .version("5.0.2+20230115") + .about("Help protect your website from fraudulent activity, spam, and abuse without creating friction.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_recaptchaenterprise1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/recaptchaenterprise1/Cargo.toml b/gen/recaptchaenterprise1/Cargo.toml index 0dfdc24049..36e6a5f6c9 100644 --- a/gen/recaptchaenterprise1/Cargo.toml +++ b/gen/recaptchaenterprise1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-recaptchaenterprise1" -version = "5.0.2-beta-1+20230115" +version = "5.0.2+20230115" authors = ["Sebastian Thiel "] description = "A complete library to interact with Recaptcha Enterprise (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/recaptchaenterprise1" homepage = "https://cloud.google.com/recaptcha-enterprise/" -documentation = "https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115" +documentation = "https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115" license = "MIT" keywords = ["recaptchaenterprise", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/recaptchaenterprise1/README.md b/gen/recaptchaenterprise1/README.md index 74180153b2..af4461adf8 100644 --- a/gen/recaptchaenterprise1/README.md +++ 
b/gen/recaptchaenterprise1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-recaptchaenterprise1` library allows access to all features of the *Google Recaptcha Enterprise* service. -This documentation was generated from *Recaptcha Enterprise* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *recaptchaenterprise:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Recaptcha Enterprise* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *recaptchaenterprise:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Recaptcha Enterprise* *v1* API can be found at the [official documentation site](https://cloud.google.com/recaptcha-enterprise/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/RecaptchaEnterprise) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/RecaptchaEnterprise) ... 
* projects - * [*assessments annotate*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectAssessmentAnnotateCall), [*assessments create*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectAssessmentCreateCall), [*keys create*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectKeyCreateCall), [*keys delete*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectKeyDeleteCall), [*keys get*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectKeyGetCall), [*keys get metrics*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectKeyGetMetricCall), [*keys list*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectKeyListCall), [*keys migrate*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectKeyMigrateCall), [*keys patch*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectKeyPatchCall), [*keys retrieve legacy secret key*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectKeyRetrieveLegacySecretKeyCall), [*relatedaccountgroupmemberships search*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectRelatedaccountgroupmembershipSearchCall), [*relatedaccountgroups list*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectRelatedaccountgroupListCall) and [*relatedaccountgroups memberships 
list*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/api::ProjectRelatedaccountgroupMembershipListCall) + * [*assessments annotate*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectAssessmentAnnotateCall), [*assessments create*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectAssessmentCreateCall), [*keys create*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectKeyCreateCall), [*keys delete*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectKeyDeleteCall), [*keys get*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectKeyGetCall), [*keys get metrics*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectKeyGetMetricCall), [*keys list*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectKeyListCall), [*keys migrate*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectKeyMigrateCall), [*keys patch*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectKeyPatchCall), [*keys retrieve legacy secret key*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectKeyRetrieveLegacySecretKeyCall), [*relatedaccountgroupmemberships search*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectRelatedaccountgroupmembershipSearchCall), [*relatedaccountgroups list*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectRelatedaccountgroupListCall) and [*relatedaccountgroups memberships 
list*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/api::ProjectRelatedaccountgroupMembershipListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/RecaptchaEnterprise)** +* **[Hub](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/RecaptchaEnterprise)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::CallBuilder) -* **[Resources](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::CallBuilder) +* **[Resources](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::Part)** + * **[Parts](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::Delegate) to the -[Method Builder](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::Delegate) to the +[Method Builder](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::RequestValue) and -[decodable](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::RequestValue) and +[decodable](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-recaptchaenterprise1/5.0.2-beta-1+20230115/google_recaptchaenterprise1/client::RequestValue) are moved +* [request values](https://docs.rs/google-recaptchaenterprise1/5.0.2+20230115/google_recaptchaenterprise1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/recaptchaenterprise1/src/api.rs b/gen/recaptchaenterprise1/src/api.rs index 469441f40d..2867b4f8c4 100644 --- a/gen/recaptchaenterprise1/src/api.rs +++ b/gen/recaptchaenterprise1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> RecaptchaEnterprise { RecaptchaEnterprise { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://recaptchaenterprise.googleapis.com/".to_string(), _root_url: "https://recaptchaenterprise.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> RecaptchaEnterprise { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/recaptchaenterprise1/src/client.rs b/gen/recaptchaenterprise1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/recaptchaenterprise1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/recaptchaenterprise1/src/lib.rs b/gen/recaptchaenterprise1/src/lib.rs index 2d4b311fe8..e80aed9e6f 100644 --- a/gen/recaptchaenterprise1/src/lib.rs +++ b/gen/recaptchaenterprise1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Recaptcha Enterprise* crate version *5.0.2-beta-1+20230115*, where *20230115* is the exact revision of the *recaptchaenterprise:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Recaptcha Enterprise* crate version *5.0.2+20230115*, where *20230115* is the exact revision of the *recaptchaenterprise:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Recaptcha Enterprise* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/recaptcha-enterprise/). diff --git a/gen/recommendationengine1_beta1-cli/Cargo.toml b/gen/recommendationengine1_beta1-cli/Cargo.toml index ee5189f078..0425399be7 100644 --- a/gen/recommendationengine1_beta1-cli/Cargo.toml +++ b/gen/recommendationengine1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-recommendationengine1_beta1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Recommendations AI (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/recommendationengine1_beta1-cli" @@ -20,13 +20,13 @@ name = "recommendationengine1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-recommendationengine1_beta1] path = "../recommendationengine1_beta1" -version = "4.0.1+20220224" +version = "5.0.2+20230105" + diff --git a/gen/recommendationengine1_beta1-cli/README.md 
b/gen/recommendationengine1_beta1-cli/README.md index d42ce4c8d4..2e6fb0b2be 100644 --- a/gen/recommendationengine1_beta1-cli/README.md +++ b/gen/recommendationengine1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Recommendations AI* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Recommendations AI* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash recommendationengine1-beta1 [options] diff --git a/gen/recommendationengine1_beta1-cli/mkdocs.yml b/gen/recommendationengine1_beta1-cli/mkdocs.yml index b706867fb1..78c42ce8c7 100644 --- a/gen/recommendationengine1_beta1-cli/mkdocs.yml +++ b/gen/recommendationengine1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Recommendations AI v4.0.1+20220224 +site_name: Recommendations AI v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-recommendationengine1_beta1-cli site_description: A complete library to interact with Recommendations AI (protocol v1beta1) @@ -7,30 +7,31 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/recommendationen docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-catalogs-catalog-items-create.md', 'Projects', 'Locations Catalogs Catalog Items Create'] -- ['projects_locations-catalogs-catalog-items-delete.md', 'Projects', 'Locations Catalogs Catalog Items Delete'] -- ['projects_locations-catalogs-catalog-items-get.md', 'Projects', 'Locations Catalogs Catalog Items Get'] -- ['projects_locations-catalogs-catalog-items-import.md', 'Projects', 'Locations Catalogs Catalog Items Import'] -- ['projects_locations-catalogs-catalog-items-list.md', 'Projects', 'Locations Catalogs Catalog Items List'] -- ['projects_locations-catalogs-catalog-items-patch.md', 'Projects', 'Locations Catalogs Catalog Items Patch'] -- 
['projects_locations-catalogs-event-stores-operations-get.md', 'Projects', 'Locations Catalogs Event Stores Operations Get'] -- ['projects_locations-catalogs-event-stores-operations-list.md', 'Projects', 'Locations Catalogs Event Stores Operations List'] -- ['projects_locations-catalogs-event-stores-placements-predict.md', 'Projects', 'Locations Catalogs Event Stores Placements Predict'] -- ['projects_locations-catalogs-event-stores-prediction-api-key-registrations-create.md', 'Projects', 'Locations Catalogs Event Stores Prediction Api Key Registrations Create'] -- ['projects_locations-catalogs-event-stores-prediction-api-key-registrations-delete.md', 'Projects', 'Locations Catalogs Event Stores Prediction Api Key Registrations Delete'] -- ['projects_locations-catalogs-event-stores-prediction-api-key-registrations-list.md', 'Projects', 'Locations Catalogs Event Stores Prediction Api Key Registrations List'] -- ['projects_locations-catalogs-event-stores-user-events-collect.md', 'Projects', 'Locations Catalogs Event Stores User Events Collect'] -- ['projects_locations-catalogs-event-stores-user-events-import.md', 'Projects', 'Locations Catalogs Event Stores User Events Import'] -- ['projects_locations-catalogs-event-stores-user-events-list.md', 'Projects', 'Locations Catalogs Event Stores User Events List'] -- ['projects_locations-catalogs-event-stores-user-events-purge.md', 'Projects', 'Locations Catalogs Event Stores User Events Purge'] -- ['projects_locations-catalogs-event-stores-user-events-rejoin.md', 'Projects', 'Locations Catalogs Event Stores User Events Rejoin'] -- ['projects_locations-catalogs-event-stores-user-events-write.md', 'Projects', 'Locations Catalogs Event Stores User Events Write'] -- ['projects_locations-catalogs-list.md', 'Projects', 'Locations Catalogs List'] -- ['projects_locations-catalogs-operations-get.md', 'Projects', 'Locations Catalogs Operations Get'] -- ['projects_locations-catalogs-operations-list.md', 'Projects', 'Locations 
Catalogs Operations List'] -- ['projects_locations-catalogs-patch.md', 'Projects', 'Locations Catalogs Patch'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Catalogs Catalog Items Create': 'projects_locations-catalogs-catalog-items-create.md' + - 'Locations Catalogs Catalog Items Delete': 'projects_locations-catalogs-catalog-items-delete.md' + - 'Locations Catalogs Catalog Items Get': 'projects_locations-catalogs-catalog-items-get.md' + - 'Locations Catalogs Catalog Items Import': 'projects_locations-catalogs-catalog-items-import.md' + - 'Locations Catalogs Catalog Items List': 'projects_locations-catalogs-catalog-items-list.md' + - 'Locations Catalogs Catalog Items Patch': 'projects_locations-catalogs-catalog-items-patch.md' + - 'Locations Catalogs Event Stores Operations Get': 'projects_locations-catalogs-event-stores-operations-get.md' + - 'Locations Catalogs Event Stores Operations List': 'projects_locations-catalogs-event-stores-operations-list.md' + - 'Locations Catalogs Event Stores Placements Predict': 'projects_locations-catalogs-event-stores-placements-predict.md' + - 'Locations Catalogs Event Stores Prediction Api Key Registrations Create': 'projects_locations-catalogs-event-stores-prediction-api-key-registrations-create.md' + - 'Locations Catalogs Event Stores Prediction Api Key Registrations Delete': 'projects_locations-catalogs-event-stores-prediction-api-key-registrations-delete.md' + - 'Locations Catalogs Event Stores Prediction Api Key Registrations List': 'projects_locations-catalogs-event-stores-prediction-api-key-registrations-list.md' + - 'Locations Catalogs Event Stores User Events Collect': 'projects_locations-catalogs-event-stores-user-events-collect.md' + - 'Locations Catalogs Event Stores User Events Import': 'projects_locations-catalogs-event-stores-user-events-import.md' + - 'Locations Catalogs Event Stores User Events List': 'projects_locations-catalogs-event-stores-user-events-list.md' + - 'Locations Catalogs Event Stores 
User Events Purge': 'projects_locations-catalogs-event-stores-user-events-purge.md' + - 'Locations Catalogs Event Stores User Events Rejoin': 'projects_locations-catalogs-event-stores-user-events-rejoin.md' + - 'Locations Catalogs Event Stores User Events Write': 'projects_locations-catalogs-event-stores-user-events-write.md' + - 'Locations Catalogs List': 'projects_locations-catalogs-list.md' + - 'Locations Catalogs Operations Get': 'projects_locations-catalogs-operations-get.md' + - 'Locations Catalogs Operations List': 'projects_locations-catalogs-operations-list.md' + - 'Locations Catalogs Patch': 'projects_locations-catalogs-patch.md' theme: readthedocs diff --git a/gen/recommendationengine1_beta1-cli/src/client.rs b/gen/recommendationengine1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/recommendationengine1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/recommendationengine1_beta1-cli/src/main.rs b/gen/recommendationengine1_beta1-cli/src/main.rs index 99e488f6e6..0bc148989f 100644 --- a/gen/recommendationengine1_beta1-cli/src/main.rs +++ b/gen/recommendationengine1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_recommendationengine1_beta1::{api, Error, oauth2}; +use google_recommendationengine1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -358,7 +357,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -464,7 +463,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -575,7 +574,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ 
-884,7 +883,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -946,7 +945,7 @@ where call = call.uri(value.unwrap_or("")); }, "ets" => { - call = call.ets(value.unwrap_or("")); + call = call.ets( value.map(|v| arg_from_str(v, err, "ets", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1098,7 +1097,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1436,7 +1435,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1547,7 +1546,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1643,7 +1642,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2045,7 +2044,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. 
Full resource name of the format: `{name=projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store/placements/*}` The id of the recommendation engine placement. This id is used to identify the set of models that will be used to make the prediction. We currently support three placements with the following IDs by default: * `shopping_cart`: Predicts items frequently bought together with one or more catalog items in the same shopping session. Commonly displayed after `add-to-cart` events, on product detail pages, or on the shopping cart page. * `home_page`: Predicts the next product that a user will most likely engage with or purchase based on the shopping or viewing history of the specified `userId` or `visitorId`. For example - Recommendations for you. * `product_detail`: Predicts the next product that a user will most likely engage with or purchase. The prediction is based on the shopping or viewing history of the specified `userId` or `visitorId` and its relevance to a specified `CatalogItem`. Typically used on product detail pages. For example - More items like this. * `recently_viewed_default`: Returns up to 75 items recently viewed by the specified `userId` or `visitorId`, most recent ones first. Returns nothing if neither of them has viewed any items yet. For example - Recently viewed. The full list of available placements can be seen at https://console.cloud.google.com/recommendation/datafeeds/default_catalog/dashboard"##), + None, Some(true), Some(false)), @@ -2395,7 +2394,7 @@ async fn main() { let mut app = App::new("recommendationengine1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230105") .about("Note that we now highly recommend new customers to use Retail API, which incorporates the GA version of the Recommendations AI funtionalities. To enable Retail API, please visit https://console.cloud.google.com/apis/library/retail.googleapis.com. 
The Recommendations AI service enables customers to build end-to-end personalized recommendation systems without requiring a high level of expertise in machine learning, recommendation system, or Google Cloud.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_recommendationengine1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/recommendationengine1_beta1/Cargo.toml b/gen/recommendationengine1_beta1/Cargo.toml index 9bdb318a38..951fc08760 100644 --- a/gen/recommendationengine1_beta1/Cargo.toml +++ b/gen/recommendationengine1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-recommendationengine1_beta1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Recommendations AI (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/recommendationengine1_beta1" homepage = "https://cloud.google.com/recommendations-ai/docs" -documentation = "https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105" license = "MIT" keywords = ["recommendationengine", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/recommendationengine1_beta1/README.md b/gen/recommendationengine1_beta1/README.md index 3f419e0730..d0ad2f4bdf 100644 --- a/gen/recommendationengine1_beta1/README.md +++ b/gen/recommendationengine1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-recommendationengine1_beta1` library allows access to all features of the *Google Recommendations AI* service. -This documentation was generated from *Recommendations AI* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *recommendationengine:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Recommendations AI* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *recommendationengine:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Recommendations AI* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/recommendations-ai/docs). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/RecommendationsAI) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/RecommendationsAI) ... * projects - * [*locations catalogs catalog items create*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemCreateCall), [*locations catalogs catalog items delete*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemDeleteCall), [*locations catalogs catalog items get*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemGetCall), [*locations catalogs catalog items import*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemImportCall), [*locations catalogs catalog items list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemListCall), [*locations catalogs catalog items 
patch*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemPatchCall), [*locations catalogs event stores operations get*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreOperationGetCall), [*locations catalogs event stores operations list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreOperationListCall), [*locations catalogs event stores placements predict*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStorePlacementPredictCall), [*locations catalogs event stores prediction api key registrations create*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStorePredictionApiKeyRegistrationCreateCall), [*locations catalogs event stores prediction api key registrations delete*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStorePredictionApiKeyRegistrationDeleteCall), [*locations catalogs event stores prediction api key registrations list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStorePredictionApiKeyRegistrationListCall), [*locations catalogs event stores user events collect*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventCollectCall), [*locations catalogs event stores user events 
import*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventImportCall), [*locations catalogs event stores user events list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventListCall), [*locations catalogs event stores user events purge*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventPurgeCall), [*locations catalogs event stores user events rejoin*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventRejoinCall), [*locations catalogs event stores user events write*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventWriteCall), [*locations catalogs list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogListCall), [*locations catalogs operations get*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogOperationGetCall), [*locations catalogs operations list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogOperationListCall) and [*locations catalogs patch*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogPatchCall) + * [*locations catalogs catalog items create*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemCreateCall), 
[*locations catalogs catalog items delete*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemDeleteCall), [*locations catalogs catalog items get*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemGetCall), [*locations catalogs catalog items import*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemImportCall), [*locations catalogs catalog items list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemListCall), [*locations catalogs catalog items patch*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogCatalogItemPatchCall), [*locations catalogs event stores operations get*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreOperationGetCall), [*locations catalogs event stores operations list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreOperationListCall), [*locations catalogs event stores placements predict*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStorePlacementPredictCall), [*locations catalogs event stores prediction api key registrations create*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStorePredictionApiKeyRegistrationCreateCall), [*locations catalogs event stores prediction api key registrations 
delete*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStorePredictionApiKeyRegistrationDeleteCall), [*locations catalogs event stores prediction api key registrations list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStorePredictionApiKeyRegistrationListCall), [*locations catalogs event stores user events collect*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventCollectCall), [*locations catalogs event stores user events import*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventImportCall), [*locations catalogs event stores user events list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventListCall), [*locations catalogs event stores user events purge*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventPurgeCall), [*locations catalogs event stores user events rejoin*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventRejoinCall), [*locations catalogs event stores user events write*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogEventStoreUserEventWriteCall), [*locations catalogs list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogListCall), [*locations catalogs operations 
get*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogOperationGetCall), [*locations catalogs operations list*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogOperationListCall) and [*locations catalogs patch*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/api::ProjectLocationCatalogPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/RecommendationsAI)** +* **[Hub](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/RecommendationsAI)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::Resource)** * 
primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-recommendationengine1_beta1/5.0.2-beta-1+20230105/google_recommendationengine1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-recommendationengine1_beta1/5.0.2+20230105/google_recommendationengine1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/recommendationengine1_beta1/src/api.rs b/gen/recommendationengine1_beta1/src/api.rs index 5fe5c422a5..b7ee7bcb12 100644 --- a/gen/recommendationengine1_beta1/src/api.rs +++ b/gen/recommendationengine1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> RecommendationsAI { RecommendationsAI { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://recommendationengine.googleapis.com/".to_string(), _root_url: "https://recommendationengine.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> RecommendationsAI { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/recommendationengine1_beta1/src/client.rs b/gen/recommendationengine1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/recommendationengine1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after 
the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. 
The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. 
- fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. 
- /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. 
- MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." 
- ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. -pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. 
-#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/recommendationengine1_beta1/src/lib.rs b/gen/recommendationengine1_beta1/src/lib.rs index 39bfb4501d..c1cbfc6228 100644 --- a/gen/recommendationengine1_beta1/src/lib.rs +++ b/gen/recommendationengine1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Recommendations AI* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *recommendationengine:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Recommendations AI* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *recommendationengine:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Recommendations AI* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/recommendations-ai/docs). diff --git a/gen/recommender1-cli/Cargo.toml b/gen/recommender1-cli/Cargo.toml index b8e2d3c5a5..1281152efe 100644 --- a/gen/recommender1-cli/Cargo.toml +++ b/gen/recommender1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-recommender1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230108" authors = ["Sebastian Thiel "] description = "A complete library to interact with Recommender (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/recommender1-cli" @@ -20,13 +20,13 @@ name = "recommender1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-recommender1] path = "../recommender1" -version = "4.0.1+20220228" +version = "5.0.2+20230108" + diff --git a/gen/recommender1-cli/README.md b/gen/recommender1-cli/README.md index 728abcc1d8..387f7995e9 100644 --- a/gen/recommender1-cli/README.md +++ b/gen/recommender1-cli/README.md @@ -25,19 +25,24 
@@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Recommender* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Recommender* API at revision *20230108*. The CLI is at version *5.0.2*. ```bash recommender1 [options] billing-accounts + locations-insight-types-get-config [-p ]... [-o ] locations-insight-types-insights-get [-p ]... [-o ] locations-insight-types-insights-list [-p ]... [-o ] locations-insight-types-insights-mark-accepted (-r )... [-p ]... [-o ] + locations-insight-types-update-config (-r )... [-p ]... [-o ] + locations-recommenders-get-config [-p ]... [-o ] locations-recommenders-recommendations-get [-p ]... [-o ] locations-recommenders-recommendations-list [-p ]... [-o ] locations-recommenders-recommendations-mark-claimed (-r )... [-p ]... [-o ] + locations-recommenders-recommendations-mark-dismissed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-failed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-succeeded (-r )... [-p ]... [-o ] + locations-recommenders-update-config (-r )... [-p ]... [-o ] folders locations-insight-types-insights-get [-p ]... [-o ] locations-insight-types-insights-list [-p ]... [-o ] @@ -45,26 +50,37 @@ recommender1 [options] locations-recommenders-recommendations-get [-p ]... [-o ] locations-recommenders-recommendations-list [-p ]... [-o ] locations-recommenders-recommendations-mark-claimed (-r )... [-p ]... [-o ] + locations-recommenders-recommendations-mark-dismissed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-failed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-succeeded (-r )... [-p ]... [-o ] organizations + locations-insight-types-get-config [-p ]... [-o ] locations-insight-types-insights-get [-p ]... [-o ] locations-insight-types-insights-list [-p ]... 
[-o ] locations-insight-types-insights-mark-accepted (-r )... [-p ]... [-o ] + locations-insight-types-update-config (-r )... [-p ]... [-o ] + locations-recommenders-get-config [-p ]... [-o ] locations-recommenders-recommendations-get [-p ]... [-o ] locations-recommenders-recommendations-list [-p ]... [-o ] locations-recommenders-recommendations-mark-claimed (-r )... [-p ]... [-o ] + locations-recommenders-recommendations-mark-dismissed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-failed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-succeeded (-r )... [-p ]... [-o ] + locations-recommenders-update-config (-r )... [-p ]... [-o ] projects + locations-insight-types-get-config [-p ]... [-o ] locations-insight-types-insights-get [-p ]... [-o ] locations-insight-types-insights-list [-p ]... [-o ] locations-insight-types-insights-mark-accepted (-r )... [-p ]... [-o ] + locations-insight-types-update-config (-r )... [-p ]... [-o ] + locations-recommenders-get-config [-p ]... [-o ] locations-recommenders-recommendations-get [-p ]... [-o ] locations-recommenders-recommendations-list [-p ]... [-o ] locations-recommenders-recommendations-mark-claimed (-r )... [-p ]... [-o ] + locations-recommenders-recommendations-mark-dismissed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-failed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-succeeded (-r )... [-p ]... [-o ] + locations-recommenders-update-config (-r )... [-p ]... 
[-o ] recommender1 --help Configuration: diff --git a/gen/recommender1-cli/mkdocs.yml b/gen/recommender1-cli/mkdocs.yml index 1b7ae11b2b..8af5739dbb 100644 --- a/gen/recommender1-cli/mkdocs.yml +++ b/gen/recommender1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Recommender v4.0.1+20220228 +site_name: Recommender v5.0.2+20230108 site_url: http://byron.github.io/google-apis-rs/google-recommender1-cli site_description: A complete library to interact with Recommender (protocol v1) @@ -7,40 +7,60 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/recommender1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['billing-accounts_locations-insight-types-insights-get.md', 'Billing Accounts', 'Locations Insight Types Insights Get'] -- ['billing-accounts_locations-insight-types-insights-list.md', 'Billing Accounts', 'Locations Insight Types Insights List'] -- ['billing-accounts_locations-insight-types-insights-mark-accepted.md', 'Billing Accounts', 'Locations Insight Types Insights Mark Accepted'] -- ['billing-accounts_locations-recommenders-recommendations-get.md', 'Billing Accounts', 'Locations Recommenders Recommendations Get'] -- ['billing-accounts_locations-recommenders-recommendations-list.md', 'Billing Accounts', 'Locations Recommenders Recommendations List'] -- ['billing-accounts_locations-recommenders-recommendations-mark-claimed.md', 'Billing Accounts', 'Locations Recommenders Recommendations Mark Claimed'] -- ['billing-accounts_locations-recommenders-recommendations-mark-failed.md', 'Billing Accounts', 'Locations Recommenders Recommendations Mark Failed'] -- ['billing-accounts_locations-recommenders-recommendations-mark-succeeded.md', 'Billing Accounts', 'Locations Recommenders Recommendations Mark Succeeded'] -- ['folders_locations-insight-types-insights-get.md', 'Folders', 'Locations Insight Types Insights Get'] -- ['folders_locations-insight-types-insights-list.md', 'Folders', 'Locations Insight Types Insights List'] -- 
['folders_locations-insight-types-insights-mark-accepted.md', 'Folders', 'Locations Insight Types Insights Mark Accepted'] -- ['folders_locations-recommenders-recommendations-get.md', 'Folders', 'Locations Recommenders Recommendations Get'] -- ['folders_locations-recommenders-recommendations-list.md', 'Folders', 'Locations Recommenders Recommendations List'] -- ['folders_locations-recommenders-recommendations-mark-claimed.md', 'Folders', 'Locations Recommenders Recommendations Mark Claimed'] -- ['folders_locations-recommenders-recommendations-mark-failed.md', 'Folders', 'Locations Recommenders Recommendations Mark Failed'] -- ['folders_locations-recommenders-recommendations-mark-succeeded.md', 'Folders', 'Locations Recommenders Recommendations Mark Succeeded'] -- ['organizations_locations-insight-types-insights-get.md', 'Organizations', 'Locations Insight Types Insights Get'] -- ['organizations_locations-insight-types-insights-list.md', 'Organizations', 'Locations Insight Types Insights List'] -- ['organizations_locations-insight-types-insights-mark-accepted.md', 'Organizations', 'Locations Insight Types Insights Mark Accepted'] -- ['organizations_locations-recommenders-recommendations-get.md', 'Organizations', 'Locations Recommenders Recommendations Get'] -- ['organizations_locations-recommenders-recommendations-list.md', 'Organizations', 'Locations Recommenders Recommendations List'] -- ['organizations_locations-recommenders-recommendations-mark-claimed.md', 'Organizations', 'Locations Recommenders Recommendations Mark Claimed'] -- ['organizations_locations-recommenders-recommendations-mark-failed.md', 'Organizations', 'Locations Recommenders Recommendations Mark Failed'] -- ['organizations_locations-recommenders-recommendations-mark-succeeded.md', 'Organizations', 'Locations Recommenders Recommendations Mark Succeeded'] -- ['projects_locations-insight-types-insights-get.md', 'Projects', 'Locations Insight Types Insights Get'] -- 
['projects_locations-insight-types-insights-list.md', 'Projects', 'Locations Insight Types Insights List'] -- ['projects_locations-insight-types-insights-mark-accepted.md', 'Projects', 'Locations Insight Types Insights Mark Accepted'] -- ['projects_locations-recommenders-recommendations-get.md', 'Projects', 'Locations Recommenders Recommendations Get'] -- ['projects_locations-recommenders-recommendations-list.md', 'Projects', 'Locations Recommenders Recommendations List'] -- ['projects_locations-recommenders-recommendations-mark-claimed.md', 'Projects', 'Locations Recommenders Recommendations Mark Claimed'] -- ['projects_locations-recommenders-recommendations-mark-failed.md', 'Projects', 'Locations Recommenders Recommendations Mark Failed'] -- ['projects_locations-recommenders-recommendations-mark-succeeded.md', 'Projects', 'Locations Recommenders Recommendations Mark Succeeded'] +nav: +- Home: 'index.md' +- 'Billing Accounts': + - 'Locations Insight Types Get Config': 'billing-accounts_locations-insight-types-get-config.md' + - 'Locations Insight Types Insights Get': 'billing-accounts_locations-insight-types-insights-get.md' + - 'Locations Insight Types Insights List': 'billing-accounts_locations-insight-types-insights-list.md' + - 'Locations Insight Types Insights Mark Accepted': 'billing-accounts_locations-insight-types-insights-mark-accepted.md' + - 'Locations Insight Types Update Config': 'billing-accounts_locations-insight-types-update-config.md' + - 'Locations Recommenders Get Config': 'billing-accounts_locations-recommenders-get-config.md' + - 'Locations Recommenders Recommendations Get': 'billing-accounts_locations-recommenders-recommendations-get.md' + - 'Locations Recommenders Recommendations List': 'billing-accounts_locations-recommenders-recommendations-list.md' + - 'Locations Recommenders Recommendations Mark Claimed': 'billing-accounts_locations-recommenders-recommendations-mark-claimed.md' + - 'Locations Recommenders Recommendations Mark Dismissed': 
'billing-accounts_locations-recommenders-recommendations-mark-dismissed.md' + - 'Locations Recommenders Recommendations Mark Failed': 'billing-accounts_locations-recommenders-recommendations-mark-failed.md' + - 'Locations Recommenders Recommendations Mark Succeeded': 'billing-accounts_locations-recommenders-recommendations-mark-succeeded.md' + - 'Locations Recommenders Update Config': 'billing-accounts_locations-recommenders-update-config.md' +- 'Folders': + - 'Locations Insight Types Insights Get': 'folders_locations-insight-types-insights-get.md' + - 'Locations Insight Types Insights List': 'folders_locations-insight-types-insights-list.md' + - 'Locations Insight Types Insights Mark Accepted': 'folders_locations-insight-types-insights-mark-accepted.md' + - 'Locations Recommenders Recommendations Get': 'folders_locations-recommenders-recommendations-get.md' + - 'Locations Recommenders Recommendations List': 'folders_locations-recommenders-recommendations-list.md' + - 'Locations Recommenders Recommendations Mark Claimed': 'folders_locations-recommenders-recommendations-mark-claimed.md' + - 'Locations Recommenders Recommendations Mark Dismissed': 'folders_locations-recommenders-recommendations-mark-dismissed.md' + - 'Locations Recommenders Recommendations Mark Failed': 'folders_locations-recommenders-recommendations-mark-failed.md' + - 'Locations Recommenders Recommendations Mark Succeeded': 'folders_locations-recommenders-recommendations-mark-succeeded.md' +- 'Organizations': + - 'Locations Insight Types Get Config': 'organizations_locations-insight-types-get-config.md' + - 'Locations Insight Types Insights Get': 'organizations_locations-insight-types-insights-get.md' + - 'Locations Insight Types Insights List': 'organizations_locations-insight-types-insights-list.md' + - 'Locations Insight Types Insights Mark Accepted': 'organizations_locations-insight-types-insights-mark-accepted.md' + - 'Locations Insight Types Update Config': 
'organizations_locations-insight-types-update-config.md' + - 'Locations Recommenders Get Config': 'organizations_locations-recommenders-get-config.md' + - 'Locations Recommenders Recommendations Get': 'organizations_locations-recommenders-recommendations-get.md' + - 'Locations Recommenders Recommendations List': 'organizations_locations-recommenders-recommendations-list.md' + - 'Locations Recommenders Recommendations Mark Claimed': 'organizations_locations-recommenders-recommendations-mark-claimed.md' + - 'Locations Recommenders Recommendations Mark Dismissed': 'organizations_locations-recommenders-recommendations-mark-dismissed.md' + - 'Locations Recommenders Recommendations Mark Failed': 'organizations_locations-recommenders-recommendations-mark-failed.md' + - 'Locations Recommenders Recommendations Mark Succeeded': 'organizations_locations-recommenders-recommendations-mark-succeeded.md' + - 'Locations Recommenders Update Config': 'organizations_locations-recommenders-update-config.md' +- 'Projects': + - 'Locations Insight Types Get Config': 'projects_locations-insight-types-get-config.md' + - 'Locations Insight Types Insights Get': 'projects_locations-insight-types-insights-get.md' + - 'Locations Insight Types Insights List': 'projects_locations-insight-types-insights-list.md' + - 'Locations Insight Types Insights Mark Accepted': 'projects_locations-insight-types-insights-mark-accepted.md' + - 'Locations Insight Types Update Config': 'projects_locations-insight-types-update-config.md' + - 'Locations Recommenders Get Config': 'projects_locations-recommenders-get-config.md' + - 'Locations Recommenders Recommendations Get': 'projects_locations-recommenders-recommendations-get.md' + - 'Locations Recommenders Recommendations List': 'projects_locations-recommenders-recommendations-list.md' + - 'Locations Recommenders Recommendations Mark Claimed': 'projects_locations-recommenders-recommendations-mark-claimed.md' + - 'Locations Recommenders Recommendations Mark 
Dismissed': 'projects_locations-recommenders-recommendations-mark-dismissed.md' + - 'Locations Recommenders Recommendations Mark Failed': 'projects_locations-recommenders-recommendations-mark-failed.md' + - 'Locations Recommenders Recommendations Mark Succeeded': 'projects_locations-recommenders-recommendations-mark-succeeded.md' + - 'Locations Recommenders Update Config': 'projects_locations-recommenders-update-config.md' theme: readthedocs diff --git a/gen/recommender1-cli/src/client.rs b/gen/recommender1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/recommender1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/recommender1-cli/src/main.rs b/gen/recommender1-cli/src/main.rs index 07a061f789..b280180c97 100644 --- a/gen/recommender1-cli/src/main.rs +++ b/gen/recommender1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_recommender1::{api, Error, oauth2}; +use google_recommender1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,58 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _billing_accounts_locations_insight_types_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.billing_accounts().locations_insight_types_get_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_insight_types_insights_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.billing_accounts().locations_insight_types_insights_get(opt.value_of("name").unwrap_or("")); @@ -113,7 +164,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -251,6 +302,155 @@ where } } + async fn _billing_accounts_locations_insight_types_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if 
value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1InsightTypeConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.billing_accounts().locations_insight_types_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _billing_accounts_locations_recommenders_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.billing_accounts().locations_recommenders_get_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_recommenders_recommendations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.billing_accounts().locations_recommenders_recommendations_get(opt.value_of("name").unwrap_or("")); @@ -313,7 +513,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -451,6 +651,91 @@ where } } + async fn _billing_accounts_locations_recommenders_recommendations_mark_dismissed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = 
json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1MarkRecommendationDismissedRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.billing_accounts().locations_recommenders_recommendations_mark_dismissed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_recommenders_recommendations_mark_failed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -623,6 +908,103 @@ where } } + async fn _billing_accounts_locations_recommenders_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1RecommenderConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.billing_accounts().locations_recommenders_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } 
+ } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _folders_locations_insight_types_insights_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.folders().locations_insight_types_insights_get(opt.value_of("name").unwrap_or("")); @@ -685,7 +1067,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -885,7 +1267,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -1023,6 +1405,91 @@ where } } + async fn _folders_locations_recommenders_recommendations_mark_dismissed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1MarkRecommendationDismissedRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.folders().locations_recommenders_recommendations_mark_dismissed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", 
key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _folders_locations_recommenders_recommendations_mark_failed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1195,6 +1662,58 @@ where } } + async fn _organizations_locations_insight_types_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().locations_insight_types_get_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_locations_insight_types_insights_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().locations_insight_types_insights_get(opt.value_of("name").unwrap_or("")); @@ -1257,7 +1776,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1395,6 +1914,155 @@ where } } + async fn _organizations_locations_insight_types_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1InsightTypeConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_insight_types_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, 
false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _organizations_locations_recommenders_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.organizations().locations_recommenders_get_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, 
value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_locations_recommenders_recommendations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().locations_recommenders_recommendations_get(opt.value_of("name").unwrap_or("")); @@ -1457,7 +2125,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1595,6 +2263,91 @@ where } } + async fn 
_organizations_locations_recommenders_recommendations_mark_dismissed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1MarkRecommendationDismissedRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_recommenders_recommendations_mark_dismissed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_locations_recommenders_recommendations_mark_failed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1767,6 +2520,155 @@ where } } + async fn _organizations_locations_recommenders_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, 
JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1RecommenderConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_recommenders_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_insight_types_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_insight_types_get_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_insight_types_insights_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_insight_types_insights_get(opt.value_of("name").unwrap_or("")); @@ -1829,7 +2731,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1967,6 +2869,155 @@ where } } + async fn _projects_locations_insight_types_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let 
Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1InsightTypeConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_insight_types_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| 
arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_recommenders_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_recommenders_get_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_recommenders_recommendations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_recommenders_recommendations_get(opt.value_of("name").unwrap_or("")); @@ -2029,7 +3080,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2167,6 +3218,91 @@ where } } + async fn _projects_locations_recommenders_recommendations_mark_dismissed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = 
json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1MarkRecommendationDismissedRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_recommenders_recommendations_mark_dismissed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_recommenders_recommendations_mark_failed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2339,6 +3475,103 @@ where } } + async fn _projects_locations_recommenders_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1RecommenderConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_recommenders_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if 
!found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -2346,6 +3579,9 @@ where match self.opt.subcommand() { ("billing-accounts", Some(opt)) => { match opt.subcommand() { + ("locations-insight-types-get-config", Some(opt)) => { + call_result = self._billing_accounts_locations_insight_types_get_config(opt, dry_run, &mut err).await; + }, ("locations-insight-types-insights-get", Some(opt)) => { call_result = self._billing_accounts_locations_insight_types_insights_get(opt, dry_run, &mut err).await; }, @@ -2355,6 +3591,12 @@ where ("locations-insight-types-insights-mark-accepted", Some(opt)) => { call_result = self._billing_accounts_locations_insight_types_insights_mark_accepted(opt, dry_run, &mut err).await; }, + ("locations-insight-types-update-config", Some(opt)) => { + call_result = 
self._billing_accounts_locations_insight_types_update_config(opt, dry_run, &mut err).await; + }, + ("locations-recommenders-get-config", Some(opt)) => { + call_result = self._billing_accounts_locations_recommenders_get_config(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-get", Some(opt)) => { call_result = self._billing_accounts_locations_recommenders_recommendations_get(opt, dry_run, &mut err).await; }, @@ -2364,12 +3606,18 @@ where ("locations-recommenders-recommendations-mark-claimed", Some(opt)) => { call_result = self._billing_accounts_locations_recommenders_recommendations_mark_claimed(opt, dry_run, &mut err).await; }, + ("locations-recommenders-recommendations-mark-dismissed", Some(opt)) => { + call_result = self._billing_accounts_locations_recommenders_recommendations_mark_dismissed(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-mark-failed", Some(opt)) => { call_result = self._billing_accounts_locations_recommenders_recommendations_mark_failed(opt, dry_run, &mut err).await; }, ("locations-recommenders-recommendations-mark-succeeded", Some(opt)) => { call_result = self._billing_accounts_locations_recommenders_recommendations_mark_succeeded(opt, dry_run, &mut err).await; }, + ("locations-recommenders-update-config", Some(opt)) => { + call_result = self._billing_accounts_locations_recommenders_update_config(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("billing-accounts".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2396,6 +3644,9 @@ where ("locations-recommenders-recommendations-mark-claimed", Some(opt)) => { call_result = self._folders_locations_recommenders_recommendations_mark_claimed(opt, dry_run, &mut err).await; }, + ("locations-recommenders-recommendations-mark-dismissed", Some(opt)) => { + call_result = self._folders_locations_recommenders_recommendations_mark_dismissed(opt, dry_run, &mut err).await; + }, 
("locations-recommenders-recommendations-mark-failed", Some(opt)) => { call_result = self._folders_locations_recommenders_recommendations_mark_failed(opt, dry_run, &mut err).await; }, @@ -2410,6 +3661,9 @@ where }, ("organizations", Some(opt)) => { match opt.subcommand() { + ("locations-insight-types-get-config", Some(opt)) => { + call_result = self._organizations_locations_insight_types_get_config(opt, dry_run, &mut err).await; + }, ("locations-insight-types-insights-get", Some(opt)) => { call_result = self._organizations_locations_insight_types_insights_get(opt, dry_run, &mut err).await; }, @@ -2419,6 +3673,12 @@ where ("locations-insight-types-insights-mark-accepted", Some(opt)) => { call_result = self._organizations_locations_insight_types_insights_mark_accepted(opt, dry_run, &mut err).await; }, + ("locations-insight-types-update-config", Some(opt)) => { + call_result = self._organizations_locations_insight_types_update_config(opt, dry_run, &mut err).await; + }, + ("locations-recommenders-get-config", Some(opt)) => { + call_result = self._organizations_locations_recommenders_get_config(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-get", Some(opt)) => { call_result = self._organizations_locations_recommenders_recommendations_get(opt, dry_run, &mut err).await; }, @@ -2428,12 +3688,18 @@ where ("locations-recommenders-recommendations-mark-claimed", Some(opt)) => { call_result = self._organizations_locations_recommenders_recommendations_mark_claimed(opt, dry_run, &mut err).await; }, + ("locations-recommenders-recommendations-mark-dismissed", Some(opt)) => { + call_result = self._organizations_locations_recommenders_recommendations_mark_dismissed(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-mark-failed", Some(opt)) => { call_result = self._organizations_locations_recommenders_recommendations_mark_failed(opt, dry_run, &mut err).await; }, ("locations-recommenders-recommendations-mark-succeeded", Some(opt)) 
=> { call_result = self._organizations_locations_recommenders_recommendations_mark_succeeded(opt, dry_run, &mut err).await; }, + ("locations-recommenders-update-config", Some(opt)) => { + call_result = self._organizations_locations_recommenders_update_config(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("organizations".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2442,6 +3708,9 @@ where }, ("projects", Some(opt)) => { match opt.subcommand() { + ("locations-insight-types-get-config", Some(opt)) => { + call_result = self._projects_locations_insight_types_get_config(opt, dry_run, &mut err).await; + }, ("locations-insight-types-insights-get", Some(opt)) => { call_result = self._projects_locations_insight_types_insights_get(opt, dry_run, &mut err).await; }, @@ -2451,6 +3720,12 @@ where ("locations-insight-types-insights-mark-accepted", Some(opt)) => { call_result = self._projects_locations_insight_types_insights_mark_accepted(opt, dry_run, &mut err).await; }, + ("locations-insight-types-update-config", Some(opt)) => { + call_result = self._projects_locations_insight_types_update_config(opt, dry_run, &mut err).await; + }, + ("locations-recommenders-get-config", Some(opt)) => { + call_result = self._projects_locations_recommenders_get_config(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-get", Some(opt)) => { call_result = self._projects_locations_recommenders_recommendations_get(opt, dry_run, &mut err).await; }, @@ -2460,12 +3735,18 @@ where ("locations-recommenders-recommendations-mark-claimed", Some(opt)) => { call_result = self._projects_locations_recommenders_recommendations_mark_claimed(opt, dry_run, &mut err).await; }, + ("locations-recommenders-recommendations-mark-dismissed", Some(opt)) => { + call_result = self._projects_locations_recommenders_recommendations_mark_dismissed(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-mark-failed", 
Some(opt)) => { call_result = self._projects_locations_recommenders_recommendations_mark_failed(opt, dry_run, &mut err).await; }, ("locations-recommenders-recommendations-mark-succeeded", Some(opt)) => { call_result = self._projects_locations_recommenders_recommendations_mark_succeeded(opt, dry_run, &mut err).await; }, + ("locations-recommenders-update-config", Some(opt)) => { + call_result = self._projects_locations_recommenders_update_config(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2545,7 +3826,29 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("billing-accounts", "methods: 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ + ("billing-accounts", "methods: 'locations-insight-types-get-config', 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-insight-types-update-config', 'locations-recommenders-get-config', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-dismissed', 'locations-recommenders-recommendations-mark-failed', 'locations-recommenders-recommendations-mark-succeeded' and 'locations-recommenders-update-config'", vec![ + ("locations-insight-types-get-config", + Some(r##"Gets the requested InsightTypeConfig. 
There is only one instance of the config for each InsightType."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/billing-accounts_locations-insight-types-get-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the InsightTypeConfig to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-insight-types-insights-get", Some(r##"Gets the requested insight. Requires the recommender.*.get IAM permission for the specified insight type."##), "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/billing-accounts_locations-insight-types-insights-get", @@ -2612,6 +3915,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-insight-types-update-config", + Some(r##"Updates an InsightTypeConfig change. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/billing-accounts_locations-insight-types-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of insight type config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-get-config", + Some(r##"Gets the requested Recommender Config. There is only one instance of the config for each Recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/billing-accounts_locations-recommenders-get-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the Recommendation Config to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2684,6 +4037,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-recommendations-mark-dismissed", + Some(r##"Mark the Recommendation State as Dismissed. 
Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/billing-accounts_locations-recommenders-recommendations-mark-dismissed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the recommendation."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2740,6 +4121,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-update-config", + Some(r##"Updates a Recommender Config. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/billing-accounts_locations-recommenders-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of recommender config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2748,7 +4157,7 @@ async fn main() { ]), ]), - ("folders", "methods: 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ + ("folders", "methods: 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-dismissed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ ("locations-insight-types-insights-get", Some(r##"Gets the requested insight. 
Requires the recommender.*.get IAM permission for the specified insight type."##), "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/folders_locations-insight-types-insights-get", @@ -2887,6 +4296,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-recommendations-mark-dismissed", + Some(r##"Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/folders_locations-recommenders-recommendations-mark-dismissed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the recommendation."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2951,7 +4388,29 @@ async fn main() { ]), ]), - ("organizations", "methods: 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ + ("organizations", 
"methods: 'locations-insight-types-get-config', 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-insight-types-update-config', 'locations-recommenders-get-config', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-dismissed', 'locations-recommenders-recommendations-mark-failed', 'locations-recommenders-recommendations-mark-succeeded' and 'locations-recommenders-update-config'", vec![ + ("locations-insight-types-get-config", + Some(r##"Gets the requested InsightTypeConfig. There is only one instance of the config for each InsightType."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/organizations_locations-insight-types-get-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the InsightTypeConfig to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-insight-types-insights-get", Some(r##"Gets the requested insight. 
Requires the recommender.*.get IAM permission for the specified insight type."##), "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/organizations_locations-insight-types-insights-get", @@ -3018,6 +4477,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-insight-types-update-config", + Some(r##"Updates an InsightTypeConfig change. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/organizations_locations-insight-types-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of insight type config. Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-get-config", + Some(r##"Gets the requested Recommender Config. There is only one instance of the config for each Recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/organizations_locations-recommenders-get-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the Recommendation Config to get. 
Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3090,6 +4599,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-recommendations-mark-dismissed", + Some(r##"Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. 
Requires the recommender.*.update IAM permission for the specified recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/organizations_locations-recommenders-recommendations-mark-dismissed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the recommendation."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3146,6 +4683,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-update-config", + Some(r##"Updates a Recommender Config. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/organizations_locations-recommenders-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of recommender config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3154,7 +4719,29 @@ async fn main() { ]), ]), - ("projects", "methods: 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ + ("projects", "methods: 'locations-insight-types-get-config', 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-insight-types-update-config', 'locations-recommenders-get-config', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-dismissed', 'locations-recommenders-recommendations-mark-failed', 'locations-recommenders-recommendations-mark-succeeded' and 'locations-recommenders-update-config'", vec![ + ("locations-insight-types-get-config", + Some(r##"Gets the requested InsightTypeConfig. There is only one instance of the config for each InsightType."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/projects_locations-insight-types-get-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
Name of the InsightTypeConfig to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-insight-types-insights-get", Some(r##"Gets the requested insight. Requires the recommender.*.get IAM permission for the specified insight type."##), "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/projects_locations-insight-types-insights-get", @@ -3221,6 +4808,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-insight-types-update-config", + Some(r##"Updates an InsightTypeConfig change. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/projects_locations-insight-types-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of insight type config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-get-config", + Some(r##"Gets the requested Recommender Config. There is only one instance of the config for each Recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/projects_locations-recommenders-get-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the Recommendation Config to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3293,6 +4930,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-recommendations-mark-dismissed", + Some(r##"Mark the Recommendation State as Dismissed. 
Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/projects_locations-recommenders-recommendations-mark-dismissed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the recommendation."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3349,6 +5014,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-update-config", + Some(r##"Updates a Recommender Config. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_cli/projects_locations-recommenders-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of recommender config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3361,7 +5054,7 @@ async fn main() { let mut app = App::new("recommender1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230108") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_recommender1_cli") .arg(Arg::with_name("url") diff --git a/gen/recommender1/Cargo.toml b/gen/recommender1/Cargo.toml index 932d468950..b17169768a 100644 --- a/gen/recommender1/Cargo.toml +++ b/gen/recommender1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-recommender1" -version = "5.0.2-beta-1+20230108" +version = "5.0.2+20230108" authors = ["Sebastian Thiel "] description = "A complete library to interact with Recommender (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/recommender1" homepage = "https://cloud.google.com/recommender/docs/" -documentation = "https://docs.rs/google-recommender1/5.0.2-beta-1+20230108" +documentation = "https://docs.rs/google-recommender1/5.0.2+20230108" license = "MIT" keywords = ["recommender", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/recommender1/README.md b/gen/recommender1/README.md index d228d30d06..a51614b2f5 100644 --- a/gen/recommender1/README.md +++ b/gen/recommender1/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-recommender1` library allows access to all features of the *Google Recommender* service. 
-This documentation was generated from *Recommender* crate version *5.0.2-beta-1+20230108*, where *20230108* is the exact revision of the *recommender:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Recommender* crate version *5.0.2+20230108*, where *20230108* is the exact revision of the *recommender:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Recommender* *v1* API can be found at the [official documentation site](https://cloud.google.com/recommender/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/Recommender) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/Recommender) ... * billing accounts - * [*locations insight types get config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationInsightTypeUpdateConfigCall), [*locations recommenders get 
config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::BillingAccountLocationRecommenderUpdateConfigCall) + * [*locations insight types get config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationInsightTypeInsightGetCall), [*locations insight types insights 
list*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::BillingAccountLocationRecommenderUpdateConfigCall) * folders - * 
[*locations insight types insights get*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationInsightTypeInsightMarkAcceptedCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationMarkFailedCall) and [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationMarkSucceededCall) + * [*locations insight types insights get*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationInsightTypeInsightListCall), 
[*locations insight types insights mark accepted*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationInsightTypeInsightMarkAcceptedCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationMarkFailedCall) and [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::FolderLocationRecommenderRecommendationMarkSucceededCall) * organizations - * [*locations insight types get config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationInsightTypeInsightListCall), [*locations insight types insights mark 
accepted*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::OrganizationLocationRecommenderUpdateConfigCall) + * [*locations insight types get 
config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark 
succeeded*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::OrganizationLocationRecommenderUpdateConfigCall) * projects - * [*locations insight types get config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark 
dismissed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/api::ProjectLocationRecommenderUpdateConfigCall) + * [*locations insight types get config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations 
list*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/api::ProjectLocationRecommenderUpdateConfigCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/Recommender)** +* **[Hub](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/Recommender)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::CallBuilder) -* **[Resources](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::CallBuilder) +* **[Resources](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::Part)** + * **[Parts](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -149,17 +149,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -169,29 +169,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::Delegate) to the -[Method Builder](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::Delegate) to the +[Method Builder](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::RequestValue) and -[decodable](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::RequestValue) and +[decodable](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-recommender1/5.0.2-beta-1+20230108/google_recommender1/client::RequestValue) are moved +* [request values](https://docs.rs/google-recommender1/5.0.2+20230108/google_recommender1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/recommender1/src/api.rs b/gen/recommender1/src/api.rs index b5af592f18..b68b3f17aa 100644 --- a/gen/recommender1/src/api.rs +++ b/gen/recommender1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Recommender { Recommender { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://recommender.googleapis.com/".to_string(), _root_url: "https://recommender.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> Recommender { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/recommender1/src/client.rs b/gen/recommender1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/recommender1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/recommender1/src/lib.rs b/gen/recommender1/src/lib.rs index a93e65dde6..935c9749f4 100644 --- a/gen/recommender1/src/lib.rs +++ b/gen/recommender1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Recommender* crate version *5.0.2-beta-1+20230108*, where *20230108* is the exact revision of the *recommender:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Recommender* crate version *5.0.2+20230108*, where *20230108* is the exact revision of the *recommender:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Recommender* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/recommender/docs/). diff --git a/gen/recommender1_beta1-cli/Cargo.toml b/gen/recommender1_beta1-cli/Cargo.toml index a7b69b53f0..090d8bb170 100644 --- a/gen/recommender1_beta1-cli/Cargo.toml +++ b/gen/recommender1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-recommender1_beta1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230108" authors = ["Sebastian Thiel "] description = "A complete library to interact with Recommender (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/recommender1_beta1-cli" @@ -20,13 +20,13 @@ name = "recommender1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-recommender1_beta1] path = "../recommender1_beta1" -version = "4.0.1+20220228" +version = "5.0.2+20230108" + diff --git a/gen/recommender1_beta1-cli/README.md b/gen/recommender1_beta1-cli/README.md index a6d53ae2e4..9e65d4f2cd 100644 --- a/gen/recommender1_beta1-cli/README.md +++ 
b/gen/recommender1_beta1-cli/README.md @@ -25,19 +25,24 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Recommender* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Recommender* API at revision *20230108*. The CLI is at version *5.0.2*. ```bash recommender1-beta1 [options] billing-accounts + locations-insight-types-get-config [-p ]... [-o ] locations-insight-types-insights-get [-p ]... [-o ] locations-insight-types-insights-list [-p ]... [-o ] locations-insight-types-insights-mark-accepted (-r )... [-p ]... [-o ] + locations-insight-types-update-config (-r )... [-p ]... [-o ] + locations-recommenders-get-config [-p ]... [-o ] locations-recommenders-recommendations-get [-p ]... [-o ] locations-recommenders-recommendations-list [-p ]... [-o ] locations-recommenders-recommendations-mark-claimed (-r )... [-p ]... [-o ] + locations-recommenders-recommendations-mark-dismissed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-failed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-succeeded (-r )... [-p ]... [-o ] + locations-recommenders-update-config (-r )... [-p ]... [-o ] folders locations-insight-types-insights-get [-p ]... [-o ] locations-insight-types-insights-list [-p ]... [-o ] @@ -45,21 +50,23 @@ recommender1-beta1 [options] locations-recommenders-recommendations-get [-p ]... [-o ] locations-recommenders-recommendations-list [-p ]... [-o ] locations-recommenders-recommendations-mark-claimed (-r )... [-p ]... [-o ] + locations-recommenders-recommendations-mark-dismissed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-failed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-succeeded (-r )... [-p ]... [-o ] organizations - locations-insight-types-config (-r )... [-p ]... [-o ] locations-insight-types-get-config [-p ]... 
[-o ] locations-insight-types-insights-get [-p ]... [-o ] locations-insight-types-insights-list [-p ]... [-o ] locations-insight-types-insights-mark-accepted (-r )... [-p ]... [-o ] - locations-recommenders-config (-r )... [-p ]... [-o ] + locations-insight-types-update-config (-r )... [-p ]... [-o ] locations-recommenders-get-config [-p ]... [-o ] locations-recommenders-recommendations-get [-p ]... [-o ] locations-recommenders-recommendations-list [-p ]... [-o ] locations-recommenders-recommendations-mark-claimed (-r )... [-p ]... [-o ] + locations-recommenders-recommendations-mark-dismissed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-failed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-succeeded (-r )... [-p ]... [-o ] + locations-recommenders-update-config (-r )... [-p ]... [-o ] projects locations-insight-types-get-config [-p ]... [-o ] locations-insight-types-insights-get [-p ]... [-o ] @@ -70,6 +77,7 @@ recommender1-beta1 [options] locations-recommenders-recommendations-get [-p ]... [-o ] locations-recommenders-recommendations-list [-p ]... [-o ] locations-recommenders-recommendations-mark-claimed (-r )... [-p ]... [-o ] + locations-recommenders-recommendations-mark-dismissed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-failed (-r )... [-p ]... [-o ] locations-recommenders-recommendations-mark-succeeded (-r )... [-p ]... [-o ] locations-recommenders-update-config (-r )... [-p ]... 
[-o ] diff --git a/gen/recommender1_beta1-cli/mkdocs.yml b/gen/recommender1_beta1-cli/mkdocs.yml index 7899cb368b..223caa85c3 100644 --- a/gen/recommender1_beta1-cli/mkdocs.yml +++ b/gen/recommender1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Recommender v4.0.1+20220228 +site_name: Recommender v5.0.2+20230108 site_url: http://byron.github.io/google-apis-rs/google-recommender1_beta1-cli site_description: A complete library to interact with Recommender (protocol v1beta1) @@ -7,48 +7,60 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/recommender1_bet docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['billing-accounts_locations-insight-types-insights-get.md', 'Billing Accounts', 'Locations Insight Types Insights Get'] -- ['billing-accounts_locations-insight-types-insights-list.md', 'Billing Accounts', 'Locations Insight Types Insights List'] -- ['billing-accounts_locations-insight-types-insights-mark-accepted.md', 'Billing Accounts', 'Locations Insight Types Insights Mark Accepted'] -- ['billing-accounts_locations-recommenders-recommendations-get.md', 'Billing Accounts', 'Locations Recommenders Recommendations Get'] -- ['billing-accounts_locations-recommenders-recommendations-list.md', 'Billing Accounts', 'Locations Recommenders Recommendations List'] -- ['billing-accounts_locations-recommenders-recommendations-mark-claimed.md', 'Billing Accounts', 'Locations Recommenders Recommendations Mark Claimed'] -- ['billing-accounts_locations-recommenders-recommendations-mark-failed.md', 'Billing Accounts', 'Locations Recommenders Recommendations Mark Failed'] -- ['billing-accounts_locations-recommenders-recommendations-mark-succeeded.md', 'Billing Accounts', 'Locations Recommenders Recommendations Mark Succeeded'] -- ['folders_locations-insight-types-insights-get.md', 'Folders', 'Locations Insight Types Insights Get'] -- ['folders_locations-insight-types-insights-list.md', 'Folders', 'Locations Insight Types Insights List'] -- 
['folders_locations-insight-types-insights-mark-accepted.md', 'Folders', 'Locations Insight Types Insights Mark Accepted'] -- ['folders_locations-recommenders-recommendations-get.md', 'Folders', 'Locations Recommenders Recommendations Get'] -- ['folders_locations-recommenders-recommendations-list.md', 'Folders', 'Locations Recommenders Recommendations List'] -- ['folders_locations-recommenders-recommendations-mark-claimed.md', 'Folders', 'Locations Recommenders Recommendations Mark Claimed'] -- ['folders_locations-recommenders-recommendations-mark-failed.md', 'Folders', 'Locations Recommenders Recommendations Mark Failed'] -- ['folders_locations-recommenders-recommendations-mark-succeeded.md', 'Folders', 'Locations Recommenders Recommendations Mark Succeeded'] -- ['organizations_locations-insight-types-config.md', 'Organizations', 'Locations Insight Types Config'] -- ['organizations_locations-insight-types-get-config.md', 'Organizations', 'Locations Insight Types Get Config'] -- ['organizations_locations-insight-types-insights-get.md', 'Organizations', 'Locations Insight Types Insights Get'] -- ['organizations_locations-insight-types-insights-list.md', 'Organizations', 'Locations Insight Types Insights List'] -- ['organizations_locations-insight-types-insights-mark-accepted.md', 'Organizations', 'Locations Insight Types Insights Mark Accepted'] -- ['organizations_locations-recommenders-config.md', 'Organizations', 'Locations Recommenders Config'] -- ['organizations_locations-recommenders-get-config.md', 'Organizations', 'Locations Recommenders Get Config'] -- ['organizations_locations-recommenders-recommendations-get.md', 'Organizations', 'Locations Recommenders Recommendations Get'] -- ['organizations_locations-recommenders-recommendations-list.md', 'Organizations', 'Locations Recommenders Recommendations List'] -- ['organizations_locations-recommenders-recommendations-mark-claimed.md', 'Organizations', 'Locations Recommenders Recommendations Mark Claimed'] -- 
['organizations_locations-recommenders-recommendations-mark-failed.md', 'Organizations', 'Locations Recommenders Recommendations Mark Failed'] -- ['organizations_locations-recommenders-recommendations-mark-succeeded.md', 'Organizations', 'Locations Recommenders Recommendations Mark Succeeded'] -- ['projects_locations-insight-types-get-config.md', 'Projects', 'Locations Insight Types Get Config'] -- ['projects_locations-insight-types-insights-get.md', 'Projects', 'Locations Insight Types Insights Get'] -- ['projects_locations-insight-types-insights-list.md', 'Projects', 'Locations Insight Types Insights List'] -- ['projects_locations-insight-types-insights-mark-accepted.md', 'Projects', 'Locations Insight Types Insights Mark Accepted'] -- ['projects_locations-insight-types-update-config.md', 'Projects', 'Locations Insight Types Update Config'] -- ['projects_locations-recommenders-get-config.md', 'Projects', 'Locations Recommenders Get Config'] -- ['projects_locations-recommenders-recommendations-get.md', 'Projects', 'Locations Recommenders Recommendations Get'] -- ['projects_locations-recommenders-recommendations-list.md', 'Projects', 'Locations Recommenders Recommendations List'] -- ['projects_locations-recommenders-recommendations-mark-claimed.md', 'Projects', 'Locations Recommenders Recommendations Mark Claimed'] -- ['projects_locations-recommenders-recommendations-mark-failed.md', 'Projects', 'Locations Recommenders Recommendations Mark Failed'] -- ['projects_locations-recommenders-recommendations-mark-succeeded.md', 'Projects', 'Locations Recommenders Recommendations Mark Succeeded'] -- ['projects_locations-recommenders-update-config.md', 'Projects', 'Locations Recommenders Update Config'] +nav: +- Home: 'index.md' +- 'Billing Accounts': + - 'Locations Insight Types Get Config': 'billing-accounts_locations-insight-types-get-config.md' + - 'Locations Insight Types Insights Get': 'billing-accounts_locations-insight-types-insights-get.md' + - 'Locations Insight 
Types Insights List': 'billing-accounts_locations-insight-types-insights-list.md' + - 'Locations Insight Types Insights Mark Accepted': 'billing-accounts_locations-insight-types-insights-mark-accepted.md' + - 'Locations Insight Types Update Config': 'billing-accounts_locations-insight-types-update-config.md' + - 'Locations Recommenders Get Config': 'billing-accounts_locations-recommenders-get-config.md' + - 'Locations Recommenders Recommendations Get': 'billing-accounts_locations-recommenders-recommendations-get.md' + - 'Locations Recommenders Recommendations List': 'billing-accounts_locations-recommenders-recommendations-list.md' + - 'Locations Recommenders Recommendations Mark Claimed': 'billing-accounts_locations-recommenders-recommendations-mark-claimed.md' + - 'Locations Recommenders Recommendations Mark Dismissed': 'billing-accounts_locations-recommenders-recommendations-mark-dismissed.md' + - 'Locations Recommenders Recommendations Mark Failed': 'billing-accounts_locations-recommenders-recommendations-mark-failed.md' + - 'Locations Recommenders Recommendations Mark Succeeded': 'billing-accounts_locations-recommenders-recommendations-mark-succeeded.md' + - 'Locations Recommenders Update Config': 'billing-accounts_locations-recommenders-update-config.md' +- 'Folders': + - 'Locations Insight Types Insights Get': 'folders_locations-insight-types-insights-get.md' + - 'Locations Insight Types Insights List': 'folders_locations-insight-types-insights-list.md' + - 'Locations Insight Types Insights Mark Accepted': 'folders_locations-insight-types-insights-mark-accepted.md' + - 'Locations Recommenders Recommendations Get': 'folders_locations-recommenders-recommendations-get.md' + - 'Locations Recommenders Recommendations List': 'folders_locations-recommenders-recommendations-list.md' + - 'Locations Recommenders Recommendations Mark Claimed': 'folders_locations-recommenders-recommendations-mark-claimed.md' + - 'Locations Recommenders Recommendations Mark Dismissed': 
'folders_locations-recommenders-recommendations-mark-dismissed.md' + - 'Locations Recommenders Recommendations Mark Failed': 'folders_locations-recommenders-recommendations-mark-failed.md' + - 'Locations Recommenders Recommendations Mark Succeeded': 'folders_locations-recommenders-recommendations-mark-succeeded.md' +- 'Organizations': + - 'Locations Insight Types Get Config': 'organizations_locations-insight-types-get-config.md' + - 'Locations Insight Types Insights Get': 'organizations_locations-insight-types-insights-get.md' + - 'Locations Insight Types Insights List': 'organizations_locations-insight-types-insights-list.md' + - 'Locations Insight Types Insights Mark Accepted': 'organizations_locations-insight-types-insights-mark-accepted.md' + - 'Locations Insight Types Update Config': 'organizations_locations-insight-types-update-config.md' + - 'Locations Recommenders Get Config': 'organizations_locations-recommenders-get-config.md' + - 'Locations Recommenders Recommendations Get': 'organizations_locations-recommenders-recommendations-get.md' + - 'Locations Recommenders Recommendations List': 'organizations_locations-recommenders-recommendations-list.md' + - 'Locations Recommenders Recommendations Mark Claimed': 'organizations_locations-recommenders-recommendations-mark-claimed.md' + - 'Locations Recommenders Recommendations Mark Dismissed': 'organizations_locations-recommenders-recommendations-mark-dismissed.md' + - 'Locations Recommenders Recommendations Mark Failed': 'organizations_locations-recommenders-recommendations-mark-failed.md' + - 'Locations Recommenders Recommendations Mark Succeeded': 'organizations_locations-recommenders-recommendations-mark-succeeded.md' + - 'Locations Recommenders Update Config': 'organizations_locations-recommenders-update-config.md' +- 'Projects': + - 'Locations Insight Types Get Config': 'projects_locations-insight-types-get-config.md' + - 'Locations Insight Types Insights Get': 
'projects_locations-insight-types-insights-get.md' + - 'Locations Insight Types Insights List': 'projects_locations-insight-types-insights-list.md' + - 'Locations Insight Types Insights Mark Accepted': 'projects_locations-insight-types-insights-mark-accepted.md' + - 'Locations Insight Types Update Config': 'projects_locations-insight-types-update-config.md' + - 'Locations Recommenders Get Config': 'projects_locations-recommenders-get-config.md' + - 'Locations Recommenders Recommendations Get': 'projects_locations-recommenders-recommendations-get.md' + - 'Locations Recommenders Recommendations List': 'projects_locations-recommenders-recommendations-list.md' + - 'Locations Recommenders Recommendations Mark Claimed': 'projects_locations-recommenders-recommendations-mark-claimed.md' + - 'Locations Recommenders Recommendations Mark Dismissed': 'projects_locations-recommenders-recommendations-mark-dismissed.md' + - 'Locations Recommenders Recommendations Mark Failed': 'projects_locations-recommenders-recommendations-mark-failed.md' + - 'Locations Recommenders Recommendations Mark Succeeded': 'projects_locations-recommenders-recommendations-mark-succeeded.md' + - 'Locations Recommenders Update Config': 'projects_locations-recommenders-update-config.md' theme: readthedocs diff --git a/gen/recommender1_beta1-cli/src/client.rs b/gen/recommender1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/recommender1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const 
FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/recommender1_beta1-cli/src/main.rs b/gen/recommender1_beta1-cli/src/main.rs index 025f90a5b8..d05f74740a 100644 --- a/gen/recommender1_beta1-cli/src/main.rs +++ b/gen/recommender1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_recommender1_beta1::{api, Error, oauth2}; +use google_recommender1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,58 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _billing_accounts_locations_insight_types_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.billing_accounts().locations_insight_types_get_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_insight_types_insights_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.billing_accounts().locations_insight_types_insights_get(opt.value_of("name").unwrap_or("")); @@ -113,7 +164,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -251,6 +302,155 @@ where } } + async fn _billing_accounts_locations_insight_types_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if 
value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1beta1InsightTypeConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.billing_accounts().locations_insight_types_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _billing_accounts_locations_recommenders_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.billing_accounts().locations_recommenders_get_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_recommenders_recommendations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.billing_accounts().locations_recommenders_recommendations_get(opt.value_of("name").unwrap_or("")); @@ -313,7 +513,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -451,6 +651,91 @@ where } } + async fn _billing_accounts_locations_recommenders_recommendations_mark_dismissed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = 
json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1beta1MarkRecommendationDismissedRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.billing_accounts().locations_recommenders_recommendations_mark_dismissed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _billing_accounts_locations_recommenders_recommendations_mark_failed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -623,6 +908,103 @@ where } } + async fn _billing_accounts_locations_recommenders_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1beta1RecommenderConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.billing_accounts().locations_recommenders_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; 
+ } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _folders_locations_insight_types_insights_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.folders().locations_insight_types_insights_get(opt.value_of("name").unwrap_or("")); @@ -685,7 +1067,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -885,7 +1267,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call 
= call.filter(value.unwrap_or("")); @@ -1023,6 +1405,91 @@ where } } + async fn _folders_locations_recommenders_recommendations_mark_dismissed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1beta1MarkRecommendationDismissedRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.folders().locations_recommenders_recommendations_mark_dismissed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _folders_locations_recommenders_recommendations_mark_failed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1195,103 +1662,6 @@ where } } - async fn _organizations_locations_insight_types_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) - -> Result<(), DoitError> { - - let mut field_cursor = FieldCursor::default(); - let mut object = json::value::Value::Object(Default::default()); - - for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let last_errc = err.issues.len(); - let (key, value) = parse_kv_arg(&*kvarg, err, false); - let mut temp_cursor = field_cursor.clone(); - if let Err(field_err) = temp_cursor.set(&*key) { - err.issues.push(field_err); - } - if value.is_none() { - field_cursor = temp_cursor.clone(); - if err.issues.len() > last_errc { - 
err.issues.remove(last_errc); - } - continue; - } - - let type_info: Option<(&'static str, JsonTypeInfo)> = - match &temp_cursor.to_string()[..] { - "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); - err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); - None - } - }; - if let Some((field_cursor_str, type_info)) = type_info { - FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); - } - } - let mut request: api::GoogleCloudRecommenderV1beta1InsightTypeConfig = json::value::from_value(object).unwrap(); - let mut call = self.hub.organizations().locations_insight_types_config(request, opt.value_of("name").unwrap_or("")); - for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - let (key, value) = parse_kv_arg(&*parg, err, false); - match key { - "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); - }, - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); - }, - _ => { - let mut found = false; - for param in &self.gp { - if key == *param { - found = true; - call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", 
key)).1, value.unwrap_or("unset")); - break; - } - } - if !found { - err.issues.push(CLIError::UnknownParameter(key.to_string(), - {let mut v = Vec::new(); - v.extend(self.gp.iter().map(|v|*v)); - v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); - v } )); - } - } - } - } - let protocol = CallType::Standard; - if dry_run { - Ok(()) - } else { - assert!(err.issues.len() == 0); - for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { - call = call.add_scope(scope); - } - let mut ostream = match writer_from_opts(opt.value_of("out")) { - Ok(mut f) => f, - Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), - }; - match match protocol { - CallType::Standard => call.doit().await, - _ => unreachable!() - } { - Err(api_err) => Err(DoitError::ApiError(api_err)), - Ok((mut response, output_schema)) => { - let mut value = json::value::to_value(&output_schema).expect("serde to work"); - remove_json_null_values(&mut value); - json::to_writer_pretty(&mut ostream, &value).unwrap(); - ostream.flush().unwrap(); - Ok(()) - } - } - } - } - async fn _organizations_locations_insight_types_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.organizations().locations_insight_types_get_config(opt.value_of("name").unwrap_or("")); @@ -1406,7 +1776,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1544,7 +1914,7 @@ where } } - async fn _organizations_locations_recommenders_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _organizations_locations_insight_types_update_config(&self, opt: 
&ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); @@ -1583,16 +1953,16 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::GoogleCloudRecommenderV1beta1RecommenderConfig = json::value::from_value(object).unwrap(); - let mut call = self.hub.organizations().locations_recommenders_config(request, opt.value_of("name").unwrap_or("")); + let mut request: api::GoogleCloudRecommenderV1beta1InsightTypeConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_insight_types_update_config(request, opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1755,7 +2125,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1893,6 +2263,91 @@ where } } + async fn _organizations_locations_recommenders_recommendations_mark_dismissed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object 
= json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1beta1MarkRecommendationDismissedRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_recommenders_recommendations_mark_dismissed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _organizations_locations_recommenders_recommendations_mark_failed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2065,6 +2520,103 @@ where } } + async fn _organizations_locations_recommenders_update_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "revision-id" => Some(("revisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "display-name", "etag", "name", "revision-id", "update-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1beta1RecommenderConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.organizations().locations_recommenders_update_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + 
} + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_insight_types_get_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_insight_types_get_config(opt.value_of("name").unwrap_or("")); @@ -2179,7 +2731,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2362,10 +2914,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", 
"boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2528,7 +3080,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2666,6 +3218,91 @@ where } } + async fn _projects_locations_recommenders_recommendations_mark_dismissed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRecommenderV1beta1MarkRecommendationDismissedRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_recommenders_recommendations_mark_dismissed(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to 
work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_recommenders_recommendations_mark_failed(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -2883,10 +3520,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2942,6 +3579,9 @@ where match self.opt.subcommand() { ("billing-accounts", Some(opt)) => { match opt.subcommand() { + ("locations-insight-types-get-config", Some(opt)) => { + call_result = self._billing_accounts_locations_insight_types_get_config(opt, dry_run, &mut err).await; + }, ("locations-insight-types-insights-get", Some(opt)) => { call_result = self._billing_accounts_locations_insight_types_insights_get(opt, dry_run, &mut err).await; }, @@ -2951,6 +3591,12 @@ where ("locations-insight-types-insights-mark-accepted", Some(opt)) => { call_result = self._billing_accounts_locations_insight_types_insights_mark_accepted(opt, dry_run, &mut err).await; }, + ("locations-insight-types-update-config", Some(opt)) => { + call_result = self._billing_accounts_locations_insight_types_update_config(opt, dry_run, &mut err).await; + }, + ("locations-recommenders-get-config", Some(opt)) => { + call_result = self._billing_accounts_locations_recommenders_get_config(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-get", Some(opt)) => { call_result = 
self._billing_accounts_locations_recommenders_recommendations_get(opt, dry_run, &mut err).await; }, @@ -2960,12 +3606,18 @@ where ("locations-recommenders-recommendations-mark-claimed", Some(opt)) => { call_result = self._billing_accounts_locations_recommenders_recommendations_mark_claimed(opt, dry_run, &mut err).await; }, + ("locations-recommenders-recommendations-mark-dismissed", Some(opt)) => { + call_result = self._billing_accounts_locations_recommenders_recommendations_mark_dismissed(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-mark-failed", Some(opt)) => { call_result = self._billing_accounts_locations_recommenders_recommendations_mark_failed(opt, dry_run, &mut err).await; }, ("locations-recommenders-recommendations-mark-succeeded", Some(opt)) => { call_result = self._billing_accounts_locations_recommenders_recommendations_mark_succeeded(opt, dry_run, &mut err).await; }, + ("locations-recommenders-update-config", Some(opt)) => { + call_result = self._billing_accounts_locations_recommenders_update_config(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("billing-accounts".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -2992,6 +3644,9 @@ where ("locations-recommenders-recommendations-mark-claimed", Some(opt)) => { call_result = self._folders_locations_recommenders_recommendations_mark_claimed(opt, dry_run, &mut err).await; }, + ("locations-recommenders-recommendations-mark-dismissed", Some(opt)) => { + call_result = self._folders_locations_recommenders_recommendations_mark_dismissed(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-mark-failed", Some(opt)) => { call_result = self._folders_locations_recommenders_recommendations_mark_failed(opt, dry_run, &mut err).await; }, @@ -3006,9 +3661,6 @@ where }, ("organizations", Some(opt)) => { match opt.subcommand() { - ("locations-insight-types-config", Some(opt)) => { - call_result = 
self._organizations_locations_insight_types_config(opt, dry_run, &mut err).await; - }, ("locations-insight-types-get-config", Some(opt)) => { call_result = self._organizations_locations_insight_types_get_config(opt, dry_run, &mut err).await; }, @@ -3021,8 +3673,8 @@ where ("locations-insight-types-insights-mark-accepted", Some(opt)) => { call_result = self._organizations_locations_insight_types_insights_mark_accepted(opt, dry_run, &mut err).await; }, - ("locations-recommenders-config", Some(opt)) => { - call_result = self._organizations_locations_recommenders_config(opt, dry_run, &mut err).await; + ("locations-insight-types-update-config", Some(opt)) => { + call_result = self._organizations_locations_insight_types_update_config(opt, dry_run, &mut err).await; }, ("locations-recommenders-get-config", Some(opt)) => { call_result = self._organizations_locations_recommenders_get_config(opt, dry_run, &mut err).await; @@ -3036,12 +3688,18 @@ where ("locations-recommenders-recommendations-mark-claimed", Some(opt)) => { call_result = self._organizations_locations_recommenders_recommendations_mark_claimed(opt, dry_run, &mut err).await; }, + ("locations-recommenders-recommendations-mark-dismissed", Some(opt)) => { + call_result = self._organizations_locations_recommenders_recommendations_mark_dismissed(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-mark-failed", Some(opt)) => { call_result = self._organizations_locations_recommenders_recommendations_mark_failed(opt, dry_run, &mut err).await; }, ("locations-recommenders-recommendations-mark-succeeded", Some(opt)) => { call_result = self._organizations_locations_recommenders_recommendations_mark_succeeded(opt, dry_run, &mut err).await; }, + ("locations-recommenders-update-config", Some(opt)) => { + call_result = self._organizations_locations_recommenders_update_config(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("organizations".to_string())); 
writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -3077,6 +3735,9 @@ where ("locations-recommenders-recommendations-mark-claimed", Some(opt)) => { call_result = self._projects_locations_recommenders_recommendations_mark_claimed(opt, dry_run, &mut err).await; }, + ("locations-recommenders-recommendations-mark-dismissed", Some(opt)) => { + call_result = self._projects_locations_recommenders_recommendations_mark_dismissed(opt, dry_run, &mut err).await; + }, ("locations-recommenders-recommendations-mark-failed", Some(opt)) => { call_result = self._projects_locations_recommenders_recommendations_mark_failed(opt, dry_run, &mut err).await; }, @@ -3165,7 +3826,29 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("billing-accounts", "methods: 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ + ("billing-accounts", "methods: 'locations-insight-types-get-config', 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-insight-types-update-config', 'locations-recommenders-get-config', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-dismissed', 'locations-recommenders-recommendations-mark-failed', 'locations-recommenders-recommendations-mark-succeeded' and 'locations-recommenders-update-config'", vec![ + ("locations-insight-types-get-config", + Some(r##"Gets the requested InsightTypeConfig. 
There is only one instance of the config for each InsightType."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/billing-accounts_locations-insight-types-get-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the InsightTypeConfig to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-insight-types-insights-get", Some(r##"Gets the requested insight. Requires the recommender.*.get IAM permission for the specified insight type."##), "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/billing-accounts_locations-insight-types-insights-get", @@ -3232,6 +3915,56 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-insight-types-update-config", + Some(r##"Updates an InsightTypeConfig change. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/billing-accounts_locations-insight-types-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of insight type config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-get-config", + Some(r##"Gets the requested Recommender Config. There is only one instance of the config for each Recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/billing-accounts_locations-recommenders-get-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the Recommendation Config to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3304,6 +4037,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-recommendations-mark-dismissed", + Some(r##"Mark the Recommendation State as Dismissed. 
Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/billing-accounts_locations-recommenders-recommendations-mark-dismissed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the recommendation."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3360,6 +4121,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-update-config", + Some(r##"Updates a Recommender Config. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/billing-accounts_locations-recommenders-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of recommender config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3368,7 +4157,7 @@ async fn main() { ]), ]), - ("folders", "methods: 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ + ("folders", "methods: 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-dismissed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ ("locations-insight-types-insights-get", Some(r##"Gets the requested insight. 
Requires the recommender.*.get IAM permission for the specified insight type."##), "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/folders_locations-insight-types-insights-get", @@ -3507,6 +4296,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-recommendations-mark-dismissed", + Some(r##"Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/folders_locations-recommenders-recommendations-mark-dismissed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the recommendation."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3571,42 +4388,14 @@ async fn main() { ]), ]), - ("organizations", "methods: 'locations-insight-types-config', 'locations-insight-types-get-config', 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-recommenders-config', 'locations-recommenders-get-config', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 
'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-failed' and 'locations-recommenders-recommendations-mark-succeeded'", vec![ - ("locations-insight-types-config", - Some(r##"Updates an InsightTypeConfig change. This will create a new revision of the config."##), - "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/organizations_locations-insight-types-config", - vec![ - (Some(r##"name"##), - None, - Some(r##"Name of insight type config. Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config"##), - Some(true), - Some(false)), - - (Some(r##"kv"##), - Some(r##"r"##), - Some(r##"Set various fields of the request structure, matching the key=value form"##), - Some(true), - Some(true)), - - (Some(r##"v"##), - Some(r##"p"##), - Some(r##"Set various optional parameters, matching the key=value form"##), - Some(false), - Some(true)), - - (Some(r##"out"##), - Some(r##"o"##), - Some(r##"Specify the file into which to write the program's output"##), - Some(false), - Some(false)), - ]), + ("organizations", "methods: 'locations-insight-types-get-config', 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-insight-types-update-config', 'locations-recommenders-get-config', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-dismissed', 'locations-recommenders-recommendations-mark-failed', 'locations-recommenders-recommendations-mark-succeeded' and 'locations-recommenders-update-config'", vec![ ("locations-insight-types-get-config", Some(r##"Gets the requested InsightTypeConfig. 
There is only one instance of the config for each InsightType."##), "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/organizations_locations-insight-types-get-config", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the InsightTypeConfig to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/global/recommenders/[INSIGHT_TYPE_ID]/config` * `projects/[PROJECT_ID]/locations/global/recommenders/[INSIGHT_TYPE_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/global/recommenders/[INSIGHT_TYPE_ID]/config`"##), + Some(r##"Required. Name of the InsightTypeConfig to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config`"##), Some(true), Some(false)), @@ -3694,13 +4483,13 @@ async fn main() { Some(false), Some(false)), ]), - ("locations-recommenders-config", - Some(r##"Updates a Recommender Config. This will create a new revision of the config."##), - "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/organizations_locations-recommenders-config", + ("locations-insight-types-update-config", + Some(r##"Updates an InsightTypeConfig change. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/organizations_locations-insight-types-update-config", vec![ (Some(r##"name"##), None, - Some(r##"Name of recommender config. Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config"##), + Some(r##"Name of insight type config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config"##), Some(true), Some(false)), @@ -3728,7 +4517,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the Recommendation Config to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config`"##), + Some(r##"Required. Name of the Recommendation Config to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config`"##), Some(true), Some(false)), @@ -3810,6 +4599,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-recommendations-mark-dismissed", + Some(r##"Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. 
Requires the recommender.*.update IAM permission for the specified recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/organizations_locations-recommenders-recommendations-mark-dismissed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the recommendation."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3866,6 +4683,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-update-config", + Some(r##"Updates a Recommender Config. This will create a new revision of the config."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/organizations_locations-recommenders-update-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of recommender config. 
Eg, projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3874,14 +4719,14 @@ async fn main() { ]), ]), - ("projects", "methods: 'locations-insight-types-get-config', 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-insight-types-update-config', 'locations-recommenders-get-config', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-failed', 'locations-recommenders-recommendations-mark-succeeded' and 'locations-recommenders-update-config'", vec![ + ("projects", "methods: 'locations-insight-types-get-config', 'locations-insight-types-insights-get', 'locations-insight-types-insights-list', 'locations-insight-types-insights-mark-accepted', 'locations-insight-types-update-config', 'locations-recommenders-get-config', 'locations-recommenders-recommendations-get', 'locations-recommenders-recommendations-list', 'locations-recommenders-recommendations-mark-claimed', 'locations-recommenders-recommendations-mark-dismissed', 'locations-recommenders-recommendations-mark-failed', 'locations-recommenders-recommendations-mark-succeeded' and 'locations-recommenders-update-config'", vec![ ("locations-insight-types-get-config", Some(r##"Gets the requested InsightTypeConfig. 
There is only one instance of the config for each InsightType."##), "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/projects_locations-insight-types-get-config", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the InsightTypeConfig to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/global/recommenders/[INSIGHT_TYPE_ID]/config` * `projects/[PROJECT_ID]/locations/global/recommenders/[INSIGHT_TYPE_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/global/recommenders/[INSIGHT_TYPE_ID]/config`"##), + Some(r##"Required. Name of the InsightTypeConfig to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/insightTypes/[INSIGHT_TYPE_ID]/config`"##), Some(true), Some(false)), @@ -4003,7 +4848,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the Recommendation Config to get. Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config`"##), + Some(r##"Required. Name of the Recommendation Config to get. 
Acceptable formats: * `projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `projects/[PROJECT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `organizations/[ORGANIZATION_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config` * `billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/config`"##), Some(true), Some(false)), @@ -4085,6 +4930,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-recommenders-recommendations-mark-dismissed", + Some(r##"Mark the Recommendation State as Dismissed. Users can use this method to indicate to the Recommender API that an ACTIVE recommendation has to be marked back as DISMISSED. MarkRecommendationDismissed can be applied to recommendations in ACTIVE state. Requires the recommender.*.update IAM permission for the specified recommender."##), + "Details at http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli/projects_locations-recommenders-recommendations-mark-dismissed", + vec![ + (Some(r##"name"##), + None, + Some(r##"Name of the recommendation."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -4181,7 +5054,7 @@ async fn main() { let mut app = App::new("recommender1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230108") .about("") .after_help("All documentation details can be found at 
http://byron.github.io/google-apis-rs/google_recommender1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/recommender1_beta1/Cargo.toml b/gen/recommender1_beta1/Cargo.toml index 6def6e496d..075caef384 100644 --- a/gen/recommender1_beta1/Cargo.toml +++ b/gen/recommender1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-recommender1_beta1" -version = "5.0.2-beta-1+20230108" +version = "5.0.2+20230108" authors = ["Sebastian Thiel "] description = "A complete library to interact with Recommender (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/recommender1_beta1" homepage = "https://cloud.google.com/recommender/docs/" -documentation = "https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108" +documentation = "https://docs.rs/google-recommender1_beta1/5.0.2+20230108" license = "MIT" keywords = ["recommender", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/recommender1_beta1/README.md b/gen/recommender1_beta1/README.md index 72795d1647..c93df14f6e 100644 --- a/gen/recommender1_beta1/README.md +++ b/gen/recommender1_beta1/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-recommender1_beta1` library allows access to all features of the *Google Recommender* service. -This documentation was generated from *Recommender* crate version *5.0.2-beta-1+20230108*, where *20230108* is the exact revision of the *recommender:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Recommender* crate version *5.0.2+20230108*, where *20230108* is the exact revision of the *recommender:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Recommender* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/recommender/docs/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/Recommender) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/Recommender) ... * billing accounts - * [*locations insight types get config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark 
claimed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderUpdateConfigCall) + * [*locations insight types get config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationInsightTypeUpdateConfigCall), [*locations recommenders get 
config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::BillingAccountLocationRecommenderUpdateConfigCall) * folders - * [*locations insight types insights get*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationInsightTypeInsightListCall), [*locations insight types insights mark 
accepted*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationInsightTypeInsightMarkAcceptedCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationMarkFailedCall) and [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationMarkSucceededCall) + * [*locations insight types insights get*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationInsightTypeInsightMarkAcceptedCall), [*locations recommenders recommendations 
get*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationMarkFailedCall) and [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::FolderLocationRecommenderRecommendationMarkSucceededCall) * organizations - * [*locations insight types get config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update 
config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderUpdateConfigCall) + * [*locations insight types get config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeGetConfigCall), [*locations insight types insights 
get*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark 
succeeded*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::OrganizationLocationRecommenderUpdateConfigCall) * projects - * [*locations insight types get config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeUpdateConfigCall), [*locations recommenders get config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark 
claimed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderUpdateConfigCall) + * [*locations insight types get config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeGetConfigCall), [*locations insight types insights get*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeInsightGetCall), [*locations insight types insights list*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeInsightListCall), [*locations insight types insights mark accepted*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeInsightMarkAcceptedCall), [*locations insight types update config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationInsightTypeUpdateConfigCall), [*locations recommenders get 
config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderGetConfigCall), [*locations recommenders recommendations get*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationGetCall), [*locations recommenders recommendations list*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationListCall), [*locations recommenders recommendations mark claimed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationMarkClaimedCall), [*locations recommenders recommendations mark dismissed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationMarkDismissedCall), [*locations recommenders recommendations mark failed*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationMarkFailedCall), [*locations recommenders recommendations mark succeeded*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderRecommendationMarkSucceededCall) and [*locations recommenders update config*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/api::ProjectLocationRecommenderUpdateConfigCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/Recommender)** +* **[Hub](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/Recommender)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -149,17 +149,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -169,29 +169,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-recommender1_beta1/5.0.2-beta-1+20230108/google_recommender1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-recommender1_beta1/5.0.2+20230108/google_recommender1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/recommender1_beta1/src/api.rs b/gen/recommender1_beta1/src/api.rs index b9966692b0..e67d42cbe8 100644 --- a/gen/recommender1_beta1/src/api.rs +++ b/gen/recommender1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Recommender { Recommender { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://recommender.googleapis.com/".to_string(), _root_url: "https://recommender.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> Recommender { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/recommender1_beta1/src/client.rs b/gen/recommender1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/recommender1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/recommender1_beta1/src/lib.rs b/gen/recommender1_beta1/src/lib.rs index 75a8ace82a..fb273206a4 100644 --- a/gen/recommender1_beta1/src/lib.rs +++ b/gen/recommender1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Recommender* crate version *5.0.2-beta-1+20230108*, where *20230108* is the exact revision of the *recommender:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Recommender* crate version *5.0.2+20230108*, where *20230108* is the exact revision of the *recommender:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Recommender* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/recommender/docs/). diff --git a/gen/redis1-cli/Cargo.toml b/gen/redis1-cli/Cargo.toml index 84c60b7f4a..edc1f6eda6 100644 --- a/gen/redis1-cli/Cargo.toml +++ b/gen/redis1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-redis1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230111" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Redis (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/redis1-cli" @@ -20,13 +20,13 @@ name = "redis1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-redis1] path = "../redis1" -version = "4.0.1+20220301" +version = "5.0.2+20230111" + diff --git a/gen/redis1-cli/README.md b/gen/redis1-cli/README.md index 278f5fc3df..4f3eea0997 100644 --- a/gen/redis1-cli/README.md +++ b/gen/redis1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *Cloud Redis* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Redis* API at revision *20230111*. The CLI is at version *5.0.2*. ```bash redis1 [options] diff --git a/gen/redis1-cli/mkdocs.yml b/gen/redis1-cli/mkdocs.yml index c10afd4613..88a79ee160 100644 --- a/gen/redis1-cli/mkdocs.yml +++ b/gen/redis1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Redis v4.0.1+20220301 +site_name: Cloud Redis v5.0.2+20230111 site_url: http://byron.github.io/google-apis-rs/google-redis1-cli site_description: A complete library to interact with Cloud Redis (protocol v1) @@ -7,25 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/redis1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-instances-create.md', 'Projects', 'Locations Instances Create'] -- ['projects_locations-instances-delete.md', 'Projects', 'Locations Instances Delete'] -- ['projects_locations-instances-export.md', 'Projects', 'Locations Instances Export'] -- ['projects_locations-instances-failover.md', 'Projects', 'Locations Instances Failover'] -- ['projects_locations-instances-get.md', 'Projects', 'Locations Instances Get'] -- ['projects_locations-instances-get-auth-string.md', 'Projects', 'Locations Instances Get Auth String'] -- ['projects_locations-instances-import.md', 'Projects', 'Locations Instances Import'] -- ['projects_locations-instances-list.md', 'Projects', 'Locations Instances List'] -- ['projects_locations-instances-patch.md', 'Projects', 'Locations Instances Patch'] -- ['projects_locations-instances-reschedule-maintenance.md', 'Projects', 'Locations Instances Reschedule Maintenance'] -- ['projects_locations-instances-upgrade.md', 'Projects', 'Locations Instances Upgrade'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 
'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Instances Create': 'projects_locations-instances-create.md' + - 'Locations Instances Delete': 'projects_locations-instances-delete.md' + - 'Locations Instances Export': 'projects_locations-instances-export.md' + - 'Locations Instances Failover': 'projects_locations-instances-failover.md' + - 'Locations Instances Get': 'projects_locations-instances-get.md' + - 'Locations Instances Get Auth String': 'projects_locations-instances-get-auth-string.md' + - 'Locations Instances Import': 'projects_locations-instances-import.md' + - 'Locations Instances List': 'projects_locations-instances-list.md' + - 'Locations Instances Patch': 'projects_locations-instances-patch.md' + - 'Locations Instances Reschedule Maintenance': 'projects_locations-instances-reschedule-maintenance.md' + - 'Locations Instances Upgrade': 'projects_locations-instances-upgrade.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' theme: readthedocs diff --git a/gen/redis1-cli/src/client.rs b/gen/redis1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/redis1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as 
json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/redis1-cli/src/main.rs b/gen/redis1-cli/src/main.rs index ba4478ae62..414a2a8763 100644 --- a/gen/redis1-cli/src/main.rs +++ b/gen/redis1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_redis1::{api, Error, oauth2}; +use google_redis1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -132,6 +131,7 @@ where "connect-mode" => Some(("connectMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "current-location-id" => Some(("currentLocationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "customer-managed-key" => Some(("customerManagedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -161,10 +161,11 @@ where "secondary-ip-range" => Some(("secondaryIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "suspension-reasons" => Some(("suspensionReasons", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "tier" => Some(("tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transit-encryption-mode" => Some(("transitEncryptionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["alternative-location-id", "auth-enabled", "authorized-network", "can-reschedule", "connect-mode", "create-time", "current-location-id", "description", "display-name", "end-time", "host", "labels", "location-id", "maintenance-policy", "maintenance-schedule", "memory-size-gb", "name", "persistence-config", "persistence-iam-identity", "persistence-mode", "port", "rdb-next-snapshot-time", "rdb-snapshot-period", "rdb-snapshot-start-time", "read-endpoint", "read-endpoint-port", "read-replicas-mode", "redis-configs", "redis-version", "replica-count", "reserved-ip-range", "schedule-deadline-time", "secondary-ip-range", "start-time", "state", "status-message", "tier", "transit-encryption-mode", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alternative-location-id", "auth-enabled", "authorized-network", "can-reschedule", "connect-mode", "create-time", "current-location-id", "customer-managed-key", "description", "display-name", "end-time", "host", "labels", "location-id", "maintenance-policy", "maintenance-schedule", "memory-size-gb", "name", "persistence-config", "persistence-iam-identity", "persistence-mode", "port", "rdb-next-snapshot-time", "rdb-snapshot-period", "rdb-snapshot-start-time", "read-endpoint", "read-endpoint-port", "read-replicas-mode", "redis-configs", "redis-version", "replica-count", "reserved-ip-range", "schedule-deadline-time", "secondary-ip-range", "start-time", "state", "status-message", "suspension-reasons", "tier", "transit-encryption-mode", "update-time"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -649,7 +650,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -727,6 +728,7 @@ where "connect-mode" => Some(("connectMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "current-location-id" => Some(("currentLocationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "customer-managed-key" => Some(("customerManagedKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -756,10 +758,11 @@ where "secondary-ip-range" => Some(("secondaryIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status-message" => Some(("statusMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "suspension-reasons" => Some(("suspensionReasons", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "tier" => Some(("tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transit-encryption-mode" => Some(("transitEncryptionMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, 
&vec!["alternative-location-id", "auth-enabled", "authorized-network", "can-reschedule", "connect-mode", "create-time", "current-location-id", "description", "display-name", "end-time", "host", "labels", "location-id", "maintenance-policy", "maintenance-schedule", "memory-size-gb", "name", "persistence-config", "persistence-iam-identity", "persistence-mode", "port", "rdb-next-snapshot-time", "rdb-snapshot-period", "rdb-snapshot-start-time", "read-endpoint", "read-endpoint-port", "read-replicas-mode", "redis-configs", "redis-version", "replica-count", "reserved-ip-range", "schedule-deadline-time", "secondary-ip-range", "start-time", "state", "status-message", "tier", "transit-encryption-mode", "update-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alternative-location-id", "auth-enabled", "authorized-network", "can-reschedule", "connect-mode", "create-time", "current-location-id", "customer-managed-key", "description", "display-name", "end-time", "host", "labels", "location-id", "maintenance-policy", "maintenance-schedule", "memory-size-gb", "name", "persistence-config", "persistence-iam-identity", "persistence-mode", "port", "rdb-next-snapshot-time", "rdb-snapshot-period", "rdb-snapshot-start-time", "read-endpoint", "read-endpoint-port", "read-replicas-mode", "redis-configs", "redis-version", "replica-count", "reserved-ip-range", "schedule-deadline-time", "secondary-ip-range", "start-time", "state", "status-message", "suspension-reasons", "tier", "transit-encryption-mode", "update-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -774,7 +777,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1004,7 
+1007,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1222,7 +1225,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1834,7 +1837,7 @@ async fn main() { let mut app = App::new("redis1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230111") .about("Creates and manages Redis instances on the Google Cloud Platform.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_redis1_cli") .arg(Arg::with_name("url") diff --git a/gen/redis1/Cargo.toml b/gen/redis1/Cargo.toml index 98efc97d57..9693a6433f 100644 --- a/gen/redis1/Cargo.toml +++ b/gen/redis1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-redis1" -version = "5.0.2-beta-1+20230111" +version = "5.0.2+20230111" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Redis (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/redis1" homepage = "https://cloud.google.com/memorystore/docs/redis/" -documentation = "https://docs.rs/google-redis1/5.0.2-beta-1+20230111" +documentation = "https://docs.rs/google-redis1/5.0.2+20230111" license = "MIT" keywords = ["redis", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/redis1/README.md b/gen/redis1/README.md index 8ef46c3d65..f2549b7742 100644 --- a/gen/redis1/README.md +++ b/gen/redis1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-redis1` library allows access to all features of the *Google Cloud Redis* service. -This documentation was generated from *Cloud Redis* crate version *5.0.2-beta-1+20230111*, where *20230111* is the exact revision of the *redis:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Redis* crate version *5.0.2+20230111*, where *20230111* is the exact revision of the *redis:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Redis* *v1* API can be found at the [official documentation site](https://cloud.google.com/memorystore/docs/redis/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/CloudRedis) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/CloudRedis) ... 
* projects - * [*locations get*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceDeleteCall), [*locations instances export*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceExportCall), [*locations instances failover*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceFailoverCall), [*locations instances get*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceGetCall), [*locations instances get auth string*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceGetAuthStringCall), [*locations instances import*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceImportCall), [*locations instances list*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstancePatchCall), [*locations instances reschedule maintenance*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceRescheduleMaintenanceCall), [*locations instances upgrade*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationInstanceUpgradeCall), [*locations list*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationOperationCancelCall), [*locations operations 
delete*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/api::ProjectLocationOperationListCall) + * [*locations get*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationGetCall), [*locations instances create*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceCreateCall), [*locations instances delete*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceDeleteCall), [*locations instances export*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceExportCall), [*locations instances failover*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceFailoverCall), [*locations instances get*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceGetCall), [*locations instances get auth string*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceGetAuthStringCall), [*locations instances import*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceImportCall), [*locations instances list*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceListCall), [*locations instances patch*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstancePatchCall), [*locations instances reschedule maintenance*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceRescheduleMaintenanceCall), [*locations instances upgrade*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationInstanceUpgradeCall), [*locations 
list*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/api::ProjectLocationOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/CloudRedis)** +* **[Hub](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/CloudRedis)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::CallBuilder) -* **[Resources](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::CallBuilder) +* **[Resources](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::Part)** + * 
**[Parts](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::Delegate) to the -[Method Builder](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::Delegate) to the +[Method Builder](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::RequestValue) and -[decodable](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::RequestValue) and +[decodable](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-redis1/5.0.2-beta-1+20230111/google_redis1/client::RequestValue) are moved +* [request values](https://docs.rs/google-redis1/5.0.2+20230111/google_redis1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/redis1/src/api.rs b/gen/redis1/src/api.rs index 3fc409c3ba..ea4a61e98f 100644 --- a/gen/redis1/src/api.rs +++ b/gen/redis1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudRedis { CloudRedis { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://redis.googleapis.com/".to_string(), _root_url: "https://redis.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudRedis { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/redis1/src/client.rs b/gen/redis1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/redis1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/redis1/src/lib.rs b/gen/redis1/src/lib.rs index 648e3bd1b2..4b72255b0d 100644 --- a/gen/redis1/src/lib.rs +++ b/gen/redis1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Redis* crate version *5.0.2-beta-1+20230111*, where *20230111* is the exact revision of the *redis:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Redis* crate version *5.0.2+20230111*, where *20230111* is the exact revision of the *redis:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Redis* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/memorystore/docs/redis/). diff --git a/gen/remotebuildexecution2-cli/Cargo.toml b/gen/remotebuildexecution2-cli/Cargo.toml index 93089fc8f1..ad8a3be9b9 100644 --- a/gen/remotebuildexecution2-cli/Cargo.toml +++ b/gen/remotebuildexecution2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-remotebuildexecution2-cli" -version = "4.0.1+20210329" +version = "5.0.2+20210329" authors = ["Sebastian Thiel "] description = "A complete library to interact with Remote Build Execution (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/remotebuildexecution2-cli" @@ -20,13 +20,13 @@ name = "remotebuildexecution2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-remotebuildexecution2] path = "../remotebuildexecution2" -version = "4.0.1+20210329" +version = "5.0.2+20210329" + diff --git a/gen/remotebuildexecution2-cli/README.md b/gen/remotebuildexecution2-cli/README.md index 1f765ee174..dded229576 100644 --- a/gen/remotebuildexecution2-cli/README.md 
+++ b/gen/remotebuildexecution2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Remote Build Execution* API at revision *20210329*. The CLI is at version *4.0.1*. +This documentation was generated from the *Remote Build Execution* API at revision *20210329*. The CLI is at version *5.0.2*. ```bash remotebuildexecution2 [options] diff --git a/gen/remotebuildexecution2-cli/mkdocs.yml b/gen/remotebuildexecution2-cli/mkdocs.yml index 3d16d7a3e1..0d42dae0e8 100644 --- a/gen/remotebuildexecution2-cli/mkdocs.yml +++ b/gen/remotebuildexecution2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Remote Build Execution v4.0.1+20210329 +site_name: Remote Build Execution v5.0.2+20210329 site_url: http://byron.github.io/google-apis-rs/google-remotebuildexecution2-cli site_description: A complete library to interact with Remote Build Execution (protocol v2) @@ -7,17 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/remotebuildexecu docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['action-results_get.md', 'Action Results', 'Get'] -- ['action-results_update.md', 'Action Results', 'Update'] -- ['actions_execute.md', 'Actions', 'Execute'] -- ['blobs_batch-read.md', 'Blobs', 'Batch Read'] -- ['blobs_batch-update.md', 'Blobs', 'Batch Update'] -- ['blobs_find-missing.md', 'Blobs', 'Find Missing'] -- ['blobs_get-tree.md', 'Blobs', 'Get Tree'] -- ['methods_get-capabilities.md', 'Methods', 'Get Capabilities'] -- ['operations_wait-execution.md', 'Operations', 'Wait Execution'] +nav: +- Home: 'index.md' +- 'Action Results': + - 'Get': 'action-results_get.md' + - 'Update': 'action-results_update.md' +- 'Actions': + - 'Execute': 'actions_execute.md' +- 'Blobs': + - 'Batch Read': 'blobs_batch-read.md' + - 'Batch Update': 'blobs_batch-update.md' + - 'Find Missing': 'blobs_find-missing.md' + - 'Get Tree': 'blobs_get-tree.md' +- 'Methods': + - 
'Get Capabilities': 'methods_get-capabilities.md' +- 'Operations': + - 'Wait Execution': 'operations_wait-execution.md' theme: readthedocs diff --git a/gen/remotebuildexecution2-cli/src/client.rs b/gen/remotebuildexecution2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/remotebuildexecution2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/remotebuildexecution2-cli/src/main.rs b/gen/remotebuildexecution2-cli/src/main.rs index 0e52b7c38f..5e9a03daa6 100644 --- a/gen/remotebuildexecution2-cli/src/main.rs +++ b/gen/remotebuildexecution2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_remotebuildexecution2::{api, Error, oauth2}; +use google_remotebuildexecution2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,10 +57,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "inline-stdout" => { - call = call.inline_stdout(arg_from_str(value.unwrap_or("false"), err, "inline-stdout", "boolean")); + call = call.inline_stdout( value.map(|v| arg_from_str(v, err, "inline-stdout", "boolean")).unwrap_or(false)); }, "inline-stderr" => { - call = call.inline_stderr(arg_from_str(value.unwrap_or("false"), err, "inline-stderr", "boolean")); + call = call.inline_stderr( value.map(|v| arg_from_str(v, err, "inline-stderr", "boolean")).unwrap_or(false)); }, "inline-output-files" => { call = call.add_inline_output_files(value.unwrap_or("")); @@ -169,7 +168,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "results-cache-policy-priority" => { - call = call.results_cache_policy_priority(arg_from_str(value.unwrap_or("-0"), err, "results-cache-policy-priority", "integer")); + call = call.results_cache_policy_priority( value.map(|v| arg_from_str(v, err, "results-cache-policy-priority", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -569,7 +568,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1188,7 +1187,7 @@ async fn main() { let mut app = App::new("remotebuildexecution2") .author("Sebastian Thiel ") - .version("4.0.1+20210329") + .version("5.0.2+20210329") .about("Supplies a Remote Execution API service for tools such as bazel.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_remotebuildexecution2_cli") .arg(Arg::with_name("url") diff --git a/gen/remotebuildexecution2/Cargo.toml b/gen/remotebuildexecution2/Cargo.toml index 9221f81b77..4e3f32d17b 100644 --- a/gen/remotebuildexecution2/Cargo.toml +++ b/gen/remotebuildexecution2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-remotebuildexecution2" -version = "5.0.2-beta-1+20210329" +version = "5.0.2+20210329" authors = ["Sebastian Thiel "] description = "A complete library to interact with Remote Build Execution (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/remotebuildexecution2" homepage = "https://cloud.google.com/remote-build-execution/docs/" -documentation = "https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329" +documentation = "https://docs.rs/google-remotebuildexecution2/5.0.2+20210329" license = "MIT" keywords = ["remotebuildexecution", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/remotebuildexecution2/README.md b/gen/remotebuildexecution2/README.md index c67687d4b3..3c565673f8 100644 --- a/gen/remotebuildexecution2/README.md +++ b/gen/remotebuildexecution2/README.md @@ -5,26 +5,26 @@ DO NOT EDIT ! 
--> The `google-remotebuildexecution2` library allows access to all features of the *Google Remote Build Execution* service. -This documentation was generated from *Remote Build Execution* crate version *5.0.2-beta-1+20210329*, where *20210329* is the exact revision of the *remotebuildexecution:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Remote Build Execution* crate version *5.0.2+20210329*, where *20210329* is the exact revision of the *remotebuildexecution:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Remote Build Execution* *v2* API can be found at the [official documentation site](https://cloud.google.com/remote-build-execution/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/RemoteBuildExecution) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/RemoteBuildExecution) ... 
* action results - * [*get*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::ActionResultGetCall) and [*update*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::ActionResultUpdateCall) + * [*get*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::ActionResultGetCall) and [*update*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::ActionResultUpdateCall) * actions - * [*execute*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::ActionExecuteCall) + * [*execute*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::ActionExecuteCall) * blobs - * [*batch read*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::BlobBatchReadCall), [*batch update*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::BlobBatchUpdateCall), [*find missing*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::BlobFindMissingCall) and [*get tree*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::BlobGetTreeCall) + * [*batch read*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::BlobBatchReadCall), [*batch update*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::BlobBatchUpdateCall), [*find missing*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::BlobFindMissingCall) and [*get tree*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::BlobGetTreeCall) * operations - * [*wait 
execution*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::OperationWaitExecutionCall) + * [*wait execution*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::OperationWaitExecutionCall) Other activities are ... -* [get capabilities](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/api::MethodGetCapabilityCall) +* [get capabilities](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/api::MethodGetCapabilityCall) @@ -32,17 +32,17 @@ Other activities are ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/RemoteBuildExecution)** +* **[Hub](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/RemoteBuildExecution)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::CallBuilder) -* **[Resources](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::CallBuilder) +* **[Resources](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::Resource)** * primary types that you can apply *Activities* to * a 
collection of properties and *Parts* - * **[Parts](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::Part)** + * **[Parts](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::Delegate) to the -[Method Builder](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::Delegate) to the +[Method Builder](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::RequestValue) and -[decodable](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::RequestValue) and +[decodable](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-remotebuildexecution2/5.0.2-beta-1+20210329/google_remotebuildexecution2/client::RequestValue) are moved +* [request values](https://docs.rs/google-remotebuildexecution2/5.0.2+20210329/google_remotebuildexecution2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/remotebuildexecution2/src/api.rs b/gen/remotebuildexecution2/src/api.rs index 5a2be00e9d..eb220cd5d5 100644 --- a/gen/remotebuildexecution2/src/api.rs +++ b/gen/remotebuildexecution2/src/api.rs @@ -122,7 +122,7 @@ impl<'a, S> RemoteBuildExecution { RemoteBuildExecution { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://remotebuildexecution.googleapis.com/".to_string(), _root_url: "https://remotebuildexecution.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> RemoteBuildExecution { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/remotebuildexecution2/src/client.rs b/gen/remotebuildexecution2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/remotebuildexecution2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/remotebuildexecution2/src/lib.rs b/gen/remotebuildexecution2/src/lib.rs index dd1326f9ee..dcfe1a171a 100644 --- a/gen/remotebuildexecution2/src/lib.rs +++ b/gen/remotebuildexecution2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Remote Build Execution* crate version *5.0.2-beta-1+20210329*, where *20210329* is the exact revision of the *remotebuildexecution:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Remote Build Execution* crate version *5.0.2+20210329*, where *20210329* is the exact revision of the *remotebuildexecution:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Remote Build Execution* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/remote-build-execution/docs/). diff --git a/gen/replicapool1_beta2-cli/Cargo.toml b/gen/replicapool1_beta2-cli/Cargo.toml index 2ad289b58a..f830aa3dfc 100644 --- a/gen/replicapool1_beta2-cli/Cargo.toml +++ b/gen/replicapool1_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-replicapool1_beta2-cli" -version = "4.0.1+20160512" +version = "5.0.2+20160512" authors = ["Sebastian Thiel "] description = "A complete library to interact with replicapool (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/replicapool1_beta2-cli" @@ -20,13 +20,13 @@ name = "replicapool1-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-replicapool1_beta2] path = "../replicapool1_beta2" -version = "4.0.1+20160512" +version = "5.0.2+20160512" + diff --git a/gen/replicapool1_beta2-cli/README.md b/gen/replicapool1_beta2-cli/README.md index 1b0eaccdd6..6456b6416b 100644 --- 
a/gen/replicapool1_beta2-cli/README.md +++ b/gen/replicapool1_beta2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *replicapool* API at revision *20160512*. The CLI is at version *4.0.1*. +This documentation was generated from the *replicapool* API at revision *20160512*. The CLI is at version *5.0.2*. ```bash replicapool1-beta2 [options] diff --git a/gen/replicapool1_beta2-cli/mkdocs.yml b/gen/replicapool1_beta2-cli/mkdocs.yml index 0bf0b61468..b089250b7f 100644 --- a/gen/replicapool1_beta2-cli/mkdocs.yml +++ b/gen/replicapool1_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: replicapool v4.0.1+20160512 +site_name: replicapool v5.0.2+20160512 site_url: http://byron.github.io/google-apis-rs/google-replicapool1_beta2-cli site_description: A complete library to interact with replicapool (protocol v1beta2) @@ -7,20 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/replicapool1_bet docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['instance-group-managers_abandon-instances.md', 'Instance Group Managers', 'Abandon Instances'] -- ['instance-group-managers_delete.md', 'Instance Group Managers', 'Delete'] -- ['instance-group-managers_delete-instances.md', 'Instance Group Managers', 'Delete Instances'] -- ['instance-group-managers_get.md', 'Instance Group Managers', 'Get'] -- ['instance-group-managers_insert.md', 'Instance Group Managers', 'Insert'] -- ['instance-group-managers_list.md', 'Instance Group Managers', 'List'] -- ['instance-group-managers_recreate-instances.md', 'Instance Group Managers', 'Recreate Instances'] -- ['instance-group-managers_resize.md', 'Instance Group Managers', 'Resize'] -- ['instance-group-managers_set-instance-template.md', 'Instance Group Managers', 'Set Instance Template'] -- ['instance-group-managers_set-target-pools.md', 'Instance Group Managers', 'Set Target Pools'] -- 
['zone-operations_get.md', 'Zone Operations', 'Get'] -- ['zone-operations_list.md', 'Zone Operations', 'List'] +nav: +- Home: 'index.md' +- 'Instance Group Managers': + - 'Abandon Instances': 'instance-group-managers_abandon-instances.md' + - 'Delete': 'instance-group-managers_delete.md' + - 'Delete Instances': 'instance-group-managers_delete-instances.md' + - 'Get': 'instance-group-managers_get.md' + - 'Insert': 'instance-group-managers_insert.md' + - 'List': 'instance-group-managers_list.md' + - 'Recreate Instances': 'instance-group-managers_recreate-instances.md' + - 'Resize': 'instance-group-managers_resize.md' + - 'Set Instance Template': 'instance-group-managers_set-instance-template.md' + - 'Set Target Pools': 'instance-group-managers_set-target-pools.md' +- 'Zone Operations': + - 'Get': 'zone-operations_get.md' + - 'List': 'zone-operations_list.md' theme: readthedocs diff --git a/gen/replicapool1_beta2-cli/src/client.rs b/gen/replicapool1_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/replicapool1_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/replicapool1_beta2-cli/src/main.rs b/gen/replicapool1_beta2-cli/src/main.rs index ccd6eb331c..e19a7a63c1 100644 --- a/gen/replicapool1_beta2-cli/src/main.rs +++ b/gen/replicapool1_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_replicapool1_beta2::{api, Error, oauth2}; +use google_replicapool1_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -433,7 +432,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -856,7 +855,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1483,7 +1482,7 @@ async fn main() { let mut app = App::new("replicapool1-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20160512") + .version("5.0.2+20160512") .about("[Deprecated. 
Please use Instance Group Manager in Compute API] Provides groups of homogenous Compute Engine instances.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_replicapool1_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/replicapool1_beta2/Cargo.toml b/gen/replicapool1_beta2/Cargo.toml index d317a9f981..028d6ac73f 100644 --- a/gen/replicapool1_beta2/Cargo.toml +++ b/gen/replicapool1_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-replicapool1_beta2" -version = "5.0.2-beta-1+20160512" +version = "5.0.2+20160512" authors = ["Sebastian Thiel "] description = "A complete library to interact with replicapool (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/replicapool1_beta2" homepage = "https://developers.google.com/compute/docs/instance-groups/manager/v1beta2" -documentation = "https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512" +documentation = "https://docs.rs/google-replicapool1_beta2/5.0.2+20160512" license = "MIT" keywords = ["replicapool", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/replicapool1_beta2/README.md b/gen/replicapool1_beta2/README.md index 55bf210bf9..4a7bfb1cff 100644 --- a/gen/replicapool1_beta2/README.md +++ b/gen/replicapool1_beta2/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-replicapool1_beta2` library allows access to all features of the *Google replicapool* service. -This documentation was generated from *replicapool* crate version *5.0.2-beta-1+20160512*, where *20160512* is the exact revision of the *replicapool:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *replicapool* crate version *5.0.2+20160512*, where *20160512* is the exact revision of the *replicapool:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *replicapool* *v1_beta2* API can be found at the [official documentation site](https://developers.google.com/compute/docs/instance-groups/manager/v1beta2). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/Replicapool) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/Replicapool) ... -* [instance group managers](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManager) - * [*abandon instances*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerAbandonInstanceCall), [*delete*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerDeleteCall), [*delete instances*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerDeleteInstanceCall), [*get*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerGetCall), [*insert*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerInsertCall), [*list*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerListCall), [*recreate instances*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerRecreateInstanceCall), [*resize*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerResizeCall), [*set instance 
template*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerSetInstanceTemplateCall) and [*set target pools*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::InstanceGroupManagerSetTargetPoolCall) +* [instance group managers](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManager) + * [*abandon instances*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerAbandonInstanceCall), [*delete*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerDeleteCall), [*delete instances*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerDeleteInstanceCall), [*get*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerGetCall), [*insert*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerInsertCall), [*list*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerListCall), [*recreate instances*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerRecreateInstanceCall), [*resize*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerResizeCall), [*set instance template*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerSetInstanceTemplateCall) and [*set target pools*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::InstanceGroupManagerSetTargetPoolCall) * zone operations - * 
[*get*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::ZoneOperationGetCall) and [*list*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/api::ZoneOperationListCall) + * [*get*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::ZoneOperationGetCall) and [*list*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/api::ZoneOperationListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/Replicapool)** +* **[Hub](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/Replicapool)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::Part)** + * **[Parts](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-replicapool1_beta2/5.0.2-beta-1+20160512/google_replicapool1_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-replicapool1_beta2/5.0.2+20160512/google_replicapool1_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/replicapool1_beta2/src/api.rs b/gen/replicapool1_beta2/src/api.rs index 9ee0a1f83d..880c3872bc 100644 --- a/gen/replicapool1_beta2/src/api.rs +++ b/gen/replicapool1_beta2/src/api.rs @@ -134,7 +134,7 @@ impl<'a, S> Replicapool { Replicapool { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/replicapool/v1beta2/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -148,7 +148,7 @@ impl<'a, S> Replicapool { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/replicapool1_beta2/src/client.rs b/gen/replicapool1_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/replicapool1_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/replicapool1_beta2/src/lib.rs b/gen/replicapool1_beta2/src/lib.rs index a2a77d97ed..525ee3f119 100644 --- a/gen/replicapool1_beta2/src/lib.rs +++ b/gen/replicapool1_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *replicapool* crate version *5.0.2-beta-1+20160512*, where *20160512* is the exact revision of the *replicapool:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *replicapool* crate version *5.0.2+20160512*, where *20160512* is the exact revision of the *replicapool:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *replicapool* *v1_beta2* API can be found at the //! [official documentation site](https://developers.google.com/compute/docs/instance-groups/manager/v1beta2). diff --git a/gen/replicapoolupdater1_beta1-cli/Cargo.toml b/gen/replicapoolupdater1_beta1-cli/Cargo.toml index 5bd1e17bcc..e26eeb25c9 100644 --- a/gen/replicapoolupdater1_beta1-cli/Cargo.toml +++ b/gen/replicapoolupdater1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-replicapoolupdater1_beta1-cli" -version = "4.0.1+20161003" +version = "5.0.2+20161003" authors = ["Sebastian Thiel "] description = "A complete library to interact with replicapoolupdater (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/replicapoolupdater1_beta1-cli" @@ -20,13 +20,13 @@ name = "replicapoolupdater1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-replicapoolupdater1_beta1] path = "../replicapoolupdater1_beta1" -version = "4.0.1+20161003" +version = "5.0.2+20161003" + diff --git a/gen/replicapoolupdater1_beta1-cli/README.md 
b/gen/replicapoolupdater1_beta1-cli/README.md index 81eda8a785..ed0e129efa 100644 --- a/gen/replicapoolupdater1_beta1-cli/README.md +++ b/gen/replicapoolupdater1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *replicapoolupdater* API at revision *20161003*. The CLI is at version *4.0.1*. +This documentation was generated from the *replicapoolupdater* API at revision *20161003*. The CLI is at version *5.0.2*. ```bash replicapoolupdater1-beta1 [options] diff --git a/gen/replicapoolupdater1_beta1-cli/mkdocs.yml b/gen/replicapoolupdater1_beta1-cli/mkdocs.yml index 47b8e5db25..054be2ff63 100644 --- a/gen/replicapoolupdater1_beta1-cli/mkdocs.yml +++ b/gen/replicapoolupdater1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: replicapoolupdater v4.0.1+20161003 +site_name: replicapoolupdater v5.0.2+20161003 site_url: http://byron.github.io/google-apis-rs/google-replicapoolupdater1_beta1-cli site_description: A complete library to interact with replicapoolupdater (protocol v1beta1) @@ -7,18 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/replicapoolupdat docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['rolling-updates_cancel.md', 'Rolling Updates', 'Cancel'] -- ['rolling-updates_get.md', 'Rolling Updates', 'Get'] -- ['rolling-updates_insert.md', 'Rolling Updates', 'Insert'] -- ['rolling-updates_list.md', 'Rolling Updates', 'List'] -- ['rolling-updates_list-instance-updates.md', 'Rolling Updates', 'List Instance Updates'] -- ['rolling-updates_pause.md', 'Rolling Updates', 'Pause'] -- ['rolling-updates_resume.md', 'Rolling Updates', 'Resume'] -- ['rolling-updates_rollback.md', 'Rolling Updates', 'Rollback'] -- ['zone-operations_get.md', 'Zone Operations', 'Get'] -- ['zone-operations_list.md', 'Zone Operations', 'List'] +nav: +- Home: 'index.md' +- 'Rolling Updates': + - 'Cancel': 'rolling-updates_cancel.md' + 
- 'Get': 'rolling-updates_get.md' + - 'Insert': 'rolling-updates_insert.md' + - 'List': 'rolling-updates_list.md' + - 'List Instance Updates': 'rolling-updates_list-instance-updates.md' + - 'Pause': 'rolling-updates_pause.md' + - 'Resume': 'rolling-updates_resume.md' + - 'Rollback': 'rolling-updates_rollback.md' +- 'Zone Operations': + - 'Get': 'zone-operations_get.md' + - 'List': 'zone-operations_list.md' theme: readthedocs diff --git a/gen/replicapoolupdater1_beta1-cli/src/client.rs b/gen/replicapoolupdater1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/replicapoolupdater1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/replicapoolupdater1_beta1-cli/src/main.rs b/gen/replicapoolupdater1_beta1-cli/src/main.rs index 8e53e67a86..8c89748f41 100644 --- a/gen/replicapoolupdater1_beta1-cli/src/main.rs +++ b/gen/replicapoolupdater1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_replicapoolupdater1_beta1::{api, Error, oauth2}; +use google_replicapoolupdater1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -268,7 +267,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -330,7 +329,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -600,7 +599,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -1111,7 +1110,7 @@ async fn main() { let mut app = App::new("replicapoolupdater1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20161003") + .version("5.0.2+20161003") .about("[Deprecated. Please use compute.instanceGroupManagers.update method. replicapoolupdater API will be disabled after December 30th, 2016] Updates groups of Compute Engine instances.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_replicapoolupdater1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/replicapoolupdater1_beta1/Cargo.toml b/gen/replicapoolupdater1_beta1/Cargo.toml index 837e315d15..333f642244 100644 --- a/gen/replicapoolupdater1_beta1/Cargo.toml +++ b/gen/replicapoolupdater1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-replicapoolupdater1_beta1" -version = "5.0.2-beta-1+20161003" +version = "5.0.2+20161003" authors = ["Sebastian Thiel "] description = "A complete library to interact with replicapoolupdater (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/replicapoolupdater1_beta1" homepage = "https://cloud.google.com/compute/docs/instance-groups/manager/#applying_rolling_updates_using_the_updater_service" -documentation = "https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003" +documentation = "https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003" license = "MIT" keywords = ["replicapoolupdater", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/replicapoolupdater1_beta1/README.md b/gen/replicapoolupdater1_beta1/README.md index cbe782980e..575dad71f7 100644 --- a/gen/replicapoolupdater1_beta1/README.md +++ b/gen/replicapoolupdater1_beta1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-replicapoolupdater1_beta1` library allows access to all features of the *Google replicapoolupdater* service. 
-This documentation was generated from *replicapoolupdater* crate version *5.0.2-beta-1+20161003*, where *20161003* is the exact revision of the *replicapoolupdater:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *replicapoolupdater* crate version *5.0.2+20161003*, where *20161003* is the exact revision of the *replicapoolupdater:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *replicapoolupdater* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/compute/docs/instance-groups/manager/#applying_rolling_updates_using_the_updater_service). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/Replicapoolupdater) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/Replicapoolupdater) ... 
-* [rolling updates](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdate) - * [*cancel*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateCancelCall), [*get*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateGetCall), [*insert*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateInsertCall), [*list*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateListCall), [*list instance updates*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateListInstanceUpdateCall), [*pause*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdatePauseCall), [*resume*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateResumeCall) and [*rollback*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateRollbackCall) +* [rolling updates](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdate) + * [*cancel*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateCancelCall), [*get*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateGetCall), [*insert*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateInsertCall), 
[*list*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateListCall), [*list instance updates*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateListInstanceUpdateCall), [*pause*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdatePauseCall), [*resume*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateResumeCall) and [*rollback*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::RollingUpdateRollbackCall) * zone operations - * [*get*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::ZoneOperationGetCall) and [*list*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/api::ZoneOperationListCall) + * [*get*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::ZoneOperationGetCall) and [*list*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/api::ZoneOperationListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/Replicapoolupdater)** +* **[Hub](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/Replicapoolupdater)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::MethodsBuilder) which in turn - allow access to 
individual [*Call Builders*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2-beta-1+20161003/google_replicapoolupdater1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-replicapoolupdater1_beta1/5.0.2+20161003/google_replicapoolupdater1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/replicapoolupdater1_beta1/src/api.rs b/gen/replicapoolupdater1_beta1/src/api.rs index 9b47873411..0c1cf51464 100644 --- a/gen/replicapoolupdater1_beta1/src/api.rs +++ b/gen/replicapoolupdater1_beta1/src/api.rs @@ -134,7 +134,7 @@ impl<'a, S> Replicapoolupdater { Replicapoolupdater { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/replicapoolupdater/v1beta1/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -148,7 +148,7 @@ impl<'a, S> Replicapoolupdater { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/replicapoolupdater1_beta1/src/client.rs b/gen/replicapoolupdater1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/replicapoolupdater1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/replicapoolupdater1_beta1/src/lib.rs b/gen/replicapoolupdater1_beta1/src/lib.rs index 520f633e2f..0f72124adc 100644 --- a/gen/replicapoolupdater1_beta1/src/lib.rs +++ b/gen/replicapoolupdater1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *replicapoolupdater* crate version *5.0.2-beta-1+20161003*, where *20161003* is the exact revision of the *replicapoolupdater:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *replicapoolupdater* crate version *5.0.2+20161003*, where *20161003* is the exact revision of the *replicapoolupdater:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *replicapoolupdater* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/compute/docs/instance-groups/manager/#applying_rolling_updates_using_the_updater_service). diff --git a/gen/reseller1_sandbox-cli/Cargo.toml b/gen/reseller1_sandbox-cli/Cargo.toml index fad7001123..54d45b114a 100644 --- a/gen/reseller1_sandbox-cli/Cargo.toml +++ b/gen/reseller1_sandbox-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-reseller1_sandbox-cli" -version = "4.0.1+20160329" +version = "5.0.2+20160329" authors = ["Sebastian Thiel "] description = "A complete library to interact with reseller (protocol v1sandbox)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/reseller1_sandbox-cli" @@ -20,13 +20,13 @@ name = "reseller1-sandbox" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-reseller1_sandbox] path = "../reseller1_sandbox" -version = "4.0.1+20160329" +version = "5.0.2+20160329" + diff --git a/gen/reseller1_sandbox-cli/README.md b/gen/reseller1_sandbox-cli/README.md index 
19332cd893..4b6e19939d 100644 --- a/gen/reseller1_sandbox-cli/README.md +++ b/gen/reseller1_sandbox-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *reseller* API at revision *20160329*. The CLI is at version *4.0.1*. +This documentation was generated from the *reseller* API at revision *20160329*. The CLI is at version *5.0.2*. ```bash reseller1-sandbox [options] diff --git a/gen/reseller1_sandbox-cli/mkdocs.yml b/gen/reseller1_sandbox-cli/mkdocs.yml index 6247780913..0d810f2446 100644 --- a/gen/reseller1_sandbox-cli/mkdocs.yml +++ b/gen/reseller1_sandbox-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: reseller v4.0.1+20160329 +site_name: reseller v5.0.2+20160329 site_url: http://byron.github.io/google-apis-rs/google-reseller1_sandbox-cli site_description: A complete library to interact with reseller (protocol v1sandbox) @@ -7,22 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/reseller1_sandbo docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['customers_get.md', 'Customers', 'Get'] -- ['customers_insert.md', 'Customers', 'Insert'] -- ['customers_patch.md', 'Customers', 'Patch'] -- ['customers_update.md', 'Customers', 'Update'] -- ['subscriptions_activate.md', 'Subscriptions', 'Activate'] -- ['subscriptions_change-plan.md', 'Subscriptions', 'Change Plan'] -- ['subscriptions_change-renewal-settings.md', 'Subscriptions', 'Change Renewal Settings'] -- ['subscriptions_change-seats.md', 'Subscriptions', 'Change Seats'] -- ['subscriptions_delete.md', 'Subscriptions', 'Delete'] -- ['subscriptions_get.md', 'Subscriptions', 'Get'] -- ['subscriptions_insert.md', 'Subscriptions', 'Insert'] -- ['subscriptions_list.md', 'Subscriptions', 'List'] -- ['subscriptions_start-paid-service.md', 'Subscriptions', 'Start Paid Service'] -- ['subscriptions_suspend.md', 'Subscriptions', 'Suspend'] +nav: +- Home: 'index.md' +- 
'Customers': + - 'Get': 'customers_get.md' + - 'Insert': 'customers_insert.md' + - 'Patch': 'customers_patch.md' + - 'Update': 'customers_update.md' +- 'Subscriptions': + - 'Activate': 'subscriptions_activate.md' + - 'Change Plan': 'subscriptions_change-plan.md' + - 'Change Renewal Settings': 'subscriptions_change-renewal-settings.md' + - 'Change Seats': 'subscriptions_change-seats.md' + - 'Delete': 'subscriptions_delete.md' + - 'Get': 'subscriptions_get.md' + - 'Insert': 'subscriptions_insert.md' + - 'List': 'subscriptions_list.md' + - 'Start Paid Service': 'subscriptions_start-paid-service.md' + - 'Suspend': 'subscriptions_suspend.md' theme: readthedocs diff --git a/gen/reseller1_sandbox-cli/src/client.rs b/gen/reseller1_sandbox-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/reseller1_sandbox-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/reseller1_sandbox-cli/src/main.rs b/gen/reseller1_sandbox-cli/src/main.rs index 00a93ddb2e..eeb6baabde 100644 --- a/gen/reseller1_sandbox-cli/src/main.rs +++ b/gen/reseller1_sandbox-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_reseller1_sandbox::{api, Error, oauth2}; +use google_reseller1_sandbox::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -948,7 +947,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "customer-name-prefix" => { call = call.customer_name_prefix(value.unwrap_or("")); @@ -1639,7 +1638,7 @@ async fn main() { let mut app = App::new("reseller1-sandbox") .author("Sebastian Thiel ") - .version("4.0.1+20160329") + .version("5.0.2+20160329") .about("Creates and manages your customers and their subscriptions.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_reseller1_sandbox_cli") .arg(Arg::with_name("url") diff --git a/gen/reseller1_sandbox/Cargo.toml b/gen/reseller1_sandbox/Cargo.toml index 43129fd580..2fd6fca176 100644 --- a/gen/reseller1_sandbox/Cargo.toml +++ b/gen/reseller1_sandbox/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-reseller1_sandbox" -version = "5.0.2-beta-1+20160329" 
+version = "5.0.2+20160329" authors = ["Sebastian Thiel "] description = "A complete library to interact with reseller (protocol v1sandbox)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/reseller1_sandbox" homepage = "https://developers.google.com/google-apps/reseller/" -documentation = "https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329" +documentation = "https://docs.rs/google-reseller1_sandbox/5.0.2+20160329" license = "MIT" keywords = ["reseller", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/reseller1_sandbox/README.md b/gen/reseller1_sandbox/README.md index 447112a4c0..3f8c1f1fd7 100644 --- a/gen/reseller1_sandbox/README.md +++ b/gen/reseller1_sandbox/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-reseller1_sandbox` library allows access to all features of the *Google reseller* service. -This documentation was generated from *reseller* crate version *5.0.2-beta-1+20160329*, where *20160329* is the exact revision of the *reseller:v1sandbox* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *reseller* crate version *5.0.2+20160329*, where *20160329* is the exact revision of the *reseller:v1sandbox* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *reseller* *v1_sandbox* API can be found at the [official documentation site](https://developers.google.com/google-apps/reseller/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/Reseller) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/Reseller) ... 
-* [customers](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::Customer) - * [*get*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::CustomerGetCall), [*insert*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::CustomerInsertCall), [*patch*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::CustomerPatchCall) and [*update*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::CustomerUpdateCall) -* [subscriptions](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::Subscription) - * [*activate*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionActivateCall), [*change plan*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionChangePlanCall), [*change renewal settings*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionChangeRenewalSettingCall), [*change seats*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionChangeSeatCall), [*delete*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionDeleteCall), [*get*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionGetCall), [*insert*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionInsertCall), [*list*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionListCall), [*start paid service*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionStartPaidServiceCall) and 
[*suspend*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/api::SubscriptionSuspendCall) +* [customers](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::Customer) + * [*get*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::CustomerGetCall), [*insert*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::CustomerInsertCall), [*patch*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::CustomerPatchCall) and [*update*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::CustomerUpdateCall) +* [subscriptions](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::Subscription) + * [*activate*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionActivateCall), [*change plan*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionChangePlanCall), [*change renewal settings*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionChangeRenewalSettingCall), [*change seats*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionChangeSeatCall), [*delete*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionDeleteCall), [*get*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionGetCall), [*insert*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionInsertCall), [*list*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionListCall), [*start paid service*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionStartPaidServiceCall) 
and [*suspend*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/api::SubscriptionSuspendCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/Reseller)** +* **[Hub](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/Reseller)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::CallBuilder) -* **[Resources](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::CallBuilder) +* **[Resources](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::Part)** + * **[Parts](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -134,17 +134,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -154,29 +154,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::Delegate) to the -[Method Builder](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::Delegate) to the +[Method Builder](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::RequestValue) and -[decodable](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::RequestValue) and +[decodable](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-reseller1_sandbox/5.0.2-beta-1+20160329/google_reseller1_sandbox/client::RequestValue) are moved +* [request values](https://docs.rs/google-reseller1_sandbox/5.0.2+20160329/google_reseller1_sandbox/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/reseller1_sandbox/src/api.rs b/gen/reseller1_sandbox/src/api.rs index 7b9e743126..51c246d82b 100644 --- a/gen/reseller1_sandbox/src/api.rs +++ b/gen/reseller1_sandbox/src/api.rs @@ -128,7 +128,7 @@ impl<'a, S> Reseller { Reseller { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/apps/reseller/v1sandbox/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> Reseller { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/reseller1_sandbox/src/client.rs b/gen/reseller1_sandbox/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/reseller1_sandbox/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/reseller1_sandbox/src/lib.rs b/gen/reseller1_sandbox/src/lib.rs index 0c4e86e4da..69ed338920 100644 --- a/gen/reseller1_sandbox/src/lib.rs +++ b/gen/reseller1_sandbox/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *reseller* crate version *5.0.2-beta-1+20160329*, where *20160329* is the exact revision of the *reseller:v1sandbox* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *reseller* crate version *5.0.2+20160329*, where *20160329* is the exact revision of the *reseller:v1sandbox* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *reseller* *v1_sandbox* API can be found at the //! [official documentation site](https://developers.google.com/google-apps/reseller/). diff --git a/gen/resourcesettings1-cli/Cargo.toml b/gen/resourcesettings1-cli/Cargo.toml index 975c89390c..5ba46508a1 100644 --- a/gen/resourcesettings1-cli/Cargo.toml +++ b/gen/resourcesettings1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-resourcesettings1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Resource Settings (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/resourcesettings1-cli" @@ -20,13 +20,13 @@ name = "resourcesettings1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-resourcesettings1] path = "../resourcesettings1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/resourcesettings1-cli/README.md b/gen/resourcesettings1-cli/README.md index c5924d2ae8..c8a5dec09e 100644 --- a/gen/resourcesettings1-cli/README.md +++ 
b/gen/resourcesettings1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Resource Settings* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Resource Settings* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash resourcesettings1 [options] diff --git a/gen/resourcesettings1-cli/mkdocs.yml b/gen/resourcesettings1-cli/mkdocs.yml index ff46e13db6..6bfff8dba1 100644 --- a/gen/resourcesettings1-cli/mkdocs.yml +++ b/gen/resourcesettings1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Resource Settings v4.0.1+20220305 +site_name: Resource Settings v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-resourcesettings1-cli site_description: A complete library to interact with Resource Settings (protocol v1) @@ -7,17 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/resourcesettings docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_settings-get.md', 'Folders', 'Settings Get'] -- ['folders_settings-list.md', 'Folders', 'Settings List'] -- ['folders_settings-patch.md', 'Folders', 'Settings Patch'] -- ['organizations_settings-get.md', 'Organizations', 'Settings Get'] -- ['organizations_settings-list.md', 'Organizations', 'Settings List'] -- ['organizations_settings-patch.md', 'Organizations', 'Settings Patch'] -- ['projects_settings-get.md', 'Projects', 'Settings Get'] -- ['projects_settings-list.md', 'Projects', 'Settings List'] -- ['projects_settings-patch.md', 'Projects', 'Settings Patch'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Settings Get': 'folders_settings-get.md' + - 'Settings List': 'folders_settings-list.md' + - 'Settings Patch': 'folders_settings-patch.md' +- 'Organizations': + - 'Settings Get': 'organizations_settings-get.md' + - 'Settings List': 'organizations_settings-list.md' + - 'Settings Patch': 
'organizations_settings-patch.md' +- 'Projects': + - 'Settings Get': 'projects_settings-get.md' + - 'Settings List': 'projects_settings-list.md' + - 'Settings Patch': 'projects_settings-patch.md' theme: readthedocs diff --git a/gen/resourcesettings1-cli/src/client.rs b/gen/resourcesettings1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/resourcesettings1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/resourcesettings1-cli/src/main.rs b/gen/resourcesettings1-cli/src/main.rs index 97e43f75c4..c75a5f867c 100644 --- a/gen/resourcesettings1-cli/src/main.rs +++ b/gen/resourcesettings1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_resourcesettings1::{api, Error, oauth2}; +use google_resourcesettings1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -120,7 +119,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -346,7 +345,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -572,7 +571,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1087,7 +1086,7 @@ async fn main() { let mut app = App::new("resourcesettings1") .author("Sebastian Thiel ") - 
.version("4.0.1+20220305") + .version("5.0.2+20230123") .about("The Resource Settings API allows users to control and modify the behavior of their GCP resources (e.g., VM, firewall, Project, etc.) across the Cloud Resource Hierarchy.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_resourcesettings1_cli") .arg(Arg::with_name("url") diff --git a/gen/resourcesettings1/Cargo.toml b/gen/resourcesettings1/Cargo.toml index 32f389227f..84d821c56b 100644 --- a/gen/resourcesettings1/Cargo.toml +++ b/gen/resourcesettings1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-resourcesettings1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Resource Settings (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/resourcesettings1" homepage = "https://cloud.google.com/resource-manager/docs/resource-settings/overview" -documentation = "https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-resourcesettings1/5.0.2+20230123" license = "MIT" keywords = ["resourcesettings", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/resourcesettings1/README.md b/gen/resourcesettings1/README.md index 3e21c7db24..adb971749e 100644 --- a/gen/resourcesettings1/README.md +++ b/gen/resourcesettings1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-resourcesettings1` library allows access to all features of the *Google Resource Settings* service. -This documentation was generated from *Resource Settings* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *resourcesettings:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Resource Settings* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *resourcesettings:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Resource Settings* *v1* API can be found at the [official documentation site](https://cloud.google.com/resource-manager/docs/resource-settings/overview). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/ResourceSettings) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/ResourceSettings) ... * folders - * [*settings get*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::FolderSettingGetCall), [*settings list*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::FolderSettingListCall) and [*settings patch*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::FolderSettingPatchCall) + * [*settings get*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::FolderSettingGetCall), [*settings list*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::FolderSettingListCall) and [*settings patch*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::FolderSettingPatchCall) * organizations - * [*settings get*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::OrganizationSettingGetCall), [*settings list*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::OrganizationSettingListCall) and [*settings 
patch*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::OrganizationSettingPatchCall) + * [*settings get*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::OrganizationSettingGetCall), [*settings list*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::OrganizationSettingListCall) and [*settings patch*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::OrganizationSettingPatchCall) * projects - * [*settings get*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::ProjectSettingGetCall), [*settings list*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::ProjectSettingListCall) and [*settings patch*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/api::ProjectSettingPatchCall) + * [*settings get*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::ProjectSettingGetCall), [*settings list*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::ProjectSettingListCall) and [*settings patch*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/api::ProjectSettingPatchCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/ResourceSettings)** +* **[Hub](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/ResourceSettings)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::MethodsBuilder) which in turn - allow access to 
individual [*Call Builders*](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::CallBuilder) -* **[Resources](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::CallBuilder) +* **[Resources](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::Part)** + * **[Parts](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::Delegate) to the -[Method Builder](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::Delegate) to the +[Method Builder](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::RequestValue) and -[decodable](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::RequestValue) and +[decodable](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-resourcesettings1/5.0.2-beta-1+20230123/google_resourcesettings1/client::RequestValue) are moved +* [request values](https://docs.rs/google-resourcesettings1/5.0.2+20230123/google_resourcesettings1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/resourcesettings1/src/api.rs b/gen/resourcesettings1/src/api.rs index 4fa38b58b0..0faf5a8619 100644 --- a/gen/resourcesettings1/src/api.rs +++ b/gen/resourcesettings1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> ResourceSettings { ResourceSettings { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://resourcesettings.googleapis.com/".to_string(), _root_url: "https://resourcesettings.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> ResourceSettings { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/resourcesettings1/src/client.rs b/gen/resourcesettings1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/resourcesettings1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/resourcesettings1/src/lib.rs b/gen/resourcesettings1/src/lib.rs index a3156e7f4e..3a30201d74 100644 --- a/gen/resourcesettings1/src/lib.rs +++ b/gen/resourcesettings1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Resource Settings* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *resourcesettings:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Resource Settings* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *resourcesettings:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Resource Settings* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/resource-manager/docs/resource-settings/overview). diff --git a/gen/resourceviews1_beta2-cli/Cargo.toml b/gen/resourceviews1_beta2-cli/Cargo.toml index 3816fbef51..0409b187ca 100644 --- a/gen/resourceviews1_beta2-cli/Cargo.toml +++ b/gen/resourceviews1_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-resourceviews1_beta2-cli" -version = "4.0.1+20160512" +version = "5.0.2+20160512" authors = ["Sebastian Thiel "] description = "A complete library to interact with resourceviews (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/resourceviews1_beta2-cli" @@ -20,13 +20,13 @@ name = "resourceviews1-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-resourceviews1_beta2] path = "../resourceviews1_beta2" -version = "4.0.1+20160512" +version = "5.0.2+20160512" + diff --git a/gen/resourceviews1_beta2-cli/README.md b/gen/resourceviews1_beta2-cli/README.md index 553f50a958..f996902315 
100644 --- a/gen/resourceviews1_beta2-cli/README.md +++ b/gen/resourceviews1_beta2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *resourceviews* API at revision *20160512*. The CLI is at version *4.0.1*. +This documentation was generated from the *resourceviews* API at revision *20160512*. The CLI is at version *5.0.2*. ```bash resourceviews1-beta2 [options] diff --git a/gen/resourceviews1_beta2-cli/mkdocs.yml b/gen/resourceviews1_beta2-cli/mkdocs.yml index 58334f1a65..5af8516a48 100644 --- a/gen/resourceviews1_beta2-cli/mkdocs.yml +++ b/gen/resourceviews1_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: resourceviews v4.0.1+20160512 +site_name: resourceviews v5.0.2+20160512 site_url: http://byron.github.io/google-apis-rs/google-resourceviews1_beta2-cli site_description: A complete library to interact with resourceviews (protocol v1beta2) @@ -7,19 +7,21 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/resourceviews1_b docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['zone-operations_get.md', 'Zone Operations', 'Get'] -- ['zone-operations_list.md', 'Zone Operations', 'List'] -- ['zone-views_add-resources.md', 'Zone Views', 'Add Resources'] -- ['zone-views_delete.md', 'Zone Views', 'Delete'] -- ['zone-views_get.md', 'Zone Views', 'Get'] -- ['zone-views_get-service.md', 'Zone Views', 'Get Service'] -- ['zone-views_insert.md', 'Zone Views', 'Insert'] -- ['zone-views_list.md', 'Zone Views', 'List'] -- ['zone-views_list-resources.md', 'Zone Views', 'List Resources'] -- ['zone-views_remove-resources.md', 'Zone Views', 'Remove Resources'] -- ['zone-views_set-service.md', 'Zone Views', 'Set Service'] +nav: +- Home: 'index.md' +- 'Zone Operations': + - 'Get': 'zone-operations_get.md' + - 'List': 'zone-operations_list.md' +- 'Zone Views': + - 'Add Resources': 'zone-views_add-resources.md' + - 'Delete': 
'zone-views_delete.md' + - 'Get': 'zone-views_get.md' + - 'Get Service': 'zone-views_get-service.md' + - 'Insert': 'zone-views_insert.md' + - 'List': 'zone-views_list.md' + - 'List Resources': 'zone-views_list-resources.md' + - 'Remove Resources': 'zone-views_remove-resources.md' + - 'Set Service': 'zone-views_set-service.md' theme: readthedocs diff --git a/gen/resourceviews1_beta2-cli/src/client.rs b/gen/resourceviews1_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/resourceviews1_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/resourceviews1_beta2-cli/src/main.rs b/gen/resourceviews1_beta2-cli/src/main.rs index 368d771b32..64dbc63a36 100644 --- a/gen/resourceviews1_beta2-cli/src/main.rs +++ b/gen/resourceviews1_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_resourceviews1_beta2::{api, Error, oauth2}; +use google_resourceviews1_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -514,7 +513,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -576,7 +575,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "list-state" => { call = call.list_state(value.unwrap_or("")); @@ -1316,7 +1315,7 
@@ async fn main() { let mut app = App::new("resourceviews1-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20160512") + .version("5.0.2+20160512") .about("The Resource View API allows users to create and manage logical sets of Google Compute Engine instances.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_resourceviews1_beta2_cli") .arg(Arg::with_name("url") diff --git a/gen/resourceviews1_beta2/Cargo.toml b/gen/resourceviews1_beta2/Cargo.toml index 161c36c2d1..2d3cfb2d59 100644 --- a/gen/resourceviews1_beta2/Cargo.toml +++ b/gen/resourceviews1_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-resourceviews1_beta2" -version = "5.0.2-beta-1+20160512" +version = "5.0.2+20160512" authors = ["Sebastian Thiel "] description = "A complete library to interact with resourceviews (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/resourceviews1_beta2" homepage = "https://developers.google.com/compute/" -documentation = "https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512" +documentation = "https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512" license = "MIT" keywords = ["resourceviews", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/resourceviews1_beta2/README.md b/gen/resourceviews1_beta2/README.md index 26cd769bdc..97574c18ed 100644 --- a/gen/resourceviews1_beta2/README.md +++ b/gen/resourceviews1_beta2/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-resourceviews1_beta2` library allows access to all features of the *Google resourceviews* service. -This documentation was generated from *resourceviews* crate version *5.0.2-beta-1+20160512*, where *20160512* is the exact revision of the *resourceviews:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *resourceviews* crate version *5.0.2+20160512*, where *20160512* is the exact revision of the *resourceviews:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *resourceviews* *v1_beta2* API can be found at the [official documentation site](https://developers.google.com/compute/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/Resourceviews) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/Resourceviews) ... * zone operations - * [*get*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneOperationGetCall) and [*list*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneOperationListCall) + * [*get*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneOperationGetCall) and [*list*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneOperationListCall) * zone views - * [*add resources*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewAddResourceCall), [*delete*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewDeleteCall), [*get*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewGetCall), [*get service*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewGetServiceCall), [*insert*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewInsertCall), 
[*list*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewListCall), [*list resources*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewListResourceCall), [*remove resources*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewRemoveResourceCall) and [*set service*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/api::ZoneViewSetServiceCall) + * [*add resources*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewAddResourceCall), [*delete*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewDeleteCall), [*get*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewGetCall), [*get service*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewGetServiceCall), [*insert*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewInsertCall), [*list*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewListCall), [*list resources*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewListResourceCall), [*remove resources*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewRemoveResourceCall) and [*set service*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/api::ZoneViewSetServiceCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/Resourceviews)** +* **[Hub](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/Resourceviews)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::Part)** + * **[Parts](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-resourceviews1_beta2/5.0.2-beta-1+20160512/google_resourceviews1_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-resourceviews1_beta2/5.0.2+20160512/google_resourceviews1_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/resourceviews1_beta2/src/api.rs b/gen/resourceviews1_beta2/src/api.rs index 58d254b559..3ce72b3d62 100644 --- a/gen/resourceviews1_beta2/src/api.rs +++ b/gen/resourceviews1_beta2/src/api.rs @@ -145,7 +145,7 @@ impl<'a, S> Resourceviews { Resourceviews { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/resourceviews/v1beta2/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -159,7 +159,7 @@ impl<'a, S> Resourceviews { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/resourceviews1_beta2/src/client.rs b/gen/resourceviews1_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/resourceviews1_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/resourceviews1_beta2/src/lib.rs b/gen/resourceviews1_beta2/src/lib.rs index c6014a63f4..c11362e444 100644 --- a/gen/resourceviews1_beta2/src/lib.rs +++ b/gen/resourceviews1_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *resourceviews* crate version *5.0.2-beta-1+20160512*, where *20160512* is the exact revision of the *resourceviews:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *resourceviews* crate version *5.0.2+20160512*, where *20160512* is the exact revision of the *resourceviews:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *resourceviews* *v1_beta2* API can be found at the //! [official documentation site](https://developers.google.com/compute/). diff --git a/gen/retail2-cli/Cargo.toml b/gen/retail2-cli/Cargo.toml index 526915e73b..b5c98b1cb3 100644 --- a/gen/retail2-cli/Cargo.toml +++ b/gen/retail2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-retail2-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Retail (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/retail2-cli" @@ -20,13 +20,13 @@ name = "retail2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-retail2] path = "../retail2" -version = "4.0.1+20220224" +version = "5.0.2+20230117" + diff --git a/gen/retail2-cli/README.md b/gen/retail2-cli/README.md index 6743d126d0..cd9a606423 100644 --- a/gen/retail2-cli/README.md +++ b/gen/retail2-cli/README.md @@ -25,13 +25,17 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Cloud Retail* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Retail* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash retail2 [options] projects + locations-catalogs-attributes-config-add-catalog-attribute (-r )... [-p ]... [-o ] + locations-catalogs-attributes-config-remove-catalog-attribute (-r )... [-p ]... [-o ] + locations-catalogs-attributes-config-replace-catalog-attribute (-r )... [-p ]... [-o ] locations-catalogs-branches-operations-get [-p ]... [-o ] locations-catalogs-branches-products-add-fulfillment-places (-r )... [-p ]... [-o ] + locations-catalogs-branches-products-add-local-inventories (-r )... [-p ]... [-o ] locations-catalogs-branches-products-create (-r )... [-p ]... [-o ] locations-catalogs-branches-products-delete [-p ]... [-o ] locations-catalogs-branches-products-get [-p ]... [-o ] @@ -39,9 +43,17 @@ retail2 [options] locations-catalogs-branches-products-list [-p ]... [-o ] locations-catalogs-branches-products-patch (-r )... [-p ]... [-o ] locations-catalogs-branches-products-remove-fulfillment-places (-r )... [-p ]... [-o ] + locations-catalogs-branches-products-remove-local-inventories (-r )... [-p ]... [-o ] locations-catalogs-branches-products-set-inventory (-r )... [-p ]... [-o ] locations-catalogs-complete-query [-p ]... [-o ] locations-catalogs-completion-data-import (-r )... [-p ]... [-o ] + locations-catalogs-controls-create (-r )... [-p ]... [-o ] + locations-catalogs-controls-delete [-p ]... [-o ] + locations-catalogs-controls-get [-p ]... [-o ] + locations-catalogs-controls-list [-p ]... [-o ] + locations-catalogs-controls-patch (-r )... [-p ]... [-o ] + locations-catalogs-get-attributes-config [-p ]... [-o ] + locations-catalogs-get-completion-config [-p ]... [-o ] locations-catalogs-get-default-branch [-p ]... [-o ] locations-catalogs-list [-p ]... [-o ] locations-catalogs-operations-get [-p ]... 
[-o ] @@ -49,7 +61,18 @@ retail2 [options] locations-catalogs-patch (-r )... [-p ]... [-o ] locations-catalogs-placements-predict (-r )... [-p ]... [-o ] locations-catalogs-placements-search (-r )... [-p ]... [-o ] + locations-catalogs-serving-configs-add-control (-r )... [-p ]... [-o ] + locations-catalogs-serving-configs-create (-r )... [-p ]... [-o ] + locations-catalogs-serving-configs-delete [-p ]... [-o ] + locations-catalogs-serving-configs-get [-p ]... [-o ] + locations-catalogs-serving-configs-list [-p ]... [-o ] + locations-catalogs-serving-configs-patch (-r )... [-p ]... [-o ] + locations-catalogs-serving-configs-predict (-r )... [-p ]... [-o ] + locations-catalogs-serving-configs-remove-control (-r )... [-p ]... [-o ] + locations-catalogs-serving-configs-search (-r )... [-p ]... [-o ] locations-catalogs-set-default-branch (-r )... [-p ]... [-o ] + locations-catalogs-update-attributes-config (-r )... [-p ]... [-o ] + locations-catalogs-update-completion-config (-r )... [-p ]... [-o ] locations-catalogs-user-events-collect [-p ]... [-o ] locations-catalogs-user-events-import (-r )... [-p ]... [-o ] locations-catalogs-user-events-purge (-r )... [-p ]... 
[-o ] diff --git a/gen/retail2-cli/mkdocs.yml b/gen/retail2-cli/mkdocs.yml index 536dbfcc5c..0464efa991 100644 --- a/gen/retail2-cli/mkdocs.yml +++ b/gen/retail2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Retail v4.0.1+20220224 +site_name: Cloud Retail v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-retail2-cli site_description: A complete library to interact with Cloud Retail (protocol v2) @@ -7,37 +7,61 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/retail2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-catalogs-branches-operations-get.md', 'Projects', 'Locations Catalogs Branches Operations Get'] -- ['projects_locations-catalogs-branches-products-add-fulfillment-places.md', 'Projects', 'Locations Catalogs Branches Products Add Fulfillment Places'] -- ['projects_locations-catalogs-branches-products-create.md', 'Projects', 'Locations Catalogs Branches Products Create'] -- ['projects_locations-catalogs-branches-products-delete.md', 'Projects', 'Locations Catalogs Branches Products Delete'] -- ['projects_locations-catalogs-branches-products-get.md', 'Projects', 'Locations Catalogs Branches Products Get'] -- ['projects_locations-catalogs-branches-products-import.md', 'Projects', 'Locations Catalogs Branches Products Import'] -- ['projects_locations-catalogs-branches-products-list.md', 'Projects', 'Locations Catalogs Branches Products List'] -- ['projects_locations-catalogs-branches-products-patch.md', 'Projects', 'Locations Catalogs Branches Products Patch'] -- ['projects_locations-catalogs-branches-products-remove-fulfillment-places.md', 'Projects', 'Locations Catalogs Branches Products Remove Fulfillment Places'] -- ['projects_locations-catalogs-branches-products-set-inventory.md', 'Projects', 'Locations Catalogs Branches Products Set Inventory'] -- ['projects_locations-catalogs-complete-query.md', 'Projects', 'Locations Catalogs Complete Query'] -- 
['projects_locations-catalogs-completion-data-import.md', 'Projects', 'Locations Catalogs Completion Data Import'] -- ['projects_locations-catalogs-get-default-branch.md', 'Projects', 'Locations Catalogs Get Default Branch'] -- ['projects_locations-catalogs-list.md', 'Projects', 'Locations Catalogs List'] -- ['projects_locations-catalogs-operations-get.md', 'Projects', 'Locations Catalogs Operations Get'] -- ['projects_locations-catalogs-operations-list.md', 'Projects', 'Locations Catalogs Operations List'] -- ['projects_locations-catalogs-patch.md', 'Projects', 'Locations Catalogs Patch'] -- ['projects_locations-catalogs-placements-predict.md', 'Projects', 'Locations Catalogs Placements Predict'] -- ['projects_locations-catalogs-placements-search.md', 'Projects', 'Locations Catalogs Placements Search'] -- ['projects_locations-catalogs-set-default-branch.md', 'Projects', 'Locations Catalogs Set Default Branch'] -- ['projects_locations-catalogs-user-events-collect.md', 'Projects', 'Locations Catalogs User Events Collect'] -- ['projects_locations-catalogs-user-events-import.md', 'Projects', 'Locations Catalogs User Events Import'] -- ['projects_locations-catalogs-user-events-purge.md', 'Projects', 'Locations Catalogs User Events Purge'] -- ['projects_locations-catalogs-user-events-rejoin.md', 'Projects', 'Locations Catalogs User Events Rejoin'] -- ['projects_locations-catalogs-user-events-write.md', 'Projects', 'Locations Catalogs User Events Write'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_operations-list.md', 'Projects', 'Operations List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Catalogs Attributes Config Add Catalog Attribute': 'projects_locations-catalogs-attributes-config-add-catalog-attribute.md' + - 'Locations Catalogs Attributes Config 
Remove Catalog Attribute': 'projects_locations-catalogs-attributes-config-remove-catalog-attribute.md' + - 'Locations Catalogs Attributes Config Replace Catalog Attribute': 'projects_locations-catalogs-attributes-config-replace-catalog-attribute.md' + - 'Locations Catalogs Branches Operations Get': 'projects_locations-catalogs-branches-operations-get.md' + - 'Locations Catalogs Branches Products Add Fulfillment Places': 'projects_locations-catalogs-branches-products-add-fulfillment-places.md' + - 'Locations Catalogs Branches Products Add Local Inventories': 'projects_locations-catalogs-branches-products-add-local-inventories.md' + - 'Locations Catalogs Branches Products Create': 'projects_locations-catalogs-branches-products-create.md' + - 'Locations Catalogs Branches Products Delete': 'projects_locations-catalogs-branches-products-delete.md' + - 'Locations Catalogs Branches Products Get': 'projects_locations-catalogs-branches-products-get.md' + - 'Locations Catalogs Branches Products Import': 'projects_locations-catalogs-branches-products-import.md' + - 'Locations Catalogs Branches Products List': 'projects_locations-catalogs-branches-products-list.md' + - 'Locations Catalogs Branches Products Patch': 'projects_locations-catalogs-branches-products-patch.md' + - 'Locations Catalogs Branches Products Remove Fulfillment Places': 'projects_locations-catalogs-branches-products-remove-fulfillment-places.md' + - 'Locations Catalogs Branches Products Remove Local Inventories': 'projects_locations-catalogs-branches-products-remove-local-inventories.md' + - 'Locations Catalogs Branches Products Set Inventory': 'projects_locations-catalogs-branches-products-set-inventory.md' + - 'Locations Catalogs Complete Query': 'projects_locations-catalogs-complete-query.md' + - 'Locations Catalogs Completion Data Import': 'projects_locations-catalogs-completion-data-import.md' + - 'Locations Catalogs Controls Create': 'projects_locations-catalogs-controls-create.md' + - 'Locations 
Catalogs Controls Delete': 'projects_locations-catalogs-controls-delete.md' + - 'Locations Catalogs Controls Get': 'projects_locations-catalogs-controls-get.md' + - 'Locations Catalogs Controls List': 'projects_locations-catalogs-controls-list.md' + - 'Locations Catalogs Controls Patch': 'projects_locations-catalogs-controls-patch.md' + - 'Locations Catalogs Get Attributes Config': 'projects_locations-catalogs-get-attributes-config.md' + - 'Locations Catalogs Get Completion Config': 'projects_locations-catalogs-get-completion-config.md' + - 'Locations Catalogs Get Default Branch': 'projects_locations-catalogs-get-default-branch.md' + - 'Locations Catalogs List': 'projects_locations-catalogs-list.md' + - 'Locations Catalogs Operations Get': 'projects_locations-catalogs-operations-get.md' + - 'Locations Catalogs Operations List': 'projects_locations-catalogs-operations-list.md' + - 'Locations Catalogs Patch': 'projects_locations-catalogs-patch.md' + - 'Locations Catalogs Placements Predict': 'projects_locations-catalogs-placements-predict.md' + - 'Locations Catalogs Placements Search': 'projects_locations-catalogs-placements-search.md' + - 'Locations Catalogs Serving Configs Add Control': 'projects_locations-catalogs-serving-configs-add-control.md' + - 'Locations Catalogs Serving Configs Create': 'projects_locations-catalogs-serving-configs-create.md' + - 'Locations Catalogs Serving Configs Delete': 'projects_locations-catalogs-serving-configs-delete.md' + - 'Locations Catalogs Serving Configs Get': 'projects_locations-catalogs-serving-configs-get.md' + - 'Locations Catalogs Serving Configs List': 'projects_locations-catalogs-serving-configs-list.md' + - 'Locations Catalogs Serving Configs Patch': 'projects_locations-catalogs-serving-configs-patch.md' + - 'Locations Catalogs Serving Configs Predict': 'projects_locations-catalogs-serving-configs-predict.md' + - 'Locations Catalogs Serving Configs Remove Control': 
'projects_locations-catalogs-serving-configs-remove-control.md' + - 'Locations Catalogs Serving Configs Search': 'projects_locations-catalogs-serving-configs-search.md' + - 'Locations Catalogs Set Default Branch': 'projects_locations-catalogs-set-default-branch.md' + - 'Locations Catalogs Update Attributes Config': 'projects_locations-catalogs-update-attributes-config.md' + - 'Locations Catalogs Update Completion Config': 'projects_locations-catalogs-update-completion-config.md' + - 'Locations Catalogs User Events Collect': 'projects_locations-catalogs-user-events-collect.md' + - 'Locations Catalogs User Events Import': 'projects_locations-catalogs-user-events-import.md' + - 'Locations Catalogs User Events Purge': 'projects_locations-catalogs-user-events-purge.md' + - 'Locations Catalogs User Events Rejoin': 'projects_locations-catalogs-user-events-rejoin.md' + - 'Locations Catalogs User Events Write': 'projects_locations-catalogs-user-events-write.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Operations Get': 'projects_operations-get.md' + - 'Operations List': 'projects_operations-list.md' theme: readthedocs diff --git a/gen/retail2-cli/src/client.rs b/gen/retail2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/retail2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// 
Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/retail2-cli/src/main.rs b/gen/retail2-cli/src/main.rs index 587b7e7889..531c01e073 100644 --- a/gen/retail2-cli/src/main.rs +++ b/gen/retail2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_retail2::{api, Error, oauth2}; +use google_retail2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,276 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_locations_catalogs_attributes_config_add_catalog_attribute(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "catalog-attribute.dynamic-facetable-option" => Some(("catalogAttribute.dynamicFacetableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.exact-searchable-option" => Some(("catalogAttribute.exactSearchableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.in-use" => Some(("catalogAttribute.inUse", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "catalog-attribute.indexable-option" => Some(("catalogAttribute.indexableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.key" => Some(("catalogAttribute.key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.retrievable-option" => Some(("catalogAttribute.retrievableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.searchable-option" => Some(("catalogAttribute.searchableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.type" => Some(("catalogAttribute.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["catalog-attribute", "dynamic-facetable-option", "exact-searchable-option", "in-use", "indexable-option", "key", "retrievable-option", "searchable-option", "type"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2AddCatalogAttributeRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_attributes_config_add_catalog_attribute(request, 
opt.value_of("attributes-config").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_attributes_config_remove_catalog_attribute(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) 
= temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "key" => Some(("key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["key"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2RemoveCatalogAttributeRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_attributes_config_remove_catalog_attribute(request, opt.value_of("attributes-config").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_attributes_config_replace_catalog_attribute(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "catalog-attribute.dynamic-facetable-option" => Some(("catalogAttribute.dynamicFacetableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.exact-searchable-option" => Some(("catalogAttribute.exactSearchableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.in-use" => Some(("catalogAttribute.inUse", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "catalog-attribute.indexable-option" => Some(("catalogAttribute.indexableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.key" => Some(("catalogAttribute.key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.retrievable-option" => Some(("catalogAttribute.retrievableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.searchable-option" => Some(("catalogAttribute.searchableOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "catalog-attribute.type" => Some(("catalogAttribute.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["catalog-attribute", "dynamic-facetable-option", "exact-searchable-option", "in-use", "indexable-option", "key", "retrievable-option", "searchable-option", "type", "update-mask"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2ReplaceCatalogAttributeRequest = json::value::from_value(object).unwrap(); + let mut call 
= self.hub.projects().locations_catalogs_attributes_config_replace_catalog_attribute(request, opt.value_of("attributes-config").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_catalogs_branches_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_catalogs_branches_operations_get(opt.value_of("name").unwrap_or("")); @@ -191,6 +460,93 @@ where } } + async fn _projects_locations_catalogs_branches_products_add_local_inventories(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "add-mask" => Some(("addMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "add-time" => Some(("addTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "allow-missing" => Some(("allowMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["add-mask", "add-time", "allow-missing"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2AddLocalInventoriesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_branches_products_add_local_inventories(request, opt.value_of("product").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_catalogs_branches_products_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -534,13 +890,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-mask" => { - call = call.read_mask(value.unwrap_or("")); + call = call.read_mask( value.map(|v| arg_from_str(v, err, "read-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -676,10 +1032,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); 
match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -816,6 +1172,93 @@ where } } + async fn _projects_locations_catalogs_branches_products_remove_local_inventories(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "allow-missing" => Some(("allowMissing", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "place-ids" => Some(("placeIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "remove-time" => Some(("removeTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-missing", "place-ids", "remove-time"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2RemoveLocalInventoriesRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_branches_products_remove_local_inventories(request, opt.value_of("product").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + 
match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_catalogs_branches_products_set_inventory(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -961,7 +1404,7 @@ where call = call.query(value.unwrap_or("")); }, "max-suggestions" => { - call = call.max_suggestions(arg_from_str(value.unwrap_or("-0"), err, "max-suggestions", "integer")); + call = call.max_suggestions( value.map(|v| arg_from_str(v, err, "max-suggestions", "int32")).unwrap_or(-0)); }, "language-codes" => { call = call.add_language_codes(value.unwrap_or("")); @@ -1112,6 +1555,492 @@ where } } + async fn _projects_locations_catalogs_controls_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "associated-serving-config-ids" => Some(("associatedServingConfigIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.boost-action.boost" => Some(("rule.boostAction.boost", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "rule.boost-action.products-filter" => Some(("rule.boostAction.productsFilter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.do-not-associate-action.do-not-associate-terms" => Some(("rule.doNotAssociateAction.doNotAssociateTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.do-not-associate-action.query-terms" => Some(("rule.doNotAssociateAction.queryTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.do-not-associate-action.terms" => Some(("rule.doNotAssociateAction.terms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.filter-action.filter" => Some(("rule.filterAction.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.ignore-action.ignore-terms" => Some(("rule.ignoreAction.ignoreTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.oneway-synonyms-action.oneway-terms" => Some(("rule.onewaySynonymsAction.onewayTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.oneway-synonyms-action.query-terms" => Some(("rule.onewaySynonymsAction.queryTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.oneway-synonyms-action.synonyms" => Some(("rule.onewaySynonymsAction.synonyms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.redirect-action.redirect-uri" => Some(("rule.redirectAction.redirectUri", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "rule.replacement-action.query-terms" => Some(("rule.replacementAction.queryTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.replacement-action.replacement-term" => Some(("rule.replacementAction.replacementTerm", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.replacement-action.term" => Some(("rule.replacementAction.term", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.twoway-synonyms-action.synonyms" => Some(("rule.twowaySynonymsAction.synonyms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "search-solution-use-case" => Some(("searchSolutionUseCase", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "solution-types" => Some(("solutionTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["associated-serving-config-ids", "boost", "boost-action", "display-name", "do-not-associate-action", "do-not-associate-terms", "filter", "filter-action", "ignore-action", "ignore-terms", "name", "oneway-synonyms-action", "oneway-terms", "products-filter", "query-terms", "redirect-action", "redirect-uri", "replacement-action", "replacement-term", "rule", "search-solution-use-case", "solution-types", "synonyms", "term", "terms", "twoway-synonyms-action"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2Control = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_controls_create(request, opt.value_of("parent").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "control-id" => { + call = call.control_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["control-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_controls_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_controls_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param 
{ + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_controls_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_controls_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_controls_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_controls_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_controls_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "associated-serving-config-ids" => Some(("associatedServingConfigIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.boost-action.boost" => Some(("rule.boostAction.boost", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "rule.boost-action.products-filter" => Some(("rule.boostAction.productsFilter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.do-not-associate-action.do-not-associate-terms" => Some(("rule.doNotAssociateAction.doNotAssociateTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.do-not-associate-action.query-terms" => Some(("rule.doNotAssociateAction.queryTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.do-not-associate-action.terms" => Some(("rule.doNotAssociateAction.terms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.filter-action.filter" => Some(("rule.filterAction.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.ignore-action.ignore-terms" => Some(("rule.ignoreAction.ignoreTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.oneway-synonyms-action.oneway-terms" => Some(("rule.onewaySynonymsAction.onewayTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.oneway-synonyms-action.query-terms" => Some(("rule.onewaySynonymsAction.queryTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.oneway-synonyms-action.synonyms" => Some(("rule.onewaySynonymsAction.synonyms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.redirect-action.redirect-uri" => Some(("rule.redirectAction.redirectUri", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "rule.replacement-action.query-terms" => Some(("rule.replacementAction.queryTerms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "rule.replacement-action.replacement-term" => Some(("rule.replacementAction.replacementTerm", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.replacement-action.term" => Some(("rule.replacementAction.term", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "rule.twoway-synonyms-action.synonyms" => Some(("rule.twowaySynonymsAction.synonyms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "search-solution-use-case" => Some(("searchSolutionUseCase", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "solution-types" => Some(("solutionTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["associated-serving-config-ids", "boost", "boost-action", "display-name", "do-not-associate-action", "do-not-associate-terms", "filter", "filter-action", "ignore-action", "ignore-terms", "name", "oneway-synonyms-action", "oneway-terms", "products-filter", "query-terms", "redirect-action", "redirect-uri", "replacement-action", "replacement-term", "rule", "search-solution-use-case", "solution-types", "synonyms", "term", "terms", "twoway-synonyms-action"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2Control = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_controls_patch(request, opt.value_of("name").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_get_attributes_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_get_attributes_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, 
false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_get_completion_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_get_completion_config(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + 
{let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_catalogs_get_default_branch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_catalogs_get_default_branch(opt.value_of("catalog").unwrap_or("")); @@ -1174,7 +2103,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1285,7 +2214,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1380,7 +2309,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1573,6 +2502,7 @@ where "canonical-filter" => Some(("canonicalFilter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "dynamic-facet-spec.mode" => Some(("dynamicFacetSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "offset" => Some(("offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "order-by" => Some(("orderBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "page-categories" => Some(("pageCategories", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), @@ -1583,6 +2513,7 @@ where "query-expansion-spec.condition" => Some(("queryExpansionSpec.condition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query-expansion-spec.pin-unexpanded-results" => Some(("queryExpansionSpec.pinUnexpandedResults", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "search-mode" => Some(("searchMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spell-correction-spec.mode" => Some(("spellCorrectionSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-info.direct-user-request" => Some(("userInfo.directUserRequest", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "user-info.ip-address" => Some(("userInfo.ipAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "user-info.user-agent" => Some(("userInfo.userAgent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1590,7 +2521,7 @@ where "variant-rollup-keys" 
=> Some(("variantRollupKeys", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "visitor-id" => Some(("visitorId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["boost-spec", "branch", "canonical-filter", "condition", "direct-user-request", "dynamic-facet-spec", "filter", "ip-address", "mode", "offset", "order-by", "page-categories", "page-size", "page-token", "personalization-spec", "pin-unexpanded-results", "query", "query-expansion-spec", "search-mode", "skip-boost-spec-validation", "user-agent", "user-id", "user-info", "variant-rollup-keys", "visitor-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["boost-spec", "branch", "canonical-filter", "condition", "direct-user-request", "dynamic-facet-spec", "filter", "ip-address", "labels", "mode", "offset", "order-by", "page-categories", "page-size", "page-token", "personalization-spec", "pin-unexpanded-results", "query", "query-expansion-spec", "search-mode", "skip-boost-spec-validation", "spell-correction-spec", "user-agent", "user-id", "user-info", "variant-rollup-keys", "visitor-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1650,6 +2581,776 @@ where } } + async fn _projects_locations_catalogs_serving_configs_add_control(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + 
if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "control-id" => Some(("controlId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["control-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2AddControlRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_serving_configs_add_control(request, opt.value_of("serving-config").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + 
} { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_serving_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "boost-control-ids" => Some(("boostControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "diversity-level" => Some(("diversityLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "diversity-type" => Some(("diversityType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "do-not-associate-control-ids" => Some(("doNotAssociateControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "dynamic-facet-spec.mode" => Some(("dynamicFacetSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enable-category-filter-level" => Some(("enableCategoryFilterLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "facet-control-ids" => Some(("facetControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "filter-control-ids" => Some(("filterControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "ignore-control-ids" => Some(("ignoreControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "model-id" => Some(("modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oneway-synonyms-control-ids" => Some(("onewaySynonymsControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "personalization-spec.mode" => Some(("personalizationSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "price-reranking-level" => Some(("priceRerankingLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "redirect-control-ids" => Some(("redirectControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "replacement-control-ids" => 
Some(("replacementControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "solution-types" => Some(("solutionTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "twoway-synonyms-control-ids" => Some(("twowaySynonymsControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["boost-control-ids", "display-name", "diversity-level", "diversity-type", "do-not-associate-control-ids", "dynamic-facet-spec", "enable-category-filter-level", "facet-control-ids", "filter-control-ids", "ignore-control-ids", "mode", "model-id", "name", "oneway-synonyms-control-ids", "personalization-spec", "price-reranking-level", "redirect-control-ids", "replacement-control-ids", "solution-types", "twoway-synonyms-control-ids"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2ServingConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_serving_configs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "serving-config-id" => { + call = call.serving_config_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v.extend(["serving-config-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_serving_configs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_serving_configs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = 
call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_serving_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_serving_configs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + 
Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_serving_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_serving_configs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, 
output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_serving_configs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "boost-control-ids" => Some(("boostControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "diversity-level" => Some(("diversityLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "diversity-type" => Some(("diversityType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "do-not-associate-control-ids" => Some(("doNotAssociateControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "dynamic-facet-spec.mode" => Some(("dynamicFacetSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enable-category-filter-level" => Some(("enableCategoryFilterLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "facet-control-ids" => Some(("facetControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "filter-control-ids" => Some(("filterControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "ignore-control-ids" => Some(("ignoreControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "model-id" => Some(("modelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "oneway-synonyms-control-ids" => Some(("onewaySynonymsControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "personalization-spec.mode" => Some(("personalizationSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "price-reranking-level" => Some(("priceRerankingLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "redirect-control-ids" => Some(("redirectControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "replacement-control-ids" => 
Some(("replacementControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "solution-types" => Some(("solutionTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "twoway-synonyms-control-ids" => Some(("twowaySynonymsControlIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["boost-control-ids", "display-name", "diversity-level", "diversity-type", "do-not-associate-control-ids", "dynamic-facet-spec", "enable-category-filter-level", "facet-control-ids", "filter-control-ids", "ignore-control-ids", "mode", "model-id", "name", "oneway-synonyms-control-ids", "personalization-spec", "price-reranking-level", "redirect-control-ids", "replacement-control-ids", "solution-types", "twoway-synonyms-control-ids"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2ServingConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_serving_configs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_serving_configs_predict(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.attribution-token" => Some(("userEvent.attributionToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.cart-id" => Some(("userEvent.cartId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.completion-detail.completion-attribution-token" => Some(("userEvent.completionDetail.completionAttributionToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.completion-detail.selected-position" => Some(("userEvent.completionDetail.selectedPosition", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "user-event.completion-detail.selected-suggestion" => Some(("userEvent.completionDetail.selectedSuggestion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.event-time" => Some(("userEvent.eventTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.event-type" => Some(("userEvent.eventType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.experiment-ids" => Some(("userEvent.experimentIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "user-event.filter" => Some(("userEvent.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.offset" => Some(("userEvent.offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "user-event.order-by" => Some(("userEvent.orderBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.page-categories" 
=> Some(("userEvent.pageCategories", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "user-event.page-view-id" => Some(("userEvent.pageViewId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.purchase-transaction.cost" => Some(("userEvent.purchaseTransaction.cost", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "user-event.purchase-transaction.currency-code" => Some(("userEvent.purchaseTransaction.currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.purchase-transaction.id" => Some(("userEvent.purchaseTransaction.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.purchase-transaction.revenue" => Some(("userEvent.purchaseTransaction.revenue", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "user-event.purchase-transaction.tax" => Some(("userEvent.purchaseTransaction.tax", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "user-event.referrer-uri" => Some(("userEvent.referrerUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.search-query" => Some(("userEvent.searchQuery", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.session-id" => Some(("userEvent.sessionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.uri" => Some(("userEvent.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.user-info.direct-user-request" => Some(("userEvent.userInfo.directUserRequest", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "user-event.user-info.ip-address" => Some(("userEvent.userInfo.ipAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.user-info.user-agent" => Some(("userEvent.userInfo.userAgent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"user-event.user-info.user-id" => Some(("userEvent.userInfo.userId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-event.visitor-id" => Some(("userEvent.visitorId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["attribution-token", "cart-id", "completion-attribution-token", "completion-detail", "cost", "currency-code", "direct-user-request", "event-time", "event-type", "experiment-ids", "filter", "id", "ip-address", "labels", "offset", "order-by", "page-categories", "page-size", "page-token", "page-view-id", "purchase-transaction", "referrer-uri", "revenue", "search-query", "selected-position", "selected-suggestion", "session-id", "tax", "uri", "user-agent", "user-event", "user-id", "user-info", "validate-only", "visitor-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2PredictRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_serving_configs_predict(request, opt.value_of("placement").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = 
Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_serving_configs_remove_control(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "control-id" => Some(("controlId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["control-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2RemoveControlRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_serving_configs_remove_control(request, opt.value_of("serving-config").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to 
work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_serving_configs_search(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "boost-spec.skip-boost-spec-validation" => Some(("boostSpec.skipBoostSpecValidation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "branch" => Some(("branch", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "canonical-filter" => Some(("canonicalFilter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dynamic-facet-spec.mode" => Some(("dynamicFacetSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "filter" => Some(("filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "offset" => Some(("offset", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "order-by" => Some(("orderBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "page-categories" => Some(("pageCategories", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), + "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "personalization-spec.mode" => Some(("personalizationSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "query" => Some(("query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "query-expansion-spec.condition" => Some(("queryExpansionSpec.condition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "query-expansion-spec.pin-unexpanded-results" => Some(("queryExpansionSpec.pinUnexpandedResults", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "search-mode" => Some(("searchMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "spell-correction-spec.mode" => Some(("spellCorrectionSpec.mode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-info.direct-user-request" => Some(("userInfo.directUserRequest", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "user-info.ip-address" => Some(("userInfo.ipAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-info.user-agent" => Some(("userInfo.userAgent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "user-info.user-id" => Some(("userInfo.userId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "variant-rollup-keys" => Some(("variantRollupKeys", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "visitor-id" => Some(("visitorId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["boost-spec", "branch", "canonical-filter", "condition", "direct-user-request", "dynamic-facet-spec", "filter", "ip-address", "labels", "mode", "offset", "order-by", 
"page-categories", "page-size", "page-token", "personalization-spec", "pin-unexpanded-results", "query", "query-expansion-spec", "search-mode", "skip-boost-spec-validation", "spell-correction-spec", "user-agent", "user-id", "user-info", "variant-rollup-keys", "visitor-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2SearchRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_serving_configs_search(request, opt.value_of("placement").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut 
value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_catalogs_set_default_branch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -1737,20 +3438,48 @@ where } } - async fn _projects_locations_catalogs_user_events_collect(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + async fn _projects_locations_catalogs_update_attributes_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { - let mut call = self.hub.projects().locations_catalogs_user_events_collect(opt.value_of("parent").unwrap_or("")); + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "attribute-config-level" => Some(("attributeConfigLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["attribute-config-level", "name"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2AttributesConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_update_attributes_config(request, opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { - "user-event" => { - call = call.user_event(value.unwrap_or("")); - }, - "uri" => { - call = call.uri(value.unwrap_or("")); - }, - "ets" => { - call = call.ets(value.unwrap_or("")); + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1765,7 +3494,195 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["ets", "uri", "user-event"].iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + 
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_update_completion_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "allowlist-input-config.big-query-source.data-schema" => Some(("allowlistInputConfig.bigQuerySource.dataSchema", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "allowlist-input-config.big-query-source.dataset-id" => Some(("allowlistInputConfig.bigQuerySource.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "allowlist-input-config.big-query-source.gcs-staging-dir" => Some(("allowlistInputConfig.bigQuerySource.gcsStagingDir", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "allowlist-input-config.big-query-source.partition-date.day" => Some(("allowlistInputConfig.bigQuerySource.partitionDate.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "allowlist-input-config.big-query-source.partition-date.month" => Some(("allowlistInputConfig.bigQuerySource.partitionDate.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "allowlist-input-config.big-query-source.partition-date.year" => Some(("allowlistInputConfig.bigQuerySource.partitionDate.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "allowlist-input-config.big-query-source.project-id" => Some(("allowlistInputConfig.bigQuerySource.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "allowlist-input-config.big-query-source.table-id" => Some(("allowlistInputConfig.bigQuerySource.tableId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "auto-learning" => Some(("autoLearning", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "denylist-input-config.big-query-source.data-schema" => Some(("denylistInputConfig.bigQuerySource.dataSchema", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "denylist-input-config.big-query-source.dataset-id" => Some(("denylistInputConfig.bigQuerySource.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"denylist-input-config.big-query-source.gcs-staging-dir" => Some(("denylistInputConfig.bigQuerySource.gcsStagingDir", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "denylist-input-config.big-query-source.partition-date.day" => Some(("denylistInputConfig.bigQuerySource.partitionDate.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "denylist-input-config.big-query-source.partition-date.month" => Some(("denylistInputConfig.bigQuerySource.partitionDate.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "denylist-input-config.big-query-source.partition-date.year" => Some(("denylistInputConfig.bigQuerySource.partitionDate.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "denylist-input-config.big-query-source.project-id" => Some(("denylistInputConfig.bigQuerySource.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "denylist-input-config.big-query-source.table-id" => Some(("denylistInputConfig.bigQuerySource.tableId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "last-allowlist-import-operation" => Some(("lastAllowlistImportOperation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "last-denylist-import-operation" => Some(("lastDenylistImportOperation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "last-suggestions-import-operation" => Some(("lastSuggestionsImportOperation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "matching-order" => Some(("matchingOrder", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "max-suggestions" => Some(("maxSuggestions", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "min-prefix-length" => Some(("minPrefixLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "suggestions-input-config.big-query-source.data-schema" => Some(("suggestionsInputConfig.bigQuerySource.dataSchema", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "suggestions-input-config.big-query-source.dataset-id" => Some(("suggestionsInputConfig.bigQuerySource.datasetId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "suggestions-input-config.big-query-source.gcs-staging-dir" => Some(("suggestionsInputConfig.bigQuerySource.gcsStagingDir", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "suggestions-input-config.big-query-source.partition-date.day" => Some(("suggestionsInputConfig.bigQuerySource.partitionDate.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "suggestions-input-config.big-query-source.partition-date.month" => Some(("suggestionsInputConfig.bigQuerySource.partitionDate.month", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "suggestions-input-config.big-query-source.partition-date.year" => Some(("suggestionsInputConfig.bigQuerySource.partitionDate.year", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "suggestions-input-config.big-query-source.project-id" => Some(("suggestionsInputConfig.bigQuerySource.projectId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "suggestions-input-config.big-query-source.table-id" => Some(("suggestionsInputConfig.bigQuerySource.tableId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowlist-input-config", "auto-learning", "big-query-source", "data-schema", "dataset-id", "day", "denylist-input-config", "gcs-staging-dir", "last-allowlist-import-operation", "last-denylist-import-operation", "last-suggestions-import-operation", "matching-order", "max-suggestions", "min-prefix-length", "month", "name", "partition-date", "project-id", 
"suggestions-input-config", "table-id", "year"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRetailV2CompletionConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_catalogs_update_completion_config(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_catalogs_user_events_collect(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_catalogs_user_events_collect(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "user-event" => { + call = call.user_event(value.unwrap_or("")); + }, + "uri" => { + call = call.uri(value.unwrap_or("")); + }, + "raw-json" => { + call = call.raw_json(value.unwrap_or("")); + }, + "prebuilt-rule" => { + call = call.prebuilt_rule(value.unwrap_or("")); + }, + "ets" => { + call = call.ets( value.map(|v| arg_from_str(v, err, "ets", "int64")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["ets", "prebuilt-rule", "raw-json", "uri", "user-event"].iter().map(|v|*v)); v } )); } } @@ -2130,6 +4047,9 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "write-async" => { + call = call.write_async( value.map(|v| arg_from_str(v, err, "write-async", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -2143,6 +4063,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); 
v.extend(self.gp.iter().map(|v|*v)); + v.extend(["write-async"].iter().map(|v|*v)); v } )); } } @@ -2238,7 +4159,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2352,7 +4273,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2411,12 +4332,24 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { + ("locations-catalogs-attributes-config-add-catalog-attribute", Some(opt)) => { + call_result = self._projects_locations_catalogs_attributes_config_add_catalog_attribute(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-attributes-config-remove-catalog-attribute", Some(opt)) => { + call_result = self._projects_locations_catalogs_attributes_config_remove_catalog_attribute(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-attributes-config-replace-catalog-attribute", Some(opt)) => { + call_result = self._projects_locations_catalogs_attributes_config_replace_catalog_attribute(opt, dry_run, &mut err).await; + }, ("locations-catalogs-branches-operations-get", Some(opt)) => { call_result = self._projects_locations_catalogs_branches_operations_get(opt, dry_run, &mut err).await; }, ("locations-catalogs-branches-products-add-fulfillment-places", Some(opt)) => { call_result = self._projects_locations_catalogs_branches_products_add_fulfillment_places(opt, dry_run, &mut err).await; }, + ("locations-catalogs-branches-products-add-local-inventories", Some(opt)) => { + call_result = 
self._projects_locations_catalogs_branches_products_add_local_inventories(opt, dry_run, &mut err).await; + }, ("locations-catalogs-branches-products-create", Some(opt)) => { call_result = self._projects_locations_catalogs_branches_products_create(opt, dry_run, &mut err).await; }, @@ -2438,6 +4371,9 @@ where ("locations-catalogs-branches-products-remove-fulfillment-places", Some(opt)) => { call_result = self._projects_locations_catalogs_branches_products_remove_fulfillment_places(opt, dry_run, &mut err).await; }, + ("locations-catalogs-branches-products-remove-local-inventories", Some(opt)) => { + call_result = self._projects_locations_catalogs_branches_products_remove_local_inventories(opt, dry_run, &mut err).await; + }, ("locations-catalogs-branches-products-set-inventory", Some(opt)) => { call_result = self._projects_locations_catalogs_branches_products_set_inventory(opt, dry_run, &mut err).await; }, @@ -2447,6 +4383,27 @@ where ("locations-catalogs-completion-data-import", Some(opt)) => { call_result = self._projects_locations_catalogs_completion_data_import(opt, dry_run, &mut err).await; }, + ("locations-catalogs-controls-create", Some(opt)) => { + call_result = self._projects_locations_catalogs_controls_create(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-controls-delete", Some(opt)) => { + call_result = self._projects_locations_catalogs_controls_delete(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-controls-get", Some(opt)) => { + call_result = self._projects_locations_catalogs_controls_get(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-controls-list", Some(opt)) => { + call_result = self._projects_locations_catalogs_controls_list(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-controls-patch", Some(opt)) => { + call_result = self._projects_locations_catalogs_controls_patch(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-get-attributes-config", Some(opt)) => { + call_result = 
self._projects_locations_catalogs_get_attributes_config(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-get-completion-config", Some(opt)) => { + call_result = self._projects_locations_catalogs_get_completion_config(opt, dry_run, &mut err).await; + }, ("locations-catalogs-get-default-branch", Some(opt)) => { call_result = self._projects_locations_catalogs_get_default_branch(opt, dry_run, &mut err).await; }, @@ -2468,9 +4425,42 @@ where ("locations-catalogs-placements-search", Some(opt)) => { call_result = self._projects_locations_catalogs_placements_search(opt, dry_run, &mut err).await; }, + ("locations-catalogs-serving-configs-add-control", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_add_control(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-serving-configs-create", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_create(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-serving-configs-delete", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_delete(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-serving-configs-get", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_get(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-serving-configs-list", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_list(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-serving-configs-patch", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_patch(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-serving-configs-predict", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_predict(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-serving-configs-remove-control", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_remove_control(opt, dry_run, &mut 
err).await; + }, + ("locations-catalogs-serving-configs-search", Some(opt)) => { + call_result = self._projects_locations_catalogs_serving_configs_search(opt, dry_run, &mut err).await; + }, ("locations-catalogs-set-default-branch", Some(opt)) => { call_result = self._projects_locations_catalogs_set_default_branch(opt, dry_run, &mut err).await; }, + ("locations-catalogs-update-attributes-config", Some(opt)) => { + call_result = self._projects_locations_catalogs_update_attributes_config(opt, dry_run, &mut err).await; + }, + ("locations-catalogs-update-completion-config", Some(opt)) => { + call_result = self._projects_locations_catalogs_update_completion_config(opt, dry_run, &mut err).await; + }, ("locations-catalogs-user-events-collect", Some(opt)) => { call_result = self._projects_locations_catalogs_user_events_collect(opt, dry_run, &mut err).await; }, @@ -2577,7 +4567,91 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-catalogs-branches-operations-get', 'locations-catalogs-branches-products-add-fulfillment-places', 'locations-catalogs-branches-products-create', 'locations-catalogs-branches-products-delete', 'locations-catalogs-branches-products-get', 'locations-catalogs-branches-products-import', 'locations-catalogs-branches-products-list', 'locations-catalogs-branches-products-patch', 'locations-catalogs-branches-products-remove-fulfillment-places', 'locations-catalogs-branches-products-set-inventory', 'locations-catalogs-complete-query', 'locations-catalogs-completion-data-import', 'locations-catalogs-get-default-branch', 'locations-catalogs-list', 'locations-catalogs-operations-get', 'locations-catalogs-operations-list', 'locations-catalogs-patch', 'locations-catalogs-placements-predict', 'locations-catalogs-placements-search', 'locations-catalogs-set-default-branch', 'locations-catalogs-user-events-collect', 'locations-catalogs-user-events-import', 'locations-catalogs-user-events-purge', 
'locations-catalogs-user-events-rejoin', 'locations-catalogs-user-events-write', 'locations-operations-get', 'locations-operations-list', 'operations-get' and 'operations-list'", vec![ + ("projects", "methods: 'locations-catalogs-attributes-config-add-catalog-attribute', 'locations-catalogs-attributes-config-remove-catalog-attribute', 'locations-catalogs-attributes-config-replace-catalog-attribute', 'locations-catalogs-branches-operations-get', 'locations-catalogs-branches-products-add-fulfillment-places', 'locations-catalogs-branches-products-add-local-inventories', 'locations-catalogs-branches-products-create', 'locations-catalogs-branches-products-delete', 'locations-catalogs-branches-products-get', 'locations-catalogs-branches-products-import', 'locations-catalogs-branches-products-list', 'locations-catalogs-branches-products-patch', 'locations-catalogs-branches-products-remove-fulfillment-places', 'locations-catalogs-branches-products-remove-local-inventories', 'locations-catalogs-branches-products-set-inventory', 'locations-catalogs-complete-query', 'locations-catalogs-completion-data-import', 'locations-catalogs-controls-create', 'locations-catalogs-controls-delete', 'locations-catalogs-controls-get', 'locations-catalogs-controls-list', 'locations-catalogs-controls-patch', 'locations-catalogs-get-attributes-config', 'locations-catalogs-get-completion-config', 'locations-catalogs-get-default-branch', 'locations-catalogs-list', 'locations-catalogs-operations-get', 'locations-catalogs-operations-list', 'locations-catalogs-patch', 'locations-catalogs-placements-predict', 'locations-catalogs-placements-search', 'locations-catalogs-serving-configs-add-control', 'locations-catalogs-serving-configs-create', 'locations-catalogs-serving-configs-delete', 'locations-catalogs-serving-configs-get', 'locations-catalogs-serving-configs-list', 'locations-catalogs-serving-configs-patch', 'locations-catalogs-serving-configs-predict', 
'locations-catalogs-serving-configs-remove-control', 'locations-catalogs-serving-configs-search', 'locations-catalogs-set-default-branch', 'locations-catalogs-update-attributes-config', 'locations-catalogs-update-completion-config', 'locations-catalogs-user-events-collect', 'locations-catalogs-user-events-import', 'locations-catalogs-user-events-purge', 'locations-catalogs-user-events-rejoin', 'locations-catalogs-user-events-write', 'locations-operations-get', 'locations-operations-list', 'operations-get' and 'operations-list'", vec![ + ("locations-catalogs-attributes-config-add-catalog-attribute", + Some(r##"Adds the specified CatalogAttribute to the AttributesConfig. If the CatalogAttribute to add already exists, an ALREADY_EXISTS error is returned."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-attributes-config-add-catalog-attribute", + vec![ + (Some(r##"attributes-config"##), + None, + Some(r##"Required. Full AttributesConfig resource name. Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/attributesConfig`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-attributes-config-remove-catalog-attribute", + Some(r##"Removes the specified CatalogAttribute from the AttributesConfig. 
If the CatalogAttribute to remove does not exist, a NOT_FOUND error is returned."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-attributes-config-remove-catalog-attribute", + vec![ + (Some(r##"attributes-config"##), + None, + Some(r##"Required. Full AttributesConfig resource name. Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/attributesConfig`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-attributes-config-replace-catalog-attribute", + Some(r##"Replaces the specified CatalogAttribute in the AttributesConfig by updating the catalog attribute with the same CatalogAttribute.key. If the CatalogAttribute to replace does not exist, a NOT_FOUND error is returned."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-attributes-config-replace-catalog-attribute", + vec![ + (Some(r##"attributes-config"##), + None, + Some(r##"Required. Full AttributesConfig resource name. 
Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/attributesConfig`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-catalogs-branches-operations-get", Some(r##"Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-branches-operations-get", @@ -2601,7 +4675,7 @@ async fn main() { Some(false)), ]), ("locations-catalogs-branches-products-add-fulfillment-places", - Some(r##"Incrementally adds place IDs to Product.fulfillment_info.place_ids. This process is asynchronous and does not require the Product to exist before updating fulfillment information. If the request is valid, the update will be enqueued and processed downstream. As a consequence, when a response is returned, the added place IDs are not immediately manifested in the Product queried by GetProduct or ListProducts. This feature is only available for users who have Retail Search enabled. Please submit a form [here](https://cloud.google.com/contact) to contact cloud sales if you are interested in using Retail Search."##), + Some(r##"Incrementally adds place IDs to Product.fulfillment_info.place_ids. This process is asynchronous and does not require the Product to exist before updating fulfillment information. If the request is valid, the update will be enqueued and processed downstream. 
As a consequence, when a response is returned, the added place IDs are not immediately manifested in the Product queried by ProductService.GetProduct or ProductService.ListProducts. The returned Operations will be obsolete after 1 day, and GetOperation API will return NOT_FOUND afterwards. If conflicting updates are issued, the Operations associated with the stale updates will not be marked as done until being obsolete. This feature is only available for users who have Retail Search enabled. Enable Retail Search on Cloud Console before using this feature."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-branches-products-add-fulfillment-places", vec![ (Some(r##"product"##), @@ -2622,6 +4696,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-branches-products-add-local-inventories", + Some(r##"Updates local inventory information for a Product at a list of places, while respecting the last update timestamps of each inventory field. This process is asynchronous and does not require the Product to exist before updating inventory information. If the request is valid, the update will be enqueued and processed downstream. As a consequence, when a response is returned, updates are not immediately manifested in the Product queried by ProductService.GetProduct or ProductService.ListProducts. Local inventory information can only be modified using this method. ProductService.CreateProduct and ProductService.UpdateProduct has no effect on local inventories. The returned Operations will be obsolete after 1 day, and GetOperation API will return NOT_FOUND afterwards. If conflicting updates are issued, the Operations associated with the stale updates will not be marked as done until being obsolete. 
This feature is only available for users who have Retail Search enabled. Enable Retail Search on Cloud Console before using this feature."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-branches-products-add-local-inventories", + vec![ + (Some(r##"product"##), + None, + Some(r##"Required. Full resource name of Product, such as `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/some_product_id`. If the caller does not have permission to access the Product, regardless of whether or not it exists, a PERMISSION_DENIED error is returned."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2701,7 +4803,7 @@ async fn main() { Some(false)), ]), ("locations-catalogs-branches-products-import", - Some(r##"Bulk import of multiple Products. Request processing may be synchronous. No partial updating is supported. Non-existing items are created. Note that it is possible for a subset of the Products to be successfully updated."##), + Some(r##"Bulk import of multiple Products. Request processing may be synchronous. Non-existing items are created. Note that it is possible for a subset of the Products to be successfully updated."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-branches-products-import", vec![ (Some(r##"parent"##), @@ -2779,7 +4881,7 @@ async fn main() { Some(false)), ]), ("locations-catalogs-branches-products-remove-fulfillment-places", - Some(r##"Incrementally removes place IDs from a Product.fulfillment_info.place_ids. 
This process is asynchronous and does not require the Product to exist before updating fulfillment information. If the request is valid, the update will be enqueued and processed downstream. As a consequence, when a response is returned, the removed place IDs are not immediately manifested in the Product queried by GetProduct or ListProducts. This feature is only available for users who have Retail Search enabled. Please submit a form [here](https://cloud.google.com/contact) to contact cloud sales if you are interested in using Retail Search."##), + Some(r##"Incrementally removes place IDs from a Product.fulfillment_info.place_ids. This process is asynchronous and does not require the Product to exist before updating fulfillment information. If the request is valid, the update will be enqueued and processed downstream. As a consequence, when a response is returned, the removed place IDs are not immediately manifested in the Product queried by ProductService.GetProduct or ProductService.ListProducts. The returned Operations will be obsolete after 1 day, and GetOperation API will return NOT_FOUND afterwards. If conflicting updates are issued, the Operations associated with the stale updates will not be marked as done until being obsolete. This feature is only available for users who have Retail Search enabled. Enable Retail Search on Cloud Console before using this feature."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-branches-products-remove-fulfillment-places", vec![ (Some(r##"product"##), @@ -2800,6 +4902,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-branches-products-remove-local-inventories", + Some(r##"Remove local inventory information for a Product at a list of places at a removal timestamp. This process is asynchronous. 
If the request is valid, the removal will be enqueued and processed downstream. As a consequence, when a response is returned, removals are not immediately manifested in the Product queried by ProductService.GetProduct or ProductService.ListProducts. Local inventory information can only be removed using this method. ProductService.CreateProduct and ProductService.UpdateProduct has no effect on local inventories. The returned Operations will be obsolete after 1 day, and GetOperation API will return NOT_FOUND afterwards. If conflicting updates are issued, the Operations associated with the stale updates will not be marked as done until being obsolete. This feature is only available for users who have Retail Search enabled. Enable Retail Search on Cloud Console before using this feature."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-branches-products-remove-local-inventories", + vec![ + (Some(r##"product"##), + None, + Some(r##"Required. Full resource name of Product, such as `projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/some_product_id`. If the caller does not have permission to access the Product, regardless of whether or not it exists, a PERMISSION_DENIED error is returned."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2807,7 +4937,7 @@ async fn main() { Some(false)), ]), ("locations-catalogs-branches-products-set-inventory", - Some(r##"Updates inventory information for a Product while respecting the last update timestamps of each inventory field. 
This process is asynchronous and does not require the Product to exist before updating fulfillment information. If the request is valid, the update will be enqueued and processed downstream. As a consequence, when a response is returned, updates are not immediately manifested in the Product queried by GetProduct or ListProducts. When inventory is updated with CreateProduct and UpdateProduct, the specified inventory field value(s) will overwrite any existing value(s) while ignoring the last update time for this field. Furthermore, the last update time for the specified inventory fields will be overwritten to the time of the CreateProduct or UpdateProduct request. If no inventory fields are set in CreateProductRequest.product, then any pre-existing inventory information for this product will be used. If no inventory fields are set in SetInventoryRequest.set_mask, then any existing inventory information will be preserved. Pre-existing inventory information can only be updated with SetInventory, AddFulfillmentPlaces, and RemoveFulfillmentPlaces. This feature is only available for users who have Retail Search enabled. Please submit a form [here](https://cloud.google.com/contact) to contact cloud sales if you are interested in using Retail Search."##), + Some(r##"Updates inventory information for a Product while respecting the last update timestamps of each inventory field. This process is asynchronous and does not require the Product to exist before updating fulfillment information. If the request is valid, the update is enqueued and processed downstream. As a consequence, when a response is returned, updates are not immediately manifested in the Product queried by ProductService.GetProduct or ProductService.ListProducts. When inventory is updated with ProductService.CreateProduct and ProductService.UpdateProduct, the specified inventory field value(s) overwrite any existing value(s) while ignoring the last update time for this field. 
Furthermore, the last update times for the specified inventory fields are overwritten by the times of the ProductService.CreateProduct or ProductService.UpdateProduct request. If no inventory fields are set in CreateProductRequest.product, then any pre-existing inventory information for this product is used. If no inventory fields are set in SetInventoryRequest.set_mask, then any existing inventory information is preserved. Pre-existing inventory information can only be updated with ProductService.SetInventory, ProductService.AddFulfillmentPlaces, and ProductService.RemoveFulfillmentPlaces. The returned Operations is obsolete after one day, and the GetOperation API returns `NOT_FOUND` afterwards. If conflicting updates are issued, the Operations associated with the stale updates are not marked as done until they are obsolete. This feature is only available for users who have Retail Search enabled. Enable Retail Search on Cloud Console before using this feature."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-branches-products-set-inventory", vec![ (Some(r##"name"##), @@ -2835,7 +4965,7 @@ async fn main() { Some(false)), ]), ("locations-catalogs-complete-query", - Some(r##"Completes the specified prefix with keyword suggestions. This feature is only available for users who have Retail Search enabled. Please submit a form [here](https://cloud.google.com/contact) to contact cloud sales if you are interested in using Retail Search."##), + Some(r##"Completes the specified prefix with keyword suggestions. This feature is only available for users who have Retail Search enabled. 
Enable Retail Search on Cloud Console before using this feature."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-complete-query", vec![ (Some(r##"catalog"##), @@ -2857,7 +4987,7 @@ async fn main() { Some(false)), ]), ("locations-catalogs-completion-data-import", - Some(r##"Bulk import of processed completion dataset. Request processing may be synchronous. Partial updating is not supported. This feature is only available for users who have Retail Search enabled. Please submit a form [here](https://cloud.google.com/contact) to contact cloud sales if you are interested in using Retail Search."##), + Some(r##"Bulk import of processed completion dataset. Request processing is asynchronous. Partial updating is not supported. The operation is successfully finished only after the imported suggestions are indexed successfully and ready for serving. The process takes hours. This feature is only available for users who have Retail Search enabled. Enable Retail Search on Cloud Console before using this feature."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-completion-data-import", vec![ (Some(r##"parent"##), @@ -2878,6 +5008,172 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-controls-create", + Some(r##"Creates a Control. If the Control to create already exists, an ALREADY_EXISTS error is returned."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-controls-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Full resource name of parent catalog. 
Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-controls-delete", + Some(r##"Deletes a Control. If the Control to delete does not exist, a NOT_FOUND error is returned."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-controls-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the Control to delete. Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/controls/{control_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-controls-get", + Some(r##"Gets a Control."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-controls-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the Control to get. 
Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/controls/{control_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-controls-list", + Some(r##"Lists all Controls by their parent Catalog."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-controls-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The catalog resource name. Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-controls-patch", + Some(r##"Updates a Control. Control cannot be set to a different oneof field, if so an INVALID_ARGUMENT is returned. If the Control to update does not exist, a NOT_FOUND error is returned."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-controls-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Immutable. 
Fully qualified name `projects/*/locations/global/catalogs/*/controls/*`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-get-attributes-config", + Some(r##"Gets an AttributesConfig."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-get-attributes-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Full AttributesConfig resource name. Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/attributesConfig`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-get-completion-config", + Some(r##"Gets a CompletionConfig."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-get-completion-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Full CompletionConfig resource name. 
Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/completionConfig`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3006,7 +5302,7 @@ async fn main() { vec![ (Some(r##"placement"##), None, - Some(r##"Required. Full resource name of the format: {name=projects/*/locations/global/catalogs/default_catalog/placements/*} The ID of the Recommendations AI placement. Before you can request predictions from your model, you must create at least one placement for it. For more information, see [Managing placements](https://cloud.google.com/retail/recommendations-ai/docs/manage-placements). The full list of available placements can be seen at https://console.cloud.google.com/recommendation/catalogs/default_catalog/placements"##), + Some(r##"Required. Full resource name of the format: `{placement=projects/*/locations/global/catalogs/default_catalog/servingConfigs/*}` or `{placement=projects/*/locations/global/catalogs/default_catalog/placements/*}`. We recommend using the `servingConfigs` resource. `placements` is a legacy resource. The ID of the Recommendations AI serving config or placement. Before you can request predictions from your model, you must create at least one serving config or placement for it. For more information, see [Manage serving configs] (https://cloud.google.com/retail/docs/manage-configs). The full list of available serving configs can be seen at https://console.cloud.google.com/ai/retail/catalogs/default_catalog/configs"##), Some(true), Some(false)), @@ -3029,12 +5325,246 @@ async fn main() { Some(false)), ]), ("locations-catalogs-placements-search", - Some(r##"Performs a search. This feature is only available for users who have Retail Search enabled. 
Please submit a form [here](https://cloud.google.com/contact) to contact cloud sales if you are interested in using Retail Search."##), + Some(r##"Performs a search. This feature is only available for users who have Retail Search enabled. Enable Retail Search on Cloud Console before using this feature."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-placements-search", vec![ (Some(r##"placement"##), None, - Some(r##"Required. The resource name of the search engine placement, such as `projects/*/locations/global/catalogs/default_catalog/placements/default_search`. This field is used to identify the serving configuration name and the set of models that will be used to make the search."##), + Some(r##"Required. The resource name of the Retail Search serving config, such as `projects/*/locations/global/catalogs/default_catalog/servingConfigs/default_serving_config` or the name of the legacy placement resource, such as `projects/*/locations/global/catalogs/default_catalog/placements/default_search`. This field is used to identify the serving config name and the set of models that will be used to make the search."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-add-control", + Some(r##"Enables a Control on the specified ServingConfig. The control is added in the last position of the list of controls it belongs to (e.g. 
if it's a facet spec control it will be applied in the last position of servingConfig.facetSpecIds) Returns a ALREADY_EXISTS error if the control has already been applied. Returns a FAILED_PRECONDITION error if the addition could exceed maximum number of control allowed for that type of control."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-add-control", + vec![ + (Some(r##"serving-config"##), + None, + Some(r##"Required. The source ServingConfig resource name . Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/servingConfigs/{serving_config_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-create", + Some(r##"Creates a ServingConfig. A maximum of 100 ServingConfigs are allowed in a Catalog, otherwise a FAILED_PRECONDITION error is returned."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Full resource name of parent. 
Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-delete", + Some(r##"Deletes a ServingConfig. Returns a NotFound error if the ServingConfig does not exist."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the ServingConfig to delete. Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/servingConfigs/{serving_config_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-get", + Some(r##"Gets a ServingConfig. Returns a NotFound error if the ServingConfig does not exist."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the ServingConfig to get. 
Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/servingConfigs/{serving_config_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-list", + Some(r##"Lists all ServingConfigs linked to this catalog."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The catalog resource name. Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-patch", + Some(r##"Updates a ServingConfig."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Immutable. 
Fully qualified name `projects/*/locations/global/catalogs/*/servingConfig/*`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-predict", + Some(r##"Makes a recommendation prediction."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-predict", + vec![ + (Some(r##"placement"##), + None, + Some(r##"Required. Full resource name of the format: `{placement=projects/*/locations/global/catalogs/default_catalog/servingConfigs/*}` or `{placement=projects/*/locations/global/catalogs/default_catalog/placements/*}`. We recommend using the `servingConfigs` resource. `placements` is a legacy resource. The ID of the Recommendations AI serving config or placement. Before you can request predictions from your model, you must create at least one serving config or placement for it. For more information, see [Manage serving configs] (https://cloud.google.com/retail/docs/manage-configs). 
The full list of available serving configs can be seen at https://console.cloud.google.com/ai/retail/catalogs/default_catalog/configs"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-remove-control", + Some(r##"Disables a Control on the specified ServingConfig. The control is removed from the ServingConfig. Returns a NOT_FOUND error if the Control is not enabled for the ServingConfig."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-remove-control", + vec![ + (Some(r##"serving-config"##), + None, + Some(r##"Required. The source ServingConfig resource name . Format: `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/servingConfigs/{serving_config_id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-serving-configs-search", + Some(r##"Performs a search. This feature is only available for users who have Retail Search enabled. 
Enable Retail Search on Cloud Console before using this feature."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-serving-configs-search", + vec![ + (Some(r##"placement"##), + None, + Some(r##"Required. The resource name of the Retail Search serving config, such as `projects/*/locations/global/catalogs/default_catalog/servingConfigs/default_serving_config` or the name of the legacy placement resource, such as `projects/*/locations/global/catalogs/default_catalog/placements/default_search`. This field is used to identify the serving config name and the set of models that will be used to make the search."##), Some(true), Some(false)), @@ -3078,6 +5608,62 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-update-attributes-config", + Some(r##"Updates the AttributesConfig. The catalog attributes in the request will be updated in the catalog, or inserted if they do not exist. Existing catalog attributes not included in the request will remain unchanged. Attributes that are assigned to products, but do not exist at the catalog level, are always included in the response. The product attribute is assigned default values for missing catalog attribute fields, e.g., searchable and dynamic facetable options."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-update-attributes-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Immutable. The fully qualified resource name of the attribute config. 
Format: `projects/*/locations/*/catalogs/*/attributesConfig`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-catalogs-update-completion-config", + Some(r##"Updates the CompletionConfigs."##), + "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-update-completion-config", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Immutable. Fully qualified name `projects/*/locations/*/catalogs/*/completionConfig`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -3107,7 +5693,7 @@ async fn main() { Some(false)), ]), ("locations-catalogs-user-events-import", - Some(r##"Bulk import of User events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. Operation.response is of type ImportResponse. Note that it is possible for a subset of the items to be successfully inserted. Operation.metadata is of type ImportMetadata."##), + Some(r##"Bulk import of User events. Request processing might be synchronous. Events that already exist are skipped. Use this method for backfilling historical user events. 
`Operation.response` is of type `ImportResponse`. Note that it is possible for a subset of the items to be successfully inserted. `Operation.metadata` is of type `ImportMetadata`."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-user-events-import", vec![ (Some(r##"parent"##), @@ -3163,7 +5749,7 @@ async fn main() { Some(false)), ]), ("locations-catalogs-user-events-rejoin", - Some(r##"Triggers a user event rejoin operation with latest product catalog. Events will not be annotated with detailed product information if product is missing from the catalog at the time the user event is ingested, and these events are stored as unjoined events with a limited usage on training and serving. This API can be used to trigger a 'join' operation on specified events with latest version of product catalog. It can also be used to correct events joined with wrong product catalog."##), + Some(r##"Starts a user-event rejoin operation with latest product catalog. Events are not annotated with detailed product information for products that are missing from the catalog when the user event is ingested. These events are stored as unjoined events with limited usage on training and serving. You can use this method to start a join operation on specified events with the latest version of product catalog. You can also use this method to correct events joined with the wrong product catalog. 
A rejoin operation can take hours or days to complete."##), "Details at http://byron.github.io/google-apis-rs/google_retail2_cli/projects_locations-catalogs-user-events-rejoin", vec![ (Some(r##"parent"##), @@ -3312,7 +5898,7 @@ async fn main() { let mut app = App::new("retail2") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230117") .about("Cloud Retail service enables customers to build end-to-end personalized recommendation systems without requiring a high level of expertise in machine learning, recommendation system, or Google Cloud.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_retail2_cli") .arg(Arg::with_name("url") diff --git a/gen/retail2/Cargo.toml b/gen/retail2/Cargo.toml index 29545af8a9..127302655b 100644 --- a/gen/retail2/Cargo.toml +++ b/gen/retail2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-retail2" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Retail (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/retail2" homepage = "https://cloud.google.com/recommendations" -documentation = "https://docs.rs/google-retail2/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-retail2/5.0.2+20230117" license = "MIT" keywords = ["retail", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/retail2/README.md b/gen/retail2/README.md index 7d66beb9b0..3154d401e6 100644 --- a/gen/retail2/README.md +++ b/gen/retail2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-retail2` library allows access to all features of the *Google Cloud Retail* service. -This documentation was generated from *Cloud Retail* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *retail:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Retail* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *retail:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Retail* *v2* API can be found at the [official documentation site](https://cloud.google.com/recommendations). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/CloudRetail) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/CloudRetail) ... * projects - * [*locations catalogs attributes config add catalog attribute*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogAttributesConfigAddCatalogAttributeCall), [*locations catalogs attributes config remove catalog attribute*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogAttributesConfigRemoveCatalogAttributeCall), [*locations catalogs attributes config replace catalog attribute*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogAttributesConfigReplaceCatalogAttributeCall), [*locations catalogs branches operations get*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchOperationGetCall), [*locations catalogs branches products add fulfillment places*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductAddFulfillmentPlaceCall), [*locations catalogs branches products add local inventories*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductAddLocalInventoryCall), [*locations catalogs branches products 
create*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductCreateCall), [*locations catalogs branches products delete*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductDeleteCall), [*locations catalogs branches products get*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductGetCall), [*locations catalogs branches products import*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductImportCall), [*locations catalogs branches products list*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductListCall), [*locations catalogs branches products patch*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductPatchCall), [*locations catalogs branches products remove fulfillment places*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductRemoveFulfillmentPlaceCall), [*locations catalogs branches products remove local inventories*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductRemoveLocalInventoryCall), [*locations catalogs branches products set inventory*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogBranchProductSetInventoryCall), [*locations catalogs complete query*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogCompleteQueryCall), [*locations catalogs completion data import*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogCompletionDataImportCall), [*locations catalogs controls 
create*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogControlCreateCall), [*locations catalogs controls delete*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogControlDeleteCall), [*locations catalogs controls get*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogControlGetCall), [*locations catalogs controls list*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogControlListCall), [*locations catalogs controls patch*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogControlPatchCall), [*locations catalogs get attributes config*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogGetAttributesConfigCall), [*locations catalogs get completion config*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogGetCompletionConfigCall), [*locations catalogs get default branch*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogGetDefaultBranchCall), [*locations catalogs list*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogListCall), [*locations catalogs operations get*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogOperationGetCall), [*locations catalogs operations list*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogOperationListCall), [*locations catalogs patch*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogPatchCall), [*locations catalogs placements predict*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogPlacementPredictCall), [*locations catalogs placements 
search*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogPlacementSearchCall), [*locations catalogs serving configs add control*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigAddControlCall), [*locations catalogs serving configs create*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigCreateCall), [*locations catalogs serving configs delete*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigDeleteCall), [*locations catalogs serving configs get*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigGetCall), [*locations catalogs serving configs list*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigListCall), [*locations catalogs serving configs patch*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigPatchCall), [*locations catalogs serving configs predict*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigPredictCall), [*locations catalogs serving configs remove control*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigRemoveControlCall), [*locations catalogs serving configs search*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogServingConfigSearchCall), [*locations catalogs set default branch*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogSetDefaultBranchCall), [*locations catalogs update attributes config*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogUpdateAttributesConfigCall), [*locations catalogs 
update completion config*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogUpdateCompletionConfigCall), [*locations catalogs user events collect*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogUserEventCollectCall), [*locations catalogs user events import*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogUserEventImportCall), [*locations catalogs user events purge*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogUserEventPurgeCall), [*locations catalogs user events rejoin*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogUserEventRejoinCall), [*locations catalogs user events write*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationCatalogUserEventWriteCall), [*locations operations get*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectLocationOperationListCall), [*operations get*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectOperationGetCall) and [*operations list*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/api::ProjectOperationListCall) + * [*locations catalogs attributes config add catalog attribute*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogAttributesConfigAddCatalogAttributeCall), [*locations catalogs attributes config remove catalog attribute*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogAttributesConfigRemoveCatalogAttributeCall), [*locations catalogs attributes config replace catalog 
attribute*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogAttributesConfigReplaceCatalogAttributeCall), [*locations catalogs branches operations get*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchOperationGetCall), [*locations catalogs branches products add fulfillment places*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductAddFulfillmentPlaceCall), [*locations catalogs branches products add local inventories*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductAddLocalInventoryCall), [*locations catalogs branches products create*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductCreateCall), [*locations catalogs branches products delete*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductDeleteCall), [*locations catalogs branches products get*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductGetCall), [*locations catalogs branches products import*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductImportCall), [*locations catalogs branches products list*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductListCall), [*locations catalogs branches products patch*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductPatchCall), [*locations catalogs branches products remove fulfillment places*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductRemoveFulfillmentPlaceCall), [*locations catalogs branches products remove local 
inventories*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductRemoveLocalInventoryCall), [*locations catalogs branches products set inventory*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogBranchProductSetInventoryCall), [*locations catalogs complete query*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogCompleteQueryCall), [*locations catalogs completion data import*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogCompletionDataImportCall), [*locations catalogs controls create*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogControlCreateCall), [*locations catalogs controls delete*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogControlDeleteCall), [*locations catalogs controls get*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogControlGetCall), [*locations catalogs controls list*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogControlListCall), [*locations catalogs controls patch*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogControlPatchCall), [*locations catalogs get attributes config*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogGetAttributesConfigCall), [*locations catalogs get completion config*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogGetCompletionConfigCall), [*locations catalogs get default branch*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogGetDefaultBranchCall), [*locations catalogs list*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogListCall), [*locations catalogs operations 
get*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogOperationGetCall), [*locations catalogs operations list*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogOperationListCall), [*locations catalogs patch*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogPatchCall), [*locations catalogs placements predict*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogPlacementPredictCall), [*locations catalogs placements search*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogPlacementSearchCall), [*locations catalogs serving configs add control*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigAddControlCall), [*locations catalogs serving configs create*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigCreateCall), [*locations catalogs serving configs delete*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigDeleteCall), [*locations catalogs serving configs get*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigGetCall), [*locations catalogs serving configs list*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigListCall), [*locations catalogs serving configs patch*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigPatchCall), [*locations catalogs serving configs predict*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigPredictCall), [*locations catalogs serving configs remove control*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigRemoveControlCall), [*locations 
catalogs serving configs search*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogServingConfigSearchCall), [*locations catalogs set default branch*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogSetDefaultBranchCall), [*locations catalogs update attributes config*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogUpdateAttributesConfigCall), [*locations catalogs update completion config*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogUpdateCompletionConfigCall), [*locations catalogs user events collect*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogUserEventCollectCall), [*locations catalogs user events import*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogUserEventImportCall), [*locations catalogs user events purge*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogUserEventPurgeCall), [*locations catalogs user events rejoin*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogUserEventRejoinCall), [*locations catalogs user events write*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationCatalogUserEventWriteCall), [*locations operations get*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectLocationOperationListCall), [*operations get*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectOperationGetCall) and [*operations list*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/api::ProjectOperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured 
into the following primary items: -* **[Hub](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/CloudRetail)** +* **[Hub](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/CloudRetail)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::CallBuilder) -* **[Resources](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::CallBuilder) +* **[Resources](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::Part)** + * **[Parts](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -137,17 +137,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -157,29 +157,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::Delegate) to the -[Method Builder](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::Delegate) to the +[Method Builder](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::RequestValue) and -[decodable](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::RequestValue) and +[decodable](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-retail2/5.0.2-beta-1+20230117/google_retail2/client::RequestValue) are moved +* [request values](https://docs.rs/google-retail2/5.0.2+20230117/google_retail2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/retail2/src/api.rs b/gen/retail2/src/api.rs index dd4fea25e0..2892e1b9b2 100644 --- a/gen/retail2/src/api.rs +++ b/gen/retail2/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudRetail { CloudRetail { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://retail.googleapis.com/".to_string(), _root_url: "https://retail.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> CloudRetail { } /// Set the user-agent header field to use in all requests to the server. 
- /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/retail2/src/client.rs b/gen/retail2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/retail2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/retail2/src/lib.rs b/gen/retail2/src/lib.rs index a25fd7a879..25b28cfed1 100644 --- a/gen/retail2/src/lib.rs +++ b/gen/retail2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Retail* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *retail:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Retail* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *retail:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Retail* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/recommendations). diff --git a/gen/run1-cli/Cargo.toml b/gen/run1-cli/Cargo.toml index a0657a5aff..81cc246824 100644 --- a/gen/run1-cli/Cargo.toml +++ b/gen/run1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-run1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Run (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/run1-cli" @@ -20,13 +20,13 @@ name = "run1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-run1] path = "../run1" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/run1-cli/README.md b/gen/run1-cli/README.md index b9cd914f3b..8ac56d3de0 100644 --- a/gen/run1-cli/README.md +++ b/gen/run1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Run* API at revision *20220225*. 
The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Run* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash run1 [options] @@ -37,6 +37,7 @@ run1 [options] domainmappings-delete [-p ]... [-o ] domainmappings-get [-p ]... [-o ] domainmappings-list [-p ]... [-o ] + executions-cancel (-r )... [-p ]... [-o ] executions-delete [-p ]... [-o ] executions-get [-p ]... [-o ] executions-list [-p ]... [-o ] diff --git a/gen/run1-cli/mkdocs.yml b/gen/run1-cli/mkdocs.yml index 7ce1cccb84..1afc445caa 100644 --- a/gen/run1-cli/mkdocs.yml +++ b/gen/run1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Run v4.0.1+20220225 +site_name: Cloud Run v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-run1-cli site_description: A complete library to interact with Cloud Run (protocol v1) @@ -7,61 +7,64 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/run1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['namespaces_authorizeddomains-list.md', 'Namespaces', 'Authorizeddomains List'] -- ['namespaces_configurations-get.md', 'Namespaces', 'Configurations Get'] -- ['namespaces_configurations-list.md', 'Namespaces', 'Configurations List'] -- ['namespaces_domainmappings-create.md', 'Namespaces', 'Domainmappings Create'] -- ['namespaces_domainmappings-delete.md', 'Namespaces', 'Domainmappings Delete'] -- ['namespaces_domainmappings-get.md', 'Namespaces', 'Domainmappings Get'] -- ['namespaces_domainmappings-list.md', 'Namespaces', 'Domainmappings List'] -- ['namespaces_executions-delete.md', 'Namespaces', 'Executions Delete'] -- ['namespaces_executions-get.md', 'Namespaces', 'Executions Get'] -- ['namespaces_executions-list.md', 'Namespaces', 'Executions List'] -- ['namespaces_jobs-create.md', 'Namespaces', 'Jobs Create'] -- ['namespaces_jobs-delete.md', 'Namespaces', 'Jobs Delete'] -- ['namespaces_jobs-get.md', 'Namespaces', 'Jobs Get'] -- ['namespaces_jobs-list.md', 'Namespaces', 'Jobs 
List'] -- ['namespaces_jobs-replace-job.md', 'Namespaces', 'Jobs Replace Job'] -- ['namespaces_jobs-run.md', 'Namespaces', 'Jobs Run'] -- ['namespaces_revisions-delete.md', 'Namespaces', 'Revisions Delete'] -- ['namespaces_revisions-get.md', 'Namespaces', 'Revisions Get'] -- ['namespaces_revisions-list.md', 'Namespaces', 'Revisions List'] -- ['namespaces_routes-get.md', 'Namespaces', 'Routes Get'] -- ['namespaces_routes-list.md', 'Namespaces', 'Routes List'] -- ['namespaces_services-create.md', 'Namespaces', 'Services Create'] -- ['namespaces_services-delete.md', 'Namespaces', 'Services Delete'] -- ['namespaces_services-get.md', 'Namespaces', 'Services Get'] -- ['namespaces_services-list.md', 'Namespaces', 'Services List'] -- ['namespaces_services-replace-service.md', 'Namespaces', 'Services Replace Service'] -- ['namespaces_tasks-get.md', 'Namespaces', 'Tasks Get'] -- ['namespaces_tasks-list.md', 'Namespaces', 'Tasks List'] -- ['projects_authorizeddomains-list.md', 'Projects', 'Authorizeddomains List'] -- ['projects_locations-authorizeddomains-list.md', 'Projects', 'Locations Authorizeddomains List'] -- ['projects_locations-configurations-get.md', 'Projects', 'Locations Configurations Get'] -- ['projects_locations-configurations-list.md', 'Projects', 'Locations Configurations List'] -- ['projects_locations-domainmappings-create.md', 'Projects', 'Locations Domainmappings Create'] -- ['projects_locations-domainmappings-delete.md', 'Projects', 'Locations Domainmappings Delete'] -- ['projects_locations-domainmappings-get.md', 'Projects', 'Locations Domainmappings Get'] -- ['projects_locations-domainmappings-list.md', 'Projects', 'Locations Domainmappings List'] -- ['projects_locations-jobs-get-iam-policy.md', 'Projects', 'Locations Jobs Get Iam Policy'] -- ['projects_locations-jobs-set-iam-policy.md', 'Projects', 'Locations Jobs Set Iam Policy'] -- ['projects_locations-jobs-test-iam-permissions.md', 'Projects', 'Locations Jobs Test Iam Permissions'] -- 
['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-revisions-delete.md', 'Projects', 'Locations Revisions Delete'] -- ['projects_locations-revisions-get.md', 'Projects', 'Locations Revisions Get'] -- ['projects_locations-revisions-list.md', 'Projects', 'Locations Revisions List'] -- ['projects_locations-routes-get.md', 'Projects', 'Locations Routes Get'] -- ['projects_locations-routes-list.md', 'Projects', 'Locations Routes List'] -- ['projects_locations-services-create.md', 'Projects', 'Locations Services Create'] -- ['projects_locations-services-delete.md', 'Projects', 'Locations Services Delete'] -- ['projects_locations-services-get.md', 'Projects', 'Locations Services Get'] -- ['projects_locations-services-get-iam-policy.md', 'Projects', 'Locations Services Get Iam Policy'] -- ['projects_locations-services-list.md', 'Projects', 'Locations Services List'] -- ['projects_locations-services-replace-service.md', 'Projects', 'Locations Services Replace Service'] -- ['projects_locations-services-set-iam-policy.md', 'Projects', 'Locations Services Set Iam Policy'] -- ['projects_locations-services-test-iam-permissions.md', 'Projects', 'Locations Services Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Namespaces': + - 'Authorizeddomains List': 'namespaces_authorizeddomains-list.md' + - 'Configurations Get': 'namespaces_configurations-get.md' + - 'Configurations List': 'namespaces_configurations-list.md' + - 'Domainmappings Create': 'namespaces_domainmappings-create.md' + - 'Domainmappings Delete': 'namespaces_domainmappings-delete.md' + - 'Domainmappings Get': 'namespaces_domainmappings-get.md' + - 'Domainmappings List': 'namespaces_domainmappings-list.md' + - 'Executions Cancel': 'namespaces_executions-cancel.md' + - 'Executions Delete': 'namespaces_executions-delete.md' + - 'Executions Get': 'namespaces_executions-get.md' + - 'Executions List': 'namespaces_executions-list.md' + - 'Jobs Create': 'namespaces_jobs-create.md' + - 
'Jobs Delete': 'namespaces_jobs-delete.md' + - 'Jobs Get': 'namespaces_jobs-get.md' + - 'Jobs List': 'namespaces_jobs-list.md' + - 'Jobs Replace Job': 'namespaces_jobs-replace-job.md' + - 'Jobs Run': 'namespaces_jobs-run.md' + - 'Revisions Delete': 'namespaces_revisions-delete.md' + - 'Revisions Get': 'namespaces_revisions-get.md' + - 'Revisions List': 'namespaces_revisions-list.md' + - 'Routes Get': 'namespaces_routes-get.md' + - 'Routes List': 'namespaces_routes-list.md' + - 'Services Create': 'namespaces_services-create.md' + - 'Services Delete': 'namespaces_services-delete.md' + - 'Services Get': 'namespaces_services-get.md' + - 'Services List': 'namespaces_services-list.md' + - 'Services Replace Service': 'namespaces_services-replace-service.md' + - 'Tasks Get': 'namespaces_tasks-get.md' + - 'Tasks List': 'namespaces_tasks-list.md' +- 'Projects': + - 'Authorizeddomains List': 'projects_authorizeddomains-list.md' + - 'Locations Authorizeddomains List': 'projects_locations-authorizeddomains-list.md' + - 'Locations Configurations Get': 'projects_locations-configurations-get.md' + - 'Locations Configurations List': 'projects_locations-configurations-list.md' + - 'Locations Domainmappings Create': 'projects_locations-domainmappings-create.md' + - 'Locations Domainmappings Delete': 'projects_locations-domainmappings-delete.md' + - 'Locations Domainmappings Get': 'projects_locations-domainmappings-get.md' + - 'Locations Domainmappings List': 'projects_locations-domainmappings-list.md' + - 'Locations Jobs Get Iam Policy': 'projects_locations-jobs-get-iam-policy.md' + - 'Locations Jobs Set Iam Policy': 'projects_locations-jobs-set-iam-policy.md' + - 'Locations Jobs Test Iam Permissions': 'projects_locations-jobs-test-iam-permissions.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Revisions Delete': 'projects_locations-revisions-delete.md' + - 'Locations Revisions Get': 'projects_locations-revisions-get.md' + - 'Locations Revisions List': 
'projects_locations-revisions-list.md' + - 'Locations Routes Get': 'projects_locations-routes-get.md' + - 'Locations Routes List': 'projects_locations-routes-list.md' + - 'Locations Services Create': 'projects_locations-services-create.md' + - 'Locations Services Delete': 'projects_locations-services-delete.md' + - 'Locations Services Get': 'projects_locations-services-get.md' + - 'Locations Services Get Iam Policy': 'projects_locations-services-get-iam-policy.md' + - 'Locations Services List': 'projects_locations-services-list.md' + - 'Locations Services Replace Service': 'projects_locations-services-replace-service.md' + - 'Locations Services Set Iam Policy': 'projects_locations-services-set-iam-policy.md' + - 'Locations Services Test Iam Permissions': 'projects_locations-services-test-iam-permissions.md' theme: readthedocs diff --git a/gen/run1-cli/src/client.rs b/gen/run1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/run1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/run1-cli/src/main.rs b/gen/run1-cli/src/main.rs index 66e9e4ca73..73616df164 100644 --- a/gen/run1-cli/src/main.rs +++ b/gen/run1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_run1::{api, Error, oauth2}; +use google_run1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -169,19 +168,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| 
arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -470,19 +469,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -537,6 +536,90 @@ where } } + async fn _namespaces_executions_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CancelExecutionRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.namespaces().executions_cancel(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn 
_namespaces_executions_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.namespaces().executions_delete(opt.value_of("name").unwrap_or("")); @@ -658,19 +741,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -784,11 +867,12 @@ where "spec.template.spec.template.spec.service-account-name" => Some(("spec.template.spec.template.spec.serviceAccountName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.template.spec.template.spec.timeout-seconds" => Some(("spec.template.spec.template.spec.timeoutSeconds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.execution-count" => Some(("status.executionCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "status.latest-created-execution.completion-timestamp" => Some(("status.latestCreatedExecution.completionTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.latest-created-execution.creation-timestamp" => 
Some(("status.latestCreatedExecution.creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.latest-created-execution.name" => Some(("status.latestCreatedExecution.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.observed-generation" => Some(("status.observedGeneration", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "api-version", "cluster-name", "creation-timestamp", "deletion-grace-period-seconds", "deletion-timestamp", "execution-count", "finalizers", "generate-name", "generation", "kind", "labels", "latest-created-execution", "max-retries", "metadata", "name", "namespace", "observed-generation", "parallelism", "resource-version", "self-link", "service-account-name", "spec", "status", "task-count", "template", "timeout-seconds", "uid"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "api-version", "cluster-name", "completion-timestamp", "creation-timestamp", "deletion-grace-period-seconds", "deletion-timestamp", "execution-count", "finalizers", "generate-name", "generation", "kind", "labels", "latest-created-execution", "max-retries", "metadata", "name", "namespace", "observed-generation", "parallelism", "resource-version", "self-link", "service-account-name", "spec", "status", "task-count", "template", "timeout-seconds", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -969,19 +1053,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = 
call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -1095,11 +1179,12 @@ where "spec.template.spec.template.spec.service-account-name" => Some(("spec.template.spec.template.spec.serviceAccountName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "spec.template.spec.template.spec.timeout-seconds" => Some(("spec.template.spec.template.spec.timeoutSeconds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.execution-count" => Some(("status.executionCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "status.latest-created-execution.completion-timestamp" => Some(("status.latestCreatedExecution.completionTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.latest-created-execution.creation-timestamp" => Some(("status.latestCreatedExecution.creationTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.latest-created-execution.name" => Some(("status.latestCreatedExecution.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.observed-generation" => Some(("status.observedGeneration", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "api-version", "cluster-name", "creation-timestamp", "deletion-grace-period-seconds", "deletion-timestamp", "execution-count", "finalizers", 
"generate-name", "generation", "kind", "labels", "latest-created-execution", "max-retries", "metadata", "name", "namespace", "observed-generation", "parallelism", "resource-version", "self-link", "service-account-name", "spec", "status", "task-count", "template", "timeout-seconds", "uid"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "api-version", "cluster-name", "completion-timestamp", "creation-timestamp", "deletion-grace-period-seconds", "deletion-timestamp", "execution-count", "finalizers", "generate-name", "generation", "kind", "labels", "latest-created-execution", "max-retries", "metadata", "name", "namespace", "observed-generation", "parallelism", "resource-version", "self-link", "service-account-name", "spec", "status", "task-count", "template", "timeout-seconds", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1367,19 +1452,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -1493,19 +1578,19 @@ where let (key, value) = 
parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -1811,19 +1896,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -2064,19 +2149,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -2141,7 +2226,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2200,7 +2285,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2308,19 +2393,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", 
"integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -2609,19 +2694,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -2683,7 +2768,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2914,7 +2999,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3090,19 +3175,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -3216,19 +3301,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = 
call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -3534,7 +3619,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3590,19 +3675,19 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "watch" => { - call = call.watch(arg_from_str(value.unwrap_or("false"), err, "watch", "boolean")); + call = call.watch( value.map(|v| arg_from_str(v, err, "watch", "boolean")).unwrap_or(false)); }, "resource-version" => { call = call.resource_version(value.unwrap_or("")); }, "limit" => { - call = call.limit(arg_from_str(value.unwrap_or("-0"), err, "limit", "integer")); + call = call.limit( value.map(|v| arg_from_str(v, err, "limit", "int32")).unwrap_or(-0)); }, "label-selector" => { call = call.label_selector(value.unwrap_or("")); }, "include-uninitialized" => { - call = call.include_uninitialized(arg_from_str(value.unwrap_or("false"), err, "include-uninitialized", "boolean")); + call = call.include_uninitialized( value.map(|v| arg_from_str(v, err, "include-uninitialized", "boolean")).unwrap_or(false)); }, "field-selector" => { call = call.field_selector(value.unwrap_or("")); @@ -3984,6 +4069,9 @@ where ("domainmappings-list", Some(opt)) => { call_result = self._namespaces_domainmappings_list(opt, dry_run, &mut err).await; }, + ("executions-cancel", Some(opt)) => { + call_result = self._namespaces_executions_cancel(opt, 
dry_run, &mut err).await; + }, ("executions-delete", Some(opt)) => { call_result = self._namespaces_executions_delete(opt, dry_run, &mut err).await; }, @@ -4209,7 +4297,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("namespaces", "methods: 'authorizeddomains-list', 'configurations-get', 'configurations-list', 'domainmappings-create', 'domainmappings-delete', 'domainmappings-get', 'domainmappings-list', 'executions-delete', 'executions-get', 'executions-list', 'jobs-create', 'jobs-delete', 'jobs-get', 'jobs-list', 'jobs-replace-job', 'jobs-run', 'revisions-delete', 'revisions-get', 'revisions-list', 'routes-get', 'routes-list', 'services-create', 'services-delete', 'services-get', 'services-list', 'services-replace-service', 'tasks-get' and 'tasks-list'", vec![ + ("namespaces", "methods: 'authorizeddomains-list', 'configurations-get', 'configurations-list', 'domainmappings-create', 'domainmappings-delete', 'domainmappings-get', 'domainmappings-list', 'executions-cancel', 'executions-delete', 'executions-get', 'executions-list', 'jobs-create', 'jobs-delete', 'jobs-get', 'jobs-list', 'jobs-replace-job', 'jobs-run', 'revisions-delete', 'revisions-get', 'revisions-list', 'routes-get', 'routes-list', 'services-create', 'services-delete', 'services-get', 'services-list', 'services-replace-service', 'tasks-get' and 'tasks-list'", vec![ ("authorizeddomains-list", Some(r##"List authorized domains."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/namespaces_authorizeddomains-list", @@ -4238,7 +4326,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the configuration to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the configuration to retrieve. 
For Cloud Run, replace {namespace_id} with the project ID or number."##), Some(true), Some(false)), @@ -4260,7 +4348,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the configurations should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace from which the configurations should be listed. For Cloud Run, replace {namespace_id} with the project ID or number."##), Some(true), Some(false)), @@ -4282,7 +4370,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The namespace in which the domain mapping should be created. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace in which the domain mapping should be created. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4310,7 +4398,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the domain mapping to delete. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the domain mapping to delete. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4332,7 +4420,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the domain mapping to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the domain mapping to retrieve. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. 
For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4349,12 +4437,12 @@ async fn main() { Some(false)), ]), ("domainmappings-list", - Some(r##"List domain mappings."##), + Some(r##"List all domain mappings."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/namespaces_domainmappings-list", vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the domain mappings should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace from which the domain mappings should be listed. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4364,6 +4452,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("executions-cancel", + Some(r##"Cancel an execution."##), + "Details at http://byron.github.io/google-apis-rs/google_run1_cli/namespaces_executions-cancel", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the execution to cancel. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -4376,7 +4492,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the execution to delete. 
Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The name of the execution to delete. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4398,7 +4514,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the execution to retrieve. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The name of the execution to retrieve. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4420,7 +4536,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The namespace from which the executions should be listed. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The namespace from which the executions should be listed. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4442,7 +4558,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The namespace in which the job should be created. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The namespace in which the job should be created. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4470,7 +4586,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the job to delete. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The name of the job to delete. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. 
For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4492,7 +4608,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the job to retrieve. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The name of the job to retrieve. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4514,7 +4630,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The namespace from which the jobs should be listed. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The namespace from which the jobs should be listed. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4536,7 +4652,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the service being replaced. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The name of the job being replaced. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4564,7 +4680,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the job to run. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The name of the job to run. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4592,7 +4708,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the revision to delete. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the revision to delete. 
For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4614,7 +4730,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the revision to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the revision to retrieve. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4636,7 +4752,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the revisions should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace from which the revisions should be listed. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4658,7 +4774,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the route to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the route to retrieve. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4680,7 +4796,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the routes should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace from which the routes should be listed. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. 
For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4697,12 +4813,12 @@ async fn main() { Some(false)), ]), ("services-create", - Some(r##"Create a service."##), + Some(r##"Creates a new Service. Service creation will trigger a new deployment. Use GetService, and check service.status to determine if the Service is ready."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/namespaces_services-create", vec![ (Some(r##"parent"##), None, - Some(r##"The namespace in which the service should be created. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The resource's parent. In Cloud Run, it may be one of the following: * `{project_id_or_number}` * `namespaces/{project_id_or_number}` * `namespaces/{project_id_or_number}/services` * `projects/{project_id_or_number}/locations/{region}` * `projects/{project_id_or_number}/regions/{region}`"##), Some(true), Some(false)), @@ -4725,12 +4841,12 @@ async fn main() { Some(false)), ]), ("services-delete", - Some(r##"Delete a service. This will cause the Service to stop serving traffic and will delete the child entities like Routes, Configurations and Revisions."##), + Some(r##"Deletes the provided service. This will cause the Service to stop serving traffic and will delete all associated Revisions."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/namespaces_services-delete", vec![ (Some(r##"name"##), None, - Some(r##"The name of the service to delete. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The fully qualified name of the service to delete. 
It can be any of the following forms: * `namespaces/{project_id_or_number}/services/{service_name}` (only when the `endpoint` is regional) * `projects/{project_id_or_number}/locations/{region}/services/{service_name}` * `projects/{project_id_or_number}/regions/{region}/services/{service_name}`"##), Some(true), Some(false)), @@ -4747,12 +4863,12 @@ async fn main() { Some(false)), ]), ("services-get", - Some(r##"Get information about a service."##), + Some(r##"Gets information about a service."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/namespaces_services-get", vec![ (Some(r##"name"##), None, - Some(r##"The name of the service to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The fully qualified name of the service to retrieve. It can be any of the following forms: * `namespaces/{project_id_or_number}/services/{service_name}` (only when the `endpoint` is regional) * `projects/{project_id_or_number}/locations/{region}/services/{service_name}` * `projects/{project_id_or_number}/regions/{region}/services/{service_name}`"##), Some(true), Some(false)), @@ -4769,12 +4885,12 @@ async fn main() { Some(false)), ]), ("services-list", - Some(r##"List services."##), + Some(r##"Lists services for the given project and region."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/namespaces_services-list", vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the services should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The parent from where the resources should be listed. 
In Cloud Run, it may be one of the following: * `{project_id_or_number}` * `namespaces/{project_id_or_number}` * `namespaces/{project_id_or_number}/services` * `projects/{project_id_or_number}/locations/{region}` * `projects/{project_id_or_number}/regions/{region}`"##), Some(true), Some(false)), @@ -4791,12 +4907,12 @@ async fn main() { Some(false)), ]), ("services-replace-service", - Some(r##"Replace a service. Only the spec and metadata labels and annotations are modifiable. After the Update request, Cloud Run will work to make the 'status' match the requested 'spec'. May provide metadata.resourceVersion to enforce update from last read for optimistic concurrency control."##), + Some(r##"Replaces a service. Only the spec and metadata labels and annotations are modifiable. After the Update request, Cloud Run will work to make the 'status' match the requested 'spec'. May provide metadata.resourceVersion to enforce update from last read for optimistic concurrency control."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/namespaces_services-replace-service", vec![ (Some(r##"name"##), None, - Some(r##"The name of the service being replaced. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The fully qualified name of the service to replace. It can be any of the following forms: * `namespaces/{project_id_or_number}/services/{service_name}` (only when the `endpoint` is regional) * `projects/{project_id_or_number}/locations/{region}/services/{service_name}` * `projects/{project_id_or_number}/regions/{region}/services/{service_name}`"##), Some(true), Some(false)), @@ -4824,7 +4940,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the task to retrieve. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The name of the task to retrieve. Replace {namespace} with the project ID or number. 
It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4846,7 +4962,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The namespace from which the tasks should be listed. Replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The namespace from which the tasks should be listed. Replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4915,7 +5031,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the configuration to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the configuration to retrieve. For Cloud Run, replace {namespace_id} with the project ID or number."##), Some(true), Some(false)), @@ -4937,7 +5053,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the configurations should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace from which the configurations should be listed. For Cloud Run, replace {namespace_id} with the project ID or number."##), Some(true), Some(false)), @@ -4959,7 +5075,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The namespace in which the domain mapping should be created. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace in which the domain mapping should be created. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -4987,7 +5103,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the domain mapping to delete. 
For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the domain mapping to delete. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -5009,7 +5125,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the domain mapping to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the domain mapping to retrieve. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -5026,12 +5142,12 @@ async fn main() { Some(false)), ]), ("locations-domainmappings-list", - Some(r##"List domain mappings."##), + Some(r##"List all domain mappings."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/projects_locations-domainmappings-list", vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the domain mappings should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace from which the domain mappings should be listed. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -5053,7 +5169,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5075,7 +5191,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5103,7 +5219,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5153,7 +5269,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the revision to delete. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the revision to delete. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -5175,7 +5291,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the revision to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the revision to retrieve. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. 
For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -5197,7 +5313,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the revisions should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace from which the revisions should be listed. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -5219,7 +5335,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The name of the route to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The name of the route to retrieve. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -5241,7 +5357,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the routes should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"The namespace from which the routes should be listed. For Cloud Run (fully managed), replace {namespace} with the project ID or number. It takes the form namespaces/{namespace}. For example: namespaces/PROJECT_ID"##), Some(true), Some(false)), @@ -5258,12 +5374,12 @@ async fn main() { Some(false)), ]), ("locations-services-create", - Some(r##"Create a service."##), + Some(r##"Creates a new Service. Service creation will trigger a new deployment. Use GetService, and check service.status to determine if the Service is ready."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/projects_locations-services-create", vec![ (Some(r##"parent"##), None, - Some(r##"The namespace in which the service should be created. 
For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The resource's parent. In Cloud Run, it may be one of the following: * `{project_id_or_number}` * `namespaces/{project_id_or_number}` * `namespaces/{project_id_or_number}/services` * `projects/{project_id_or_number}/locations/{region}` * `projects/{project_id_or_number}/regions/{region}`"##), Some(true), Some(false)), @@ -5286,12 +5402,12 @@ async fn main() { Some(false)), ]), ("locations-services-delete", - Some(r##"Delete a service. This will cause the Service to stop serving traffic and will delete the child entities like Routes, Configurations and Revisions."##), + Some(r##"Deletes the provided service. This will cause the Service to stop serving traffic and will delete all associated Revisions."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/projects_locations-services-delete", vec![ (Some(r##"name"##), None, - Some(r##"The name of the service to delete. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The fully qualified name of the service to delete. It can be any of the following forms: * `namespaces/{project_id_or_number}/services/{service_name}` (only when the `endpoint` is regional) * `projects/{project_id_or_number}/locations/{region}/services/{service_name}` * `projects/{project_id_or_number}/regions/{region}/services/{service_name}`"##), Some(true), Some(false)), @@ -5308,12 +5424,12 @@ async fn main() { Some(false)), ]), ("locations-services-get", - Some(r##"Get information about a service."##), + Some(r##"Gets information about a service."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/projects_locations-services-get", vec![ (Some(r##"name"##), None, - Some(r##"The name of the service to retrieve. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. 
The fully qualified name of the service to retrieve. It can be any of the following forms: * `namespaces/{project_id_or_number}/services/{service_name}` (only when the `endpoint` is regional) * `projects/{project_id_or_number}/locations/{region}/services/{service_name}` * `projects/{project_id_or_number}/regions/{region}/services/{service_name}`"##), Some(true), Some(false)), @@ -5330,12 +5446,12 @@ async fn main() { Some(false)), ]), ("locations-services-get-iam-policy", - Some(r##"Get the IAM Access Control policy currently in effect for the given Cloud Run service. This result does not include any inherited policies."##), + Some(r##"Gets the IAM Access Control policy currently in effect for the given Cloud Run service. This result does not include any inherited policies."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/projects_locations-services-get-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5352,12 +5468,12 @@ async fn main() { Some(false)), ]), ("locations-services-list", - Some(r##"List services."##), + Some(r##"Lists services for the given project and region."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/projects_locations-services-list", vec![ (Some(r##"parent"##), None, - Some(r##"The namespace from which the services should be listed. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The parent from where the resources should be listed. 
In Cloud Run, it may be one of the following: * `{project_id_or_number}` * `namespaces/{project_id_or_number}` * `namespaces/{project_id_or_number}/services` * `projects/{project_id_or_number}/locations/{region}` * `projects/{project_id_or_number}/regions/{region}`"##), Some(true), Some(false)), @@ -5374,12 +5490,12 @@ async fn main() { Some(false)), ]), ("locations-services-replace-service", - Some(r##"Replace a service. Only the spec and metadata labels and annotations are modifiable. After the Update request, Cloud Run will work to make the 'status' match the requested 'spec'. May provide metadata.resourceVersion to enforce update from last read for optimistic concurrency control."##), + Some(r##"Replaces a service. Only the spec and metadata labels and annotations are modifiable. After the Update request, Cloud Run will work to make the 'status' match the requested 'spec'. May provide metadata.resourceVersion to enforce update from last read for optimistic concurrency control."##), "Details at http://byron.github.io/google-apis-rs/google_run1_cli/projects_locations-services-replace-service", vec![ (Some(r##"name"##), None, - Some(r##"The name of the service being replaced. For Cloud Run (fully managed), replace {namespace_id} with the project ID or number."##), + Some(r##"Required. The fully qualified name of the service to replace. It can be any of the following forms: * `namespaces/{project_id_or_number}/services/{service_name}` (only when the `endpoint` is regional) * `projects/{project_id_or_number}/locations/{region}/services/{service_name}` * `projects/{project_id_or_number}/regions/{region}/services/{service_name}`"##), Some(true), Some(false)), @@ -5407,7 +5523,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5435,7 +5551,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -5463,7 +5579,7 @@ async fn main() { let mut app = App::new("run1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("Deploy and manage user provided container images that scale automatically based on incoming requests. The Cloud Run Admin API v1 follows the Knative Serving API specification, while v2 is aligned with Google Cloud AIP-based API standards, as described in https://google.aip.dev/.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_run1_cli") .arg(Arg::with_name("url") diff --git a/gen/run1/Cargo.toml b/gen/run1/Cargo.toml index f11caab6c4..046a381525 100644 --- a/gen/run1/Cargo.toml +++ b/gen/run1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-run1" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Run (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/run1" homepage = "https://cloud.google.com/run/" -documentation = "https://docs.rs/google-run1/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-run1/5.0.2+20230113" license = "MIT" keywords = ["run", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/run1/README.md b/gen/run1/README.md index fedf1f77d0..b850cb4505 100644 --- 
a/gen/run1/README.md +++ b/gen/run1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-run1` library allows access to all features of the *Google Cloud Run* service. -This documentation was generated from *Cloud Run* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *run:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Run* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *run:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Run* *v1* API can be found at the [official documentation site](https://cloud.google.com/run/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/CloudRun) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-run1/5.0.2+20230113/google_run1/CloudRun) ... 
* namespaces - * [*authorizeddomains list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceAuthorizeddomainListCall), [*configurations get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceConfigurationGetCall), [*configurations list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceConfigurationListCall), [*domainmappings create*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceDomainmappingCreateCall), [*domainmappings delete*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceDomainmappingDeleteCall), [*domainmappings get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceDomainmappingGetCall), [*domainmappings list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceDomainmappingListCall), [*executions cancel*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceExecutionCancelCall), [*executions delete*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceExecutionDeleteCall), [*executions get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceExecutionGetCall), [*executions list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceExecutionListCall), [*jobs create*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceJobCreateCall), [*jobs delete*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceJobDeleteCall), [*jobs get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceJobGetCall), [*jobs list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceJobListCall), [*jobs replace job*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceJobReplaceJobCall), [*jobs 
run*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceJobRunCall), [*revisions delete*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceRevisionDeleteCall), [*revisions get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceRevisionGetCall), [*revisions list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceRevisionListCall), [*routes get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceRouteGetCall), [*routes list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceRouteListCall), [*services create*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceServiceCreateCall), [*services delete*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceServiceDeleteCall), [*services get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceServiceGetCall), [*services list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceServiceListCall), [*services replace service*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceServiceReplaceServiceCall), [*tasks get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceTaskGetCall) and [*tasks list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::NamespaceTaskListCall) + * [*authorizeddomains list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceAuthorizeddomainListCall), [*configurations get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceConfigurationGetCall), [*configurations list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceConfigurationListCall), [*domainmappings create*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceDomainmappingCreateCall), [*domainmappings 
delete*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceDomainmappingDeleteCall), [*domainmappings get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceDomainmappingGetCall), [*domainmappings list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceDomainmappingListCall), [*executions cancel*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceExecutionCancelCall), [*executions delete*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceExecutionDeleteCall), [*executions get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceExecutionGetCall), [*executions list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceExecutionListCall), [*jobs create*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceJobCreateCall), [*jobs delete*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceJobDeleteCall), [*jobs get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceJobGetCall), [*jobs list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceJobListCall), [*jobs replace job*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceJobReplaceJobCall), [*jobs run*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceJobRunCall), [*revisions delete*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceRevisionDeleteCall), [*revisions get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceRevisionGetCall), [*revisions list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceRevisionListCall), [*routes get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceRouteGetCall), [*routes list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceRouteListCall), [*services 
create*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceServiceCreateCall), [*services delete*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceServiceDeleteCall), [*services get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceServiceGetCall), [*services list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceServiceListCall), [*services replace service*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceServiceReplaceServiceCall), [*tasks get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceTaskGetCall) and [*tasks list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::NamespaceTaskListCall) * projects - * [*authorizeddomains list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectAuthorizeddomainListCall), [*locations authorizeddomains list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationAuthorizeddomainListCall), [*locations configurations get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationConfigurationGetCall), [*locations configurations list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationConfigurationListCall), [*locations domainmappings create*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationDomainmappingCreateCall), [*locations domainmappings delete*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationDomainmappingDeleteCall), [*locations domainmappings get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationDomainmappingGetCall), [*locations domainmappings list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationDomainmappingListCall), [*locations jobs get iam 
policy*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationJobGetIamPolicyCall), [*locations jobs set iam policy*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationJobSetIamPolicyCall), [*locations jobs test iam permissions*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationJobTestIamPermissionCall), [*locations list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationListCall), [*locations revisions delete*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationRevisionDeleteCall), [*locations revisions get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationRevisionGetCall), [*locations revisions list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationRevisionListCall), [*locations routes get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationRouteGetCall), [*locations routes list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationRouteListCall), [*locations services create*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationServiceCreateCall), [*locations services delete*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationServiceDeleteCall), [*locations services get*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationServiceGetCall), [*locations services get iam policy*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationServiceGetIamPolicyCall), [*locations services list*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationServiceListCall), [*locations services replace service*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationServiceReplaceServiceCall), [*locations services set iam 
policy*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationServiceSetIamPolicyCall) and [*locations services test iam permissions*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/api::ProjectLocationServiceTestIamPermissionCall) + * [*authorizeddomains list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectAuthorizeddomainListCall), [*locations authorizeddomains list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationAuthorizeddomainListCall), [*locations configurations get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationConfigurationGetCall), [*locations configurations list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationConfigurationListCall), [*locations domainmappings create*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationDomainmappingCreateCall), [*locations domainmappings delete*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationDomainmappingDeleteCall), [*locations domainmappings get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationDomainmappingGetCall), [*locations domainmappings list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationDomainmappingListCall), [*locations jobs get iam policy*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationJobGetIamPolicyCall), [*locations jobs set iam policy*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationJobSetIamPolicyCall), [*locations jobs test iam permissions*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationJobTestIamPermissionCall), [*locations list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationListCall), [*locations revisions delete*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationRevisionDeleteCall), [*locations revisions 
get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationRevisionGetCall), [*locations revisions list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationRevisionListCall), [*locations routes get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationRouteGetCall), [*locations routes list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationRouteListCall), [*locations services create*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationServiceCreateCall), [*locations services delete*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationServiceDeleteCall), [*locations services get*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationServiceGetCall), [*locations services get iam policy*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationServiceGetIamPolicyCall), [*locations services list*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationServiceListCall), [*locations services replace service*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationServiceReplaceServiceCall), [*locations services set iam policy*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationServiceSetIamPolicyCall) and [*locations services test iam permissions*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/api::ProjectLocationServiceTestIamPermissionCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/CloudRun)** +* **[Hub](https://docs.rs/google-run1/5.0.2+20230113/google_run1/CloudRun)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::CallBuilder) -* **[Resources](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::CallBuilder) +* **[Resources](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::Part)** + * **[Parts](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::Delegate) to the -[Method Builder](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::Delegate) to the +[Method Builder](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::RequestValue) and -[decodable](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::RequestValue) and +[decodable](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-run1/5.0.2-beta-1+20230113/google_run1/client::RequestValue) are moved +* [request values](https://docs.rs/google-run1/5.0.2+20230113/google_run1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/run1/src/api.rs b/gen/run1/src/api.rs index 33e5b02336..2aa2c33c40 100644 --- a/gen/run1/src/api.rs +++ b/gen/run1/src/api.rs @@ -123,7 +123,7 @@ impl<'a, S> CloudRun { CloudRun { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://run.googleapis.com/".to_string(), _root_url: "https://run.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudRun { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/run1/src/client.rs b/gen/run1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/run1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/run1/src/lib.rs b/gen/run1/src/lib.rs index 29789a6d3a..f442a924bb 100644 --- a/gen/run1/src/lib.rs +++ b/gen/run1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Run* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *run:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Run* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *run:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Run* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/run/). diff --git a/gen/run2-cli/Cargo.toml b/gen/run2-cli/Cargo.toml index 1296de5d97..d3558d6791 100644 --- a/gen/run2-cli/Cargo.toml +++ b/gen/run2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-run2-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Run (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/run2-cli" @@ -20,13 +20,13 @@ name = "run2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-run2] path = "../run2" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/run2-cli/README.md b/gen/run2-cli/README.md index 09af306ca2..59002058b4 100644 --- a/gen/run2-cli/README.md +++ b/gen/run2-cli/README.md @@ -25,14 +25,29 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Run* API at revision *20220225*. 
The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Run* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash run2 [options] projects + locations-jobs-create (-r )... [-p ]... [-o ] + locations-jobs-delete [-p ]... [-o ] + locations-jobs-executions-delete [-p ]... [-o ] + locations-jobs-executions-get [-p ]... [-o ] + locations-jobs-executions-list [-p ]... [-o ] + locations-jobs-executions-tasks-get [-p ]... [-o ] + locations-jobs-executions-tasks-list [-p ]... [-o ] + locations-jobs-get [-p ]... [-o ] + locations-jobs-get-iam-policy [-p ]... [-o ] + locations-jobs-list [-p ]... [-o ] + locations-jobs-patch (-r )... [-p ]... [-o ] + locations-jobs-run (-r )... [-p ]... [-o ] + locations-jobs-set-iam-policy (-r )... [-p ]... [-o ] + locations-jobs-test-iam-permissions (-r )... [-p ]... [-o ] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... [-o ] locations-operations-list [-p ]... [-o ] + locations-operations-wait (-r )... [-p ]... [-o ] locations-services-create (-r )... [-p ]... [-o ] locations-services-delete [-p ]... [-o ] locations-services-get [-p ]... 
[-o ] diff --git a/gen/run2-cli/mkdocs.yml b/gen/run2-cli/mkdocs.yml index aaadfa32b9..707622b316 100644 --- a/gen/run2-cli/mkdocs.yml +++ b/gen/run2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Run v4.0.1+20220225 +site_name: Cloud Run v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-run2-cli site_description: A complete library to interact with Cloud Run (protocol v2) @@ -7,22 +7,38 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/run2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-services-create.md', 'Projects', 'Locations Services Create'] -- ['projects_locations-services-delete.md', 'Projects', 'Locations Services Delete'] -- ['projects_locations-services-get.md', 'Projects', 'Locations Services Get'] -- ['projects_locations-services-get-iam-policy.md', 'Projects', 'Locations Services Get Iam Policy'] -- ['projects_locations-services-list.md', 'Projects', 'Locations Services List'] -- ['projects_locations-services-patch.md', 'Projects', 'Locations Services Patch'] -- ['projects_locations-services-revisions-delete.md', 'Projects', 'Locations Services Revisions Delete'] -- ['projects_locations-services-revisions-get.md', 'Projects', 'Locations Services Revisions Get'] -- ['projects_locations-services-revisions-list.md', 'Projects', 'Locations Services Revisions List'] -- ['projects_locations-services-set-iam-policy.md', 'Projects', 'Locations Services Set Iam Policy'] -- ['projects_locations-services-test-iam-permissions.md', 'Projects', 'Locations Services Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Jobs Create': 'projects_locations-jobs-create.md' + - 'Locations Jobs Delete': 
'projects_locations-jobs-delete.md' + - 'Locations Jobs Executions Delete': 'projects_locations-jobs-executions-delete.md' + - 'Locations Jobs Executions Get': 'projects_locations-jobs-executions-get.md' + - 'Locations Jobs Executions List': 'projects_locations-jobs-executions-list.md' + - 'Locations Jobs Executions Tasks Get': 'projects_locations-jobs-executions-tasks-get.md' + - 'Locations Jobs Executions Tasks List': 'projects_locations-jobs-executions-tasks-list.md' + - 'Locations Jobs Get': 'projects_locations-jobs-get.md' + - 'Locations Jobs Get Iam Policy': 'projects_locations-jobs-get-iam-policy.md' + - 'Locations Jobs List': 'projects_locations-jobs-list.md' + - 'Locations Jobs Patch': 'projects_locations-jobs-patch.md' + - 'Locations Jobs Run': 'projects_locations-jobs-run.md' + - 'Locations Jobs Set Iam Policy': 'projects_locations-jobs-set-iam-policy.md' + - 'Locations Jobs Test Iam Permissions': 'projects_locations-jobs-test-iam-permissions.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Operations Wait': 'projects_locations-operations-wait.md' + - 'Locations Services Create': 'projects_locations-services-create.md' + - 'Locations Services Delete': 'projects_locations-services-delete.md' + - 'Locations Services Get': 'projects_locations-services-get.md' + - 'Locations Services Get Iam Policy': 'projects_locations-services-get-iam-policy.md' + - 'Locations Services List': 'projects_locations-services-list.md' + - 'Locations Services Patch': 'projects_locations-services-patch.md' + - 'Locations Services Revisions Delete': 'projects_locations-services-revisions-delete.md' + - 'Locations Services Revisions Get': 'projects_locations-services-revisions-get.md' + - 'Locations Services Revisions List': 'projects_locations-services-revisions-list.md' + - 'Locations 
Services Set Iam Policy': 'projects_locations-services-set-iam-policy.md' + - 'Locations Services Test Iam Permissions': 'projects_locations-services-test-iam-permissions.md' theme: readthedocs diff --git a/gen/run2-cli/src/client.rs b/gen/run2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/run2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/run2-cli/src/main.rs b/gen/run2-cli/src/main.rs index 3b6dcb0aa4..536d36c9ab 100644 --- a/gen/run2-cli/src/main.rs +++ b/gen/run2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_run2::{api, Error, oauth2}; +use google_run2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,1046 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _projects_locations_jobs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "binary-authorization.breakglass-justification" => Some(("binaryAuthorization.breakglassJustification", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "binary-authorization.use-default" => Some(("binaryAuthorization.useDefault", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "client" => Some(("client", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "client-version" => Some(("clientVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creator" => Some(("creator", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "delete-time" => Some(("deleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-count" => Some(("executionCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "expire-time" => Some(("expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "generation" => Some(("generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "last-modifier" => Some(("lastModifier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "latest-created-execution.completion-time" => Some(("latestCreatedExecution.completionTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "latest-created-execution.create-time" => Some(("latestCreatedExecution.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "latest-created-execution.name" => Some(("latestCreatedExecution.name", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "launch-stage" => Some(("launchStage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "observed-generation" => Some(("observedGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "reconciling" => Some(("reconciling", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "template.annotations" => Some(("template.annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "template.labels" => Some(("template.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "template.parallelism" => Some(("template.parallelism", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "template.task-count" => Some(("template.taskCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "template.template.encryption-key" => Some(("template.template.encryptionKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.execution-environment" => Some(("template.template.executionEnvironment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.max-retries" => Some(("template.template.maxRetries", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "template.template.service-account" => Some(("template.template.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.timeout" => Some(("template.template.timeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.vpc-access.connector" => Some(("template.template.vpcAccess.connector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.vpc-access.egress" => Some(("template.template.vpcAccess.egress", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.execution-reason" => Some(("terminalCondition.executionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.last-transition-time" => Some(("terminalCondition.lastTransitionTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.message" => Some(("terminalCondition.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.reason" => Some(("terminalCondition.reason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.revision-reason" => Some(("terminalCondition.revisionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.severity" => Some(("terminalCondition.severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.state" => Some(("terminalCondition.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.type" => Some(("terminalCondition.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "binary-authorization", "breakglass-justification", "client", "client-version", "completion-time", "connector", "create-time", "creator", "delete-time", "egress", "encryption-key", "etag", "execution-count", "execution-environment", "execution-reason", "expire-time", "generation", "labels", "last-modifier", "last-transition-time", "latest-created-execution", "launch-stage", "max-retries", "message", "name", "observed-generation", "parallelism", "reason", "reconciling", "revision-reason", 
"service-account", "severity", "state", "task-count", "template", "terminal-condition", "timeout", "type", "uid", "update-time", "use-default", "vpc-access"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRunV2Job = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_jobs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "job-id" => { + call = call.job_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["job-id", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() 
+ } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_jobs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "etag" => { + call = call.etag(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["etag", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let 
mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_executions_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_jobs_executions_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "etag" => { + call = call.etag(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["etag", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_executions_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_jobs_executions_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_executions_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = 
self.hub.projects().locations_jobs_executions_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "show-deleted" => { + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_executions_tasks_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: 
&mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_jobs_executions_tasks_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_executions_tasks_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_jobs_executions_tasks_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + 
"show-deleted" => { + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_jobs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = 
parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_jobs_get_iam_policy(opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "options-requested-policy-version" => { + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + 
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["options-requested-policy-version"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_jobs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "show-deleted" => { + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); + }, + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + 
if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token", "show-deleted"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: 
Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "binary-authorization.breakglass-justification" => Some(("binaryAuthorization.breakglassJustification", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "binary-authorization.use-default" => Some(("binaryAuthorization.useDefault", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "client" => Some(("client", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "client-version" => Some(("clientVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "creator" => Some(("creator", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "delete-time" => Some(("deleteTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "execution-count" => Some(("executionCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "expire-time" => Some(("expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "generation" => Some(("generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "last-modifier" => Some(("lastModifier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "latest-created-execution.completion-time" => Some(("latestCreatedExecution.completionTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "latest-created-execution.create-time" => Some(("latestCreatedExecution.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"latest-created-execution.name" => Some(("latestCreatedExecution.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "launch-stage" => Some(("launchStage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "observed-generation" => Some(("observedGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "reconciling" => Some(("reconciling", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "template.annotations" => Some(("template.annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "template.labels" => Some(("template.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "template.parallelism" => Some(("template.parallelism", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "template.task-count" => Some(("template.taskCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "template.template.encryption-key" => Some(("template.template.encryptionKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.execution-environment" => Some(("template.template.executionEnvironment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.max-retries" => Some(("template.template.maxRetries", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "template.template.service-account" => Some(("template.template.serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.timeout" => Some(("template.template.timeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "template.template.vpc-access.connector" => Some(("template.template.vpcAccess.connector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"template.template.vpc-access.egress" => Some(("template.template.vpcAccess.egress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.execution-reason" => Some(("terminalCondition.executionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.last-transition-time" => Some(("terminalCondition.lastTransitionTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.message" => Some(("terminalCondition.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.reason" => Some(("terminalCondition.reason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.revision-reason" => Some(("terminalCondition.revisionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.severity" => Some(("terminalCondition.severity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.state" => Some(("terminalCondition.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terminal-condition.type" => Some(("terminalCondition.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "binary-authorization", "breakglass-justification", "client", "client-version", "completion-time", "connector", "create-time", "creator", "delete-time", "egress", "encryption-key", "etag", "execution-count", "execution-environment", "execution-reason", "expire-time", "generation", "labels", "last-modifier", "last-transition-time", "latest-created-execution", "launch-stage", "max-retries", "message", "name", 
"observed-generation", "parallelism", "reason", "reconciling", "revision-reason", "service-account", "severity", "state", "task-count", "template", "terminal-condition", "timeout", "type", "uid", "update-time", "use-default", "vpc-access"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRunV2Job = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_jobs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "validate-only" => { + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); + }, + "allow-missing" => { + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["allow-missing", "validate-only"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_run(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "validate-only" => Some(("validateOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "validate-only"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleCloudRunV2RunJobRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_jobs_run(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value 
= json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_jobs_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => 
call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_jobs_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIamV1TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_jobs_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_operations_delete(opt.value_of("name").unwrap_or("")); @@ -165,7 +1204,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -217,6 +1256,91 @@ where } } + async fn _projects_locations_operations_wait(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "timeout" => Some(("timeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["timeout"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleLongrunningWaitOperationRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_operations_wait(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + 
json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_services_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -262,11 +1386,10 @@ where "observed-generation" => Some(("observedGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "reconciling" => Some(("reconciling", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "template.annotations" => Some(("template.annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - "template.confidential" => Some(("template.confidential", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), - "template.container-concurrency" => Some(("template.containerConcurrency", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "template.encryption-key" => Some(("template.encryptionKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.execution-environment" => Some(("template.executionEnvironment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.labels" => Some(("template.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "template.max-instance-request-concurrency" => Some(("template.maxInstanceRequestConcurrency", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "template.revision" => Some(("template.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.scaling.max-instance-count" => Some(("template.scaling.maxInstanceCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "template.scaling.min-instance-count" => Some(("template.scaling.minInstanceCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -274,9 +1397,7 @@ where "template.timeout" => Some(("template.timeout", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "template.vpc-access.connector" => Some(("template.vpcAccess.connector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.vpc-access.egress" => Some(("template.vpcAccess.egress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "terminal-condition.domain-mapping-reason" => Some(("terminalCondition.domainMappingReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "terminal-condition.execution-reason" => Some(("terminalCondition.executionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "terminal-condition.internal-reason" => Some(("terminalCondition.internalReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "terminal-condition.last-transition-time" => Some(("terminalCondition.lastTransitionTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "terminal-condition.message" => Some(("terminalCondition.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "terminal-condition.reason" => Some(("terminalCondition.reason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -288,7 +1409,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uri" => Some(("uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "binary-authorization", "breakglass-justification", "client", "client-version", "confidential", "connector", "container-concurrency", "create-time", "creator", "delete-time", "description", "domain-mapping-reason", "egress", "encryption-key", "etag", "execution-environment", "execution-reason", "expire-time", "generation", "ingress", "internal-reason", "labels", "last-modifier", "last-transition-time", "latest-created-revision", "latest-ready-revision", "launch-stage", 
"max-instance-count", "message", "min-instance-count", "name", "observed-generation", "reason", "reconciling", "revision", "revision-reason", "scaling", "service-account", "severity", "state", "template", "terminal-condition", "timeout", "type", "uid", "update-time", "uri", "use-default", "vpc-access"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "binary-authorization", "breakglass-justification", "client", "client-version", "connector", "create-time", "creator", "delete-time", "description", "egress", "encryption-key", "etag", "execution-environment", "execution-reason", "expire-time", "generation", "ingress", "labels", "last-modifier", "last-transition-time", "latest-created-revision", "latest-ready-revision", "launch-stage", "max-instance-count", "max-instance-request-concurrency", "message", "min-instance-count", "name", "observed-generation", "reason", "reconciling", "revision", "revision-reason", "scaling", "service-account", "severity", "state", "template", "terminal-condition", "timeout", "type", "uid", "update-time", "uri", "use-default", "vpc-access"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -297,13 +1418,13 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::GoogleCloudRunOpV2Service = json::value::from_value(object).unwrap(); + let mut request: api::GoogleCloudRunV2Service = json::value::from_value(object).unwrap(); let mut call = self.hub.projects().locations_services_create(request, opt.value_of("parent").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| 
arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "service-id" => { call = call.service_id(value.unwrap_or("")); @@ -362,7 +1483,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "etag" => { call = call.etag(value.unwrap_or("")); @@ -473,7 +1594,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -529,13 +1650,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -629,11 +1750,10 @@ where "observed-generation" => Some(("observedGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "reconciling" => Some(("reconciling", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "template.annotations" => Some(("template.annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), - 
"template.confidential" => Some(("template.confidential", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), - "template.container-concurrency" => Some(("template.containerConcurrency", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "template.encryption-key" => Some(("template.encryptionKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.execution-environment" => Some(("template.executionEnvironment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.labels" => Some(("template.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "template.max-instance-request-concurrency" => Some(("template.maxInstanceRequestConcurrency", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "template.revision" => Some(("template.revision", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.scaling.max-instance-count" => Some(("template.scaling.maxInstanceCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "template.scaling.min-instance-count" => Some(("template.scaling.minInstanceCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -641,9 +1761,7 @@ where "template.timeout" => Some(("template.timeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.vpc-access.connector" => Some(("template.vpcAccess.connector", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "template.vpc-access.egress" => Some(("template.vpcAccess.egress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - "terminal-condition.domain-mapping-reason" => Some(("terminalCondition.domainMappingReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "terminal-condition.execution-reason" => Some(("terminalCondition.executionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), - 
"terminal-condition.internal-reason" => Some(("terminalCondition.internalReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "terminal-condition.last-transition-time" => Some(("terminalCondition.lastTransitionTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "terminal-condition.message" => Some(("terminalCondition.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "terminal-condition.reason" => Some(("terminalCondition.reason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -655,7 +1773,7 @@ where "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "uri" => Some(("uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "binary-authorization", "breakglass-justification", "client", "client-version", "confidential", "connector", "container-concurrency", "create-time", "creator", "delete-time", "description", "domain-mapping-reason", "egress", "encryption-key", "etag", "execution-environment", "execution-reason", "expire-time", "generation", "ingress", "internal-reason", "labels", "last-modifier", "last-transition-time", "latest-created-revision", "latest-ready-revision", "launch-stage", "max-instance-count", "message", "min-instance-count", "name", "observed-generation", "reason", "reconciling", "revision", "revision-reason", "scaling", "service-account", "severity", "state", "template", "terminal-condition", "timeout", "type", "uid", "update-time", "uri", "use-default", "vpc-access"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "binary-authorization", "breakglass-justification", "client", "client-version", "connector", "create-time", "creator", "delete-time", "description", "egress", "encryption-key", "etag", "execution-environment", "execution-reason", "expire-time", "generation", 
"ingress", "labels", "last-modifier", "last-transition-time", "latest-created-revision", "latest-ready-revision", "launch-stage", "max-instance-count", "max-instance-request-concurrency", "message", "min-instance-count", "name", "observed-generation", "reason", "reconciling", "revision", "revision-reason", "scaling", "service-account", "severity", "state", "template", "terminal-condition", "timeout", "type", "uid", "update-time", "uri", "use-default", "vpc-access"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -664,19 +1782,16 @@ where FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } - let mut request: api::GoogleCloudRunOpV2Service = json::value::from_value(object).unwrap(); + let mut request: api::GoogleCloudRunV2Service = json::value::from_value(object).unwrap(); let mut call = self.hub.projects().locations_services_patch(request, opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); - }, - "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -691,7 +1806,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["allow-missing", "update-mask", "validate-only"].iter().map(|v|*v)); + 
v.extend(["allow-missing", "validate-only"].iter().map(|v|*v)); v } )); } } @@ -732,7 +1847,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "validate-only" => { - call = call.validate_only(arg_from_str(value.unwrap_or("false"), err, "validate-only", "boolean")); + call = call.validate_only( value.map(|v| arg_from_str(v, err, "validate-only", "boolean")).unwrap_or(false)); }, "etag" => { call = call.etag(value.unwrap_or("")); @@ -843,13 +1958,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1077,6 +2192,48 @@ where match self.opt.subcommand() { ("projects", Some(opt)) => { match opt.subcommand() { + ("locations-jobs-create", Some(opt)) => { + call_result = self._projects_locations_jobs_create(opt, dry_run, &mut err).await; + }, + ("locations-jobs-delete", Some(opt)) => { + call_result = self._projects_locations_jobs_delete(opt, dry_run, &mut err).await; + }, + ("locations-jobs-executions-delete", Some(opt)) => { + call_result = self._projects_locations_jobs_executions_delete(opt, dry_run, &mut err).await; + }, + ("locations-jobs-executions-get", Some(opt)) => { + call_result = self._projects_locations_jobs_executions_get(opt, dry_run, &mut err).await; + }, + ("locations-jobs-executions-list", Some(opt)) => { + call_result = self._projects_locations_jobs_executions_list(opt, dry_run, &mut err).await; + }, + ("locations-jobs-executions-tasks-get", Some(opt)) => { + call_result = 
self._projects_locations_jobs_executions_tasks_get(opt, dry_run, &mut err).await; + }, + ("locations-jobs-executions-tasks-list", Some(opt)) => { + call_result = self._projects_locations_jobs_executions_tasks_list(opt, dry_run, &mut err).await; + }, + ("locations-jobs-get", Some(opt)) => { + call_result = self._projects_locations_jobs_get(opt, dry_run, &mut err).await; + }, + ("locations-jobs-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_jobs_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-jobs-list", Some(opt)) => { + call_result = self._projects_locations_jobs_list(opt, dry_run, &mut err).await; + }, + ("locations-jobs-patch", Some(opt)) => { + call_result = self._projects_locations_jobs_patch(opt, dry_run, &mut err).await; + }, + ("locations-jobs-run", Some(opt)) => { + call_result = self._projects_locations_jobs_run(opt, dry_run, &mut err).await; + }, + ("locations-jobs-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_jobs_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-jobs-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_jobs_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-operations-delete", Some(opt)) => { call_result = self._projects_locations_operations_delete(opt, dry_run, &mut err).await; }, @@ -1086,6 +2243,9 @@ where ("locations-operations-list", Some(opt)) => { call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; }, + ("locations-operations-wait", Some(opt)) => { + call_result = self._projects_locations_operations_wait(opt, dry_run, &mut err).await; + }, ("locations-services-create", Some(opt)) => { call_result = self._projects_locations_services_create(opt, dry_run, &mut err).await; }, @@ -1198,7 +2358,345 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-operations-delete', 'locations-operations-get', 
'locations-operations-list', 'locations-services-create', 'locations-services-delete', 'locations-services-get', 'locations-services-get-iam-policy', 'locations-services-list', 'locations-services-patch', 'locations-services-revisions-delete', 'locations-services-revisions-get', 'locations-services-revisions-list', 'locations-services-set-iam-policy' and 'locations-services-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-jobs-create', 'locations-jobs-delete', 'locations-jobs-executions-delete', 'locations-jobs-executions-get', 'locations-jobs-executions-list', 'locations-jobs-executions-tasks-get', 'locations-jobs-executions-tasks-list', 'locations-jobs-get', 'locations-jobs-get-iam-policy', 'locations-jobs-list', 'locations-jobs-patch', 'locations-jobs-run', 'locations-jobs-set-iam-policy', 'locations-jobs-test-iam-permissions', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-operations-wait', 'locations-services-create', 'locations-services-delete', 'locations-services-get', 'locations-services-get-iam-policy', 'locations-services-list', 'locations-services-patch', 'locations-services-revisions-delete', 'locations-services-revisions-get', 'locations-services-revisions-list', 'locations-services-set-iam-policy' and 'locations-services-test-iam-permissions'", vec![ + ("locations-jobs-create", + Some(r##"Creates a Job."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The location and project in which this Job should be created. 
Format: projects/{project}/locations/{location}, where {project} can be project id or number."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-delete", + Some(r##"Deletes a Job."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The full name of the Job. Format: projects/{project}/locations/{location}/jobs/{job}, where {project} can be project id or number."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-executions-delete", + Some(r##"Deletes an Execution."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-executions-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the Execution to delete. 
Format: projects/{project}/locations/{location}/jobs/{job}/executions/{execution}, where {project} can be project id or number."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-executions-get", + Some(r##"Gets information about an Execution."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-executions-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The full name of the Execution. Format: projects/{project}/locations/{location}/jobs/{job}/executions/{execution}, where {project} can be project id or number."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-executions-list", + Some(r##"Lists Executions from a Job."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-executions-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The Execution from which the Executions should be listed. To list all Executions across Jobs, use "-" instead of Job name. 
Format: projects/{project}/locations/{location}/jobs/{job}, where {project} can be project id or number."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-executions-tasks-get", + Some(r##"Gets information about a Task."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-executions-tasks-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The full name of the Task. Format: projects/{project}/locations/{location}/jobs/{job}/executions/{execution}/tasks/{task}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-executions-tasks-list", + Some(r##"Lists Tasks from an Execution of a Job."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-executions-tasks-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The Execution from which the Tasks should be listed. To list all Tasks across Executions of a Job, use "-" instead of Execution name. To list all Tasks across Jobs, use "-" instead of Job name. 
Format: projects/{project}/locations/{location}/jobs/{job}/executions/{execution}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-get", + Some(r##"Gets information about a Job."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The full name of the Job. Format: projects/{project}/locations/{location}/jobs/{job}, where {project} can be project id or number."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-get-iam-policy", + Some(r##"Gets the IAM Access Control policy currently in effect for the given Job. This result does not include any inherited policies."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-list", + Some(r##"Lists Jobs."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The location and project to list resources on. Format: projects/{project}/locations/{location}, where {project} can be project id or number."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-patch", + Some(r##"Updates a Job."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"The fully qualified name of this Job. 
Format: projects/{project}/locations/{location}/jobs/{job}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-run", + Some(r##"Triggers creation of a new Execution of this Job."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-run", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The full name of the Job. Format: projects/{project}/locations/{location}/jobs/{job}, where {project} can be project id or number."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-set-iam-policy", + Some(r##"Sets the IAM Access control policy for the specified Job. Overwrites any existing policy."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-jobs-test-iam-permissions", + Some(r##"Returns permissions that a caller has on the specified Project. There are no permissions required for making this API call."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-jobs-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("locations-operations-delete", Some(r##"Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. 
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`."##), "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-operations-delete", @@ -1259,6 +2757,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-wait", + Some(r##"Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done."##), + "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-operations-wait", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource to wait on."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1271,7 +2797,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The location and project in which this service should be created. Format: projects/{projectnumber}/locations/{location}"##), + Some(r##"Required. 
The location and project in which this service should be created. Format: projects/{project}/locations/{location}, where {project} can be project id or number. Only lowercase characters, digits, and hyphens."##), Some(true), Some(false)), @@ -1299,7 +2825,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The full name of the Service. Format: projects/{projectnumber}/locations/{location}/services/{service}"##), + Some(r##"Required. The full name of the Service. Format: projects/{project}/locations/{location}/services/{service}, where {project} can be project id or number."##), Some(true), Some(false)), @@ -1321,7 +2847,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The full name of the Service. Format: projects/{projectnumber}/locations/{location}/services/{service}"##), + Some(r##"Required. The full name of the Service. Format: projects/{project}/locations/{location}/services/{service}, where {project} can be project id or number."##), Some(true), Some(false)), @@ -1338,12 +2864,12 @@ async fn main() { Some(false)), ]), ("locations-services-get-iam-policy", - Some(r##"Get the IAM Access Control policy currently in effect for the given Cloud Run Service. This result does not include any inherited policies."##), + Some(r##"Gets the IAM Access Control policy currently in effect for the given Cloud Run Service. This result does not include any inherited policies."##), "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-services-get-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1360,12 +2886,12 @@ async fn main() { Some(false)), ]), ("locations-services-list", - Some(r##"List Services."##), + Some(r##"Lists Services."##), "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-services-list", vec![ (Some(r##"parent"##), None, - Some(r##"Required. The location and project to list resources on. Location must be a valid GCP region, and may not be the "-" wildcard. Format: projects/{projectnumber}/locations/{location}"##), + Some(r##"Required. The location and project to list resources on. Location must be a valid Google Cloud region, and cannot be the "-" wildcard. Format: projects/{project}/locations/{location}, where {project} can be project id or number."##), Some(true), Some(false)), @@ -1410,7 +2936,7 @@ async fn main() { Some(false)), ]), ("locations-services-revisions-delete", - Some(r##"Delete a Revision."##), + Some(r##"Deletes a Revision."##), "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-services-revisions-delete", vec![ (Some(r##"name"##), @@ -1454,7 +2980,7 @@ async fn main() { Some(false)), ]), ("locations-services-revisions-list", - Some(r##"List Revisions from a given Service, or from a given location."##), + Some(r##"Lists Revisions from a given Service, or from a given location."##), "Details at http://byron.github.io/google-apis-rs/google_run2_cli/projects_locations-services-revisions-list", vec![ (Some(r##"parent"##), @@ -1481,7 +3007,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1509,7 +3035,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1537,7 +3063,7 @@ async fn main() { let mut app = App::new("run2") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("Deploy and manage user provided container images that scale automatically based on incoming requests. The Cloud Run Admin API v1 follows the Knative Serving API specification, while v2 is aligned with Google Cloud AIP-based API standards, as described in https://google.aip.dev/.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_run2_cli") .arg(Arg::with_name("url") diff --git a/gen/run2/Cargo.toml b/gen/run2/Cargo.toml index 900eba8a37..f36d1f2e03 100644 --- a/gen/run2/Cargo.toml +++ b/gen/run2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-run2" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Run (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/run2" homepage = "https://cloud.google.com/run/" -documentation = "https://docs.rs/google-run2/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-run2/5.0.2+20230113" license = "MIT" keywords = ["run", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/run2/README.md b/gen/run2/README.md index 6a21c00205..eae08294e4 100644 --- 
a/gen/run2/README.md +++ b/gen/run2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-run2` library allows access to all features of the *Google Cloud Run* service. -This documentation was generated from *Cloud Run* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *run:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Run* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *run:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Run* *v2* API can be found at the [official documentation site](https://cloud.google.com/run/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/CloudRun) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-run2/5.0.2+20230113/google_run2/CloudRun) ... 
* projects - * [*locations jobs create*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobDeleteCall), [*locations jobs executions delete*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobExecutionDeleteCall), [*locations jobs executions get*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobExecutionGetCall), [*locations jobs executions list*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobExecutionListCall), [*locations jobs executions tasks get*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobExecutionTaskGetCall), [*locations jobs executions tasks list*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobExecutionTaskListCall), [*locations jobs get*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobGetCall), [*locations jobs get iam policy*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobGetIamPolicyCall), [*locations jobs list*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobListCall), [*locations jobs patch*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobPatchCall), [*locations jobs run*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobRunCall), [*locations jobs set iam policy*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobSetIamPolicyCall), [*locations jobs test iam permissions*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationJobTestIamPermissionCall), [*locations operations 
delete*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationOperationListCall), [*locations operations wait*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationOperationWaitCall), [*locations services create*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceCreateCall), [*locations services delete*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceDeleteCall), [*locations services get*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceGetCall), [*locations services get iam policy*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceGetIamPolicyCall), [*locations services list*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceListCall), [*locations services patch*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServicePatchCall), [*locations services revisions delete*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceRevisionDeleteCall), [*locations services revisions get*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceRevisionGetCall), [*locations services revisions list*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceRevisionListCall), [*locations services set iam policy*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceSetIamPolicyCall) and [*locations services test iam 
permissions*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/api::ProjectLocationServiceTestIamPermissionCall) + * [*locations jobs create*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobDeleteCall), [*locations jobs executions delete*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobExecutionDeleteCall), [*locations jobs executions get*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobExecutionGetCall), [*locations jobs executions list*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobExecutionListCall), [*locations jobs executions tasks get*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobExecutionTaskGetCall), [*locations jobs executions tasks list*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobExecutionTaskListCall), [*locations jobs get*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobGetCall), [*locations jobs get iam policy*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobGetIamPolicyCall), [*locations jobs list*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobListCall), [*locations jobs patch*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobPatchCall), [*locations jobs run*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobRunCall), [*locations jobs set iam policy*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobSetIamPolicyCall), [*locations jobs test iam permissions*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationJobTestIamPermissionCall), [*locations operations 
delete*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationOperationListCall), [*locations operations wait*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationOperationWaitCall), [*locations services create*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceCreateCall), [*locations services delete*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceDeleteCall), [*locations services get*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceGetCall), [*locations services get iam policy*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceGetIamPolicyCall), [*locations services list*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceListCall), [*locations services patch*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServicePatchCall), [*locations services revisions delete*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceRevisionDeleteCall), [*locations services revisions get*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceRevisionGetCall), [*locations services revisions list*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceRevisionListCall), [*locations services set iam policy*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceSetIamPolicyCall) and [*locations services test iam permissions*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/api::ProjectLocationServiceTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with 
ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/CloudRun)** +* **[Hub](https://docs.rs/google-run2/5.0.2+20230113/google_run2/CloudRun)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::CallBuilder) -* **[Resources](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::CallBuilder) +* **[Resources](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::Part)** + * **[Parts](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -136,17 +136,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -156,29 +156,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::Delegate) to the -[Method Builder](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::Delegate) to the +[Method Builder](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::RequestValue) and -[decodable](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::RequestValue) and +[decodable](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-run2/5.0.2-beta-1+20230113/google_run2/client::RequestValue) are moved +* [request values](https://docs.rs/google-run2/5.0.2+20230113/google_run2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/run2/src/api.rs b/gen/run2/src/api.rs index 48bae1d140..5837009dc1 100644 --- a/gen/run2/src/api.rs +++ b/gen/run2/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> CloudRun { CloudRun { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://run.googleapis.com/".to_string(), _root_url: "https://run.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> CloudRun { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/run2/src/client.rs b/gen/run2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/run2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/run2/src/lib.rs b/gen/run2/src/lib.rs index 06fe9a0a0a..e3ada5f469 100644 --- a/gen/run2/src/lib.rs +++ b/gen/run2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Run* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *run:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Run* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *run:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Run* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/run/). diff --git a/gen/runtimeconfig1-cli/Cargo.toml b/gen/runtimeconfig1-cli/Cargo.toml index 7e8cb2d168..b205609ae6 100644 --- a/gen/runtimeconfig1-cli/Cargo.toml +++ b/gen/runtimeconfig1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-runtimeconfig1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230109" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud RuntimeConfig (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/runtimeconfig1-cli" @@ -20,13 +20,13 @@ name = "runtimeconfig1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-runtimeconfig1] path = "../runtimeconfig1" -version = "4.0.1+20220228" +version = "5.0.2+20230109" + diff --git a/gen/runtimeconfig1-cli/README.md b/gen/runtimeconfig1-cli/README.md index e78e4f893a..5f515b65f3 100644 --- a/gen/runtimeconfig1-cli/README.md +++ b/gen/runtimeconfig1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud RuntimeConfig* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud RuntimeConfig* API at revision *20230109*. The CLI is at version *5.0.2*. ```bash runtimeconfig1 [options] diff --git a/gen/runtimeconfig1-cli/mkdocs.yml b/gen/runtimeconfig1-cli/mkdocs.yml index 76a355dfd0..3697087b9b 100644 --- a/gen/runtimeconfig1-cli/mkdocs.yml +++ b/gen/runtimeconfig1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud RuntimeConfig v4.0.1+20220228 +site_name: Cloud RuntimeConfig v5.0.2+20230109 site_url: http://byron.github.io/google-apis-rs/google-runtimeconfig1-cli site_description: A complete library to interact with Cloud RuntimeConfig (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/runtimeconfig1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_list.md', 'Operations', 'List'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'List': 'operations_list.md' theme: readthedocs diff --git a/gen/runtimeconfig1-cli/src/client.rs b/gen/runtimeconfig1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/runtimeconfig1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - 
-const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/runtimeconfig1-cli/src/main.rs b/gen/runtimeconfig1-cli/src/main.rs index ca44265a13..d02d0a5943 100644 --- a/gen/runtimeconfig1-cli/src/main.rs +++ b/gen/runtimeconfig1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_runtimeconfig1::{api, Error, oauth2}; +use google_runtimeconfig1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -197,7 +196,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -423,7 +422,7 @@ async fn main() { let mut app = App::new("runtimeconfig1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230109") .about("The Runtime Configurator allows you to dynamically configure and expose variables through Google Cloud Platform. 
In addition, you can also set Watchers and Waiters that will watch for changes to your data and return based on certain conditions.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_runtimeconfig1_cli") .arg(Arg::with_name("url") diff --git a/gen/runtimeconfig1/Cargo.toml b/gen/runtimeconfig1/Cargo.toml index 801ca910fc..8f21e4aa9e 100644 --- a/gen/runtimeconfig1/Cargo.toml +++ b/gen/runtimeconfig1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-runtimeconfig1" -version = "5.0.2-beta-1+20230109" +version = "5.0.2+20230109" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud RuntimeConfig (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/runtimeconfig1" homepage = "https://cloud.google.com/deployment-manager/runtime-configurator/" -documentation = "https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109" +documentation = "https://docs.rs/google-runtimeconfig1/5.0.2+20230109" license = "MIT" keywords = ["runtimeconfig", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/runtimeconfig1/README.md b/gen/runtimeconfig1/README.md index 1cbc55a74a..6b96b7e335 100644 --- a/gen/runtimeconfig1/README.md +++ b/gen/runtimeconfig1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-runtimeconfig1` library allows access to all features of the *Google Cloud RuntimeConfig* service. -This documentation was generated from *Cloud RuntimeConfig* crate version *5.0.2-beta-1+20230109*, where *20230109* is the exact revision of the *runtimeconfig:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud RuntimeConfig* crate version *5.0.2+20230109*, where *20230109* is the exact revision of the *runtimeconfig:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Cloud RuntimeConfig* *v1* API can be found at the [official documentation site](https://cloud.google.com/deployment-manager/runtime-configurator/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/CloudRuntimeConfig) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/CloudRuntimeConfig) ... -* [operations](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/api::Operation) - * [*cancel*](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/api::OperationCancelCall), [*delete*](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/api::OperationDeleteCall) and [*list*](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/api::OperationListCall) +* [operations](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/api::Operation) + * [*cancel*](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/api::OperationCancelCall), [*delete*](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/api::OperationDeleteCall) and [*list*](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/api::OperationListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/CloudRuntimeConfig)** +* **[Hub](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/CloudRuntimeConfig)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::CallBuilder) -* **[Resources](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::CallBuilder) +* **[Resources](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::Part)** + * **[Parts](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -123,17 +123,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -143,29 +143,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::Delegate) to the -[Method Builder](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::Delegate) to the +[Method Builder](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::RequestValue) and -[decodable](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::RequestValue) and +[decodable](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-runtimeconfig1/5.0.2-beta-1+20230109/google_runtimeconfig1/client::RequestValue) are moved +* [request values](https://docs.rs/google-runtimeconfig1/5.0.2+20230109/google_runtimeconfig1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/runtimeconfig1/src/api.rs b/gen/runtimeconfig1/src/api.rs index ff29865102..ec9aedaf60 100644 --- a/gen/runtimeconfig1/src/api.rs +++ b/gen/runtimeconfig1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> CloudRuntimeConfig { CloudRuntimeConfig { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://runtimeconfig.googleapis.com/".to_string(), _root_url: "https://runtimeconfig.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> CloudRuntimeConfig { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/runtimeconfig1/src/client.rs b/gen/runtimeconfig1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/runtimeconfig1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/runtimeconfig1/src/lib.rs b/gen/runtimeconfig1/src/lib.rs index 33049fa09c..b247cbb5a2 100644 --- a/gen/runtimeconfig1/src/lib.rs +++ b/gen/runtimeconfig1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud RuntimeConfig* crate version *5.0.2-beta-1+20230109*, where *20230109* is the exact revision of the *runtimeconfig:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud RuntimeConfig* crate version *5.0.2+20230109*, where *20230109* is the exact revision of the *runtimeconfig:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud RuntimeConfig* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/deployment-manager/runtime-configurator/). diff --git a/gen/runtimeconfig1_beta1-cli/Cargo.toml b/gen/runtimeconfig1_beta1-cli/Cargo.toml index 92038c2a25..804ae1d097 100644 --- a/gen/runtimeconfig1_beta1-cli/Cargo.toml +++ b/gen/runtimeconfig1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-runtimeconfig1_beta1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud RuntimeConfig (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/runtimeconfig1_beta1-cli" @@ -20,13 +20,13 @@ name = "runtimeconfig1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-runtimeconfig1_beta1] path = "../runtimeconfig1_beta1" -version = "4.0.1+20220228" +version = "5.0.2+20230123" + diff --git a/gen/runtimeconfig1_beta1-cli/README.md b/gen/runtimeconfig1_beta1-cli/README.md index 59a0e21279..a958290f39 100644 
--- a/gen/runtimeconfig1_beta1-cli/README.md +++ b/gen/runtimeconfig1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud RuntimeConfig* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud RuntimeConfig* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash runtimeconfig1-beta1 [options] diff --git a/gen/runtimeconfig1_beta1-cli/mkdocs.yml b/gen/runtimeconfig1_beta1-cli/mkdocs.yml index d4a608a278..2ec0f1acda 100644 --- a/gen/runtimeconfig1_beta1-cli/mkdocs.yml +++ b/gen/runtimeconfig1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud RuntimeConfig v4.0.1+20220228 +site_name: Cloud RuntimeConfig v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-runtimeconfig1_beta1-cli site_description: A complete library to interact with Cloud RuntimeConfig (protocol v1beta1) @@ -7,30 +7,31 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/runtimeconfig1_b docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_configs-create.md', 'Projects', 'Configs Create'] -- ['projects_configs-delete.md', 'Projects', 'Configs Delete'] -- ['projects_configs-get.md', 'Projects', 'Configs Get'] -- ['projects_configs-get-iam-policy.md', 'Projects', 'Configs Get Iam Policy'] -- ['projects_configs-list.md', 'Projects', 'Configs List'] -- ['projects_configs-operations-get.md', 'Projects', 'Configs Operations Get'] -- ['projects_configs-operations-test-iam-permissions.md', 'Projects', 'Configs Operations Test Iam Permissions'] -- ['projects_configs-set-iam-policy.md', 'Projects', 'Configs Set Iam Policy'] -- ['projects_configs-test-iam-permissions.md', 'Projects', 'Configs Test Iam Permissions'] -- ['projects_configs-update.md', 'Projects', 'Configs Update'] -- ['projects_configs-variables-create.md', 'Projects', 'Configs Variables Create'] 
-- ['projects_configs-variables-delete.md', 'Projects', 'Configs Variables Delete'] -- ['projects_configs-variables-get.md', 'Projects', 'Configs Variables Get'] -- ['projects_configs-variables-list.md', 'Projects', 'Configs Variables List'] -- ['projects_configs-variables-test-iam-permissions.md', 'Projects', 'Configs Variables Test Iam Permissions'] -- ['projects_configs-variables-update.md', 'Projects', 'Configs Variables Update'] -- ['projects_configs-variables-watch.md', 'Projects', 'Configs Variables Watch'] -- ['projects_configs-waiters-create.md', 'Projects', 'Configs Waiters Create'] -- ['projects_configs-waiters-delete.md', 'Projects', 'Configs Waiters Delete'] -- ['projects_configs-waiters-get.md', 'Projects', 'Configs Waiters Get'] -- ['projects_configs-waiters-list.md', 'Projects', 'Configs Waiters List'] -- ['projects_configs-waiters-test-iam-permissions.md', 'Projects', 'Configs Waiters Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Configs Create': 'projects_configs-create.md' + - 'Configs Delete': 'projects_configs-delete.md' + - 'Configs Get': 'projects_configs-get.md' + - 'Configs Get Iam Policy': 'projects_configs-get-iam-policy.md' + - 'Configs List': 'projects_configs-list.md' + - 'Configs Operations Get': 'projects_configs-operations-get.md' + - 'Configs Operations Test Iam Permissions': 'projects_configs-operations-test-iam-permissions.md' + - 'Configs Set Iam Policy': 'projects_configs-set-iam-policy.md' + - 'Configs Test Iam Permissions': 'projects_configs-test-iam-permissions.md' + - 'Configs Update': 'projects_configs-update.md' + - 'Configs Variables Create': 'projects_configs-variables-create.md' + - 'Configs Variables Delete': 'projects_configs-variables-delete.md' + - 'Configs Variables Get': 'projects_configs-variables-get.md' + - 'Configs Variables List': 'projects_configs-variables-list.md' + - 'Configs Variables Test Iam Permissions': 'projects_configs-variables-test-iam-permissions.md' + - 'Configs 
Variables Update': 'projects_configs-variables-update.md' + - 'Configs Variables Watch': 'projects_configs-variables-watch.md' + - 'Configs Waiters Create': 'projects_configs-waiters-create.md' + - 'Configs Waiters Delete': 'projects_configs-waiters-delete.md' + - 'Configs Waiters Get': 'projects_configs-waiters-get.md' + - 'Configs Waiters List': 'projects_configs-waiters-list.md' + - 'Configs Waiters Test Iam Permissions': 'projects_configs-waiters-test-iam-permissions.md' theme: readthedocs diff --git a/gen/runtimeconfig1_beta1-cli/src/client.rs b/gen/runtimeconfig1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/runtimeconfig1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/runtimeconfig1_beta1-cli/src/main.rs b/gen/runtimeconfig1_beta1-cli/src/main.rs index 5193693524..dabcdce734 100644 --- a/gen/runtimeconfig1_beta1-cli/src/main.rs +++ b/gen/runtimeconfig1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_runtimeconfig1_beta1::{api, Error, oauth2}; +use google_runtimeconfig1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -252,7 +251,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -311,7 +310,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -854,7 +853,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "recursive" => { - call = call.recursive(arg_from_str(value.unwrap_or("false"), err, "recursive", "boolean")); + call = call.recursive( value.map(|v| arg_from_str(v, err, "recursive", 
"boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -962,13 +961,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "return-values" => { - call = call.return_values(arg_from_str(value.unwrap_or("false"), err, "return-values", "boolean")); + call = call.return_values( value.map(|v| arg_from_str(v, err, "return-values", "boolean")).unwrap_or(false)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1491,7 +1490,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1856,7 +1855,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1922,7 +1921,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1950,7 +1949,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1978,7 +1977,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2128,7 +2127,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2306,7 +2305,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2334,7 +2333,7 @@ async fn main() { let mut app = App::new("runtimeconfig1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230123") .about("The Runtime Configurator allows you to dynamically configure and expose variables through Google Cloud Platform. In addition, you can also set Watchers and Waiters that will watch for changes to your data and return based on certain conditions.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_runtimeconfig1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/runtimeconfig1_beta1/Cargo.toml b/gen/runtimeconfig1_beta1/Cargo.toml index 34efa0fbfc..4b0715b7a2 100644 --- a/gen/runtimeconfig1_beta1/Cargo.toml +++ b/gen/runtimeconfig1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-runtimeconfig1_beta1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud RuntimeConfig (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/runtimeconfig1_beta1" homepage = "https://cloud.google.com/deployment-manager/runtime-configurator/" -documentation = "https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123" license = "MIT" keywords = ["runtimeconfig", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/runtimeconfig1_beta1/README.md b/gen/runtimeconfig1_beta1/README.md index d6f3e4c646..a6e1f79155 100644 --- a/gen/runtimeconfig1_beta1/README.md +++ b/gen/runtimeconfig1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-runtimeconfig1_beta1` library allows access to all features of the *Google Cloud RuntimeConfig* service. 
-This documentation was generated from *Cloud RuntimeConfig* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *runtimeconfig:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud RuntimeConfig* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *runtimeconfig:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud RuntimeConfig* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/deployment-manager/runtime-configurator/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/CloudRuntimeConfig) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/CloudRuntimeConfig) ... 
* projects - * [*configs create*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigCreateCall), [*configs delete*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigDeleteCall), [*configs get*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigGetCall), [*configs get iam policy*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigGetIamPolicyCall), [*configs list*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigListCall), [*configs operations get*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigOperationGetCall), [*configs operations test iam permissions*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigOperationTestIamPermissionCall), [*configs set iam policy*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigSetIamPolicyCall), [*configs test iam permissions*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigTestIamPermissionCall), [*configs update*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigUpdateCall), [*configs variables create*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableCreateCall), [*configs variables delete*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableDeleteCall), [*configs variables 
get*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableGetCall), [*configs variables list*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableListCall), [*configs variables test iam permissions*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableTestIamPermissionCall), [*configs variables update*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableUpdateCall), [*configs variables watch*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableWatchCall), [*configs waiters create*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterCreateCall), [*configs waiters delete*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterDeleteCall), [*configs waiters get*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterGetCall), [*configs waiters list*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterListCall) and [*configs waiters test iam permissions*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterTestIamPermissionCall) + * [*configs create*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigCreateCall), [*configs delete*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigDeleteCall), [*configs 
get*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigGetCall), [*configs get iam policy*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigGetIamPolicyCall), [*configs list*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigListCall), [*configs operations get*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigOperationGetCall), [*configs operations test iam permissions*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigOperationTestIamPermissionCall), [*configs set iam policy*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigSetIamPolicyCall), [*configs test iam permissions*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigTestIamPermissionCall), [*configs update*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigUpdateCall), [*configs variables create*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableCreateCall), [*configs variables delete*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableDeleteCall), [*configs variables get*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableGetCall), [*configs variables list*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableListCall), [*configs variables test iam permissions*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableTestIamPermissionCall), [*configs variables 
update*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableUpdateCall), [*configs variables watch*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigVariableWatchCall), [*configs waiters create*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterCreateCall), [*configs waiters delete*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterDeleteCall), [*configs waiters get*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterGetCall), [*configs waiters list*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterListCall) and [*configs waiters test iam permissions*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/api::ProjectConfigWaiterTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/CloudRuntimeConfig)** +* **[Hub](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/CloudRuntimeConfig)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::Resource)** + * 
creates [*Method Builders*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-runtimeconfig1_beta1/5.0.2-beta-1+20230123/google_runtimeconfig1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-runtimeconfig1_beta1/5.0.2+20230123/google_runtimeconfig1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/runtimeconfig1_beta1/src/api.rs b/gen/runtimeconfig1_beta1/src/api.rs index a75ad41f62..d2cbec4eb4 100644 --- a/gen/runtimeconfig1_beta1/src/api.rs +++ b/gen/runtimeconfig1_beta1/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> CloudRuntimeConfig { CloudRuntimeConfig { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://runtimeconfig.googleapis.com/".to_string(), _root_url: "https://runtimeconfig.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> CloudRuntimeConfig { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/runtimeconfig1_beta1/src/client.rs b/gen/runtimeconfig1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/runtimeconfig1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/runtimeconfig1_beta1/src/lib.rs b/gen/runtimeconfig1_beta1/src/lib.rs index 0c23a94b7c..e9fd33ff5b 100644 --- a/gen/runtimeconfig1_beta1/src/lib.rs +++ b/gen/runtimeconfig1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud RuntimeConfig* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *runtimeconfig:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud RuntimeConfig* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *runtimeconfig:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud RuntimeConfig* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/deployment-manager/runtime-configurator/). diff --git a/gen/safebrowsing4-cli/Cargo.toml b/gen/safebrowsing4-cli/Cargo.toml index d44ecc439a..acbd5ae8ec 100644 --- a/gen/safebrowsing4-cli/Cargo.toml +++ b/gen/safebrowsing4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-safebrowsing4-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230108" authors = ["Sebastian Thiel "] description = "A complete library to interact with Safebrowsing (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/safebrowsing4-cli" @@ -20,13 +20,13 @@ name = "safebrowsing4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-safebrowsing4] path = "../safebrowsing4" -version = "4.0.1+20220305" +version = "5.0.2+20230108" + diff --git a/gen/safebrowsing4-cli/README.md b/gen/safebrowsing4-cli/README.md index dd8808e047..71c2721a1e 100644 --- a/gen/safebrowsing4-cli/README.md +++ 
b/gen/safebrowsing4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Safebrowsing* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Safebrowsing* API at revision *20230108*. The CLI is at version *5.0.2*. ```bash safebrowsing4 [options] diff --git a/gen/safebrowsing4-cli/mkdocs.yml b/gen/safebrowsing4-cli/mkdocs.yml index 08a84d226c..aa54954bfd 100644 --- a/gen/safebrowsing4-cli/mkdocs.yml +++ b/gen/safebrowsing4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Safebrowsing v4.0.1+20220305 +site_name: Safebrowsing v5.0.2+20230108 site_url: http://byron.github.io/google-apis-rs/google-safebrowsing4-cli site_description: A complete library to interact with Safebrowsing (protocol v4) @@ -7,15 +7,22 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/safebrowsing4-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['encoded-full-hashes_get.md', 'Encoded Full Hashes', 'Get'] -- ['encoded-updates_get.md', 'Encoded Updates', 'Get'] -- ['full-hashes_find.md', 'Full Hashes', 'Find'] -- ['threat-hits_create.md', 'Threat Hits', 'Create'] -- ['threat-list-updates_fetch.md', 'Threat List Updates', 'Fetch'] -- ['threat-lists_list.md', 'Threat Lists', 'List'] -- ['threat-matches_find.md', 'Threat Matches', 'Find'] +nav: +- Home: 'index.md' +- 'Encoded Full Hashes': + - 'Get': 'encoded-full-hashes_get.md' +- 'Encoded Updates': + - 'Get': 'encoded-updates_get.md' +- 'Full Hashes': + - 'Find': 'full-hashes_find.md' +- 'Threat Hits': + - 'Create': 'threat-hits_create.md' +- 'Threat List Updates': + - 'Fetch': 'threat-list-updates_fetch.md' +- 'Threat Lists': + - 'List': 'threat-lists_list.md' +- 'Threat Matches': + - 'Find': 'threat-matches_find.md' theme: readthedocs diff --git a/gen/safebrowsing4-cli/src/client.rs b/gen/safebrowsing4-cli/src/client.rs deleted file mode 100644 index 
0ece418e7d..0000000000 --- a/gen/safebrowsing4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - 
Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } 
else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => 
Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match 
UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - 
-#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) 
=> { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/safebrowsing4-cli/src/main.rs b/gen/safebrowsing4-cli/src/main.rs index 911fcdfa67..26a4a2f6cd 100644 --- a/gen/safebrowsing4-cli/src/main.rs +++ b/gen/safebrowsing4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_safebrowsing4::{api, Error, oauth2}; +use google_safebrowsing4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -888,7 +887,7 @@ async fn main() { let mut app = App::new("safebrowsing4") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230108") .about("Enables client applications to check web resources (most commonly URLs) against Google-generated lists of unsafe web resources. The Safe Browsing APIs are for non-commercial use only. 
If you need to use APIs to detect malicious URLs for commercial purposes – meaning “for sale or revenue-generating purposes” – please refer to the Web Risk API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_safebrowsing4_cli") .arg(Arg::with_name("folder") diff --git a/gen/safebrowsing4/Cargo.toml b/gen/safebrowsing4/Cargo.toml index 19b4534a6d..e05017986a 100644 --- a/gen/safebrowsing4/Cargo.toml +++ b/gen/safebrowsing4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-safebrowsing4" -version = "5.0.2-beta-1+20230108" +version = "5.0.2+20230108" authors = ["Sebastian Thiel "] description = "A complete library to interact with Safebrowsing (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/safebrowsing4" homepage = "https://developers.google.com/safe-browsing/" -documentation = "https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108" +documentation = "https://docs.rs/google-safebrowsing4/5.0.2+20230108" license = "MIT" keywords = ["safebrowsing", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/safebrowsing4/README.md b/gen/safebrowsing4/README.md index e4960c06ec..116bc963a4 100644 --- a/gen/safebrowsing4/README.md +++ b/gen/safebrowsing4/README.md @@ -5,28 +5,28 @@ DO NOT EDIT ! --> The `google-safebrowsing4` library allows access to all features of the *Google Safebrowsing* service. -This documentation was generated from *Safebrowsing* crate version *5.0.2-beta-1+20230108*, where *20230108* is the exact revision of the *safebrowsing:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Safebrowsing* crate version *5.0.2+20230108*, where *20230108* is the exact revision of the *safebrowsing:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Safebrowsing* *v4* API can be found at the [official documentation site](https://developers.google.com/safe-browsing/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/Safebrowsing) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/Safebrowsing) ... * encoded full hashes - * [*get*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/api::EncodedFullHashGetCall) + * [*get*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/api::EncodedFullHashGetCall) * encoded updates - * [*get*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/api::EncodedUpdateGetCall) + * [*get*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/api::EncodedUpdateGetCall) * full hashes - * [*find*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/api::FullHashFindCall) + * [*find*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/api::FullHashFindCall) * threat hits - * [*create*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/api::ThreatHitCreateCall) + * [*create*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/api::ThreatHitCreateCall) * threat list updates - * [*fetch*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/api::ThreatListUpdateFetchCall) + * [*fetch*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/api::ThreatListUpdateFetchCall) * threat lists - * [*list*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/api::ThreatListListCall) + * [*list*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/api::ThreatListListCall) * threat 
matches - * [*find*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/api::ThreatMatchFindCall) + * [*find*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/api::ThreatMatchFindCall) @@ -35,17 +35,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/Safebrowsing)** +* **[Hub](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/Safebrowsing)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::CallBuilder) -* **[Resources](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::CallBuilder) +* **[Resources](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::Part)** + * **[Parts](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::Delegate) to the -[Method Builder](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::Delegate) to the +[Method Builder](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::RequestValue) and -[decodable](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::RequestValue) and +[decodable](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-safebrowsing4/5.0.2-beta-1+20230108/google_safebrowsing4/client::RequestValue) are moved +* [request values](https://docs.rs/google-safebrowsing4/5.0.2+20230108/google_safebrowsing4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/safebrowsing4/src/api.rs b/gen/safebrowsing4/src/api.rs index 9b85ab5016..1db51ddbaf 100644 --- a/gen/safebrowsing4/src/api.rs +++ b/gen/safebrowsing4/src/api.rs @@ -99,7 +99,7 @@ impl<'a, S> Safebrowsing { Safebrowsing { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://safebrowsing.googleapis.com/".to_string(), _root_url: "https://safebrowsing.googleapis.com/".to_string(), } @@ -128,7 +128,7 @@ impl<'a, S> Safebrowsing { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/safebrowsing4/src/client.rs b/gen/safebrowsing4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/safebrowsing4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/safebrowsing4/src/lib.rs b/gen/safebrowsing4/src/lib.rs index d1e3be137c..55a2491191 100644 --- a/gen/safebrowsing4/src/lib.rs +++ b/gen/safebrowsing4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Safebrowsing* crate version *5.0.2-beta-1+20230108*, where *20230108* is the exact revision of the *safebrowsing:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Safebrowsing* crate version *5.0.2+20230108*, where *20230108* is the exact revision of the *safebrowsing:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Safebrowsing* *v4* API can be found at the //! [official documentation site](https://developers.google.com/safe-browsing/). diff --git a/gen/sasportal1_alpha1-cli/Cargo.toml b/gen/sasportal1_alpha1-cli/Cargo.toml index 03e244e2a4..7bac76b00f 100644 --- a/gen/sasportal1_alpha1-cli/Cargo.toml +++ b/gen/sasportal1_alpha1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-sasportal1_alpha1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with sasportal (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sasportal1_alpha1-cli" @@ -20,13 +20,13 @@ name = "sasportal1-alpha1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-sasportal1_alpha1] path = "../sasportal1_alpha1" -version = "4.0.1+20220301" +version = "5.0.2+20230121" + diff --git a/gen/sasportal1_alpha1-cli/README.md b/gen/sasportal1_alpha1-cli/README.md index 23e0807c85..49b8cdcd35 100644 --- a/gen/sasportal1_alpha1-cli/README.md +++ b/gen/sasportal1_alpha1-cli/README.md 
@@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *sasportal* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *sasportal* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash sasportal1-alpha1 [options] diff --git a/gen/sasportal1_alpha1-cli/mkdocs.yml b/gen/sasportal1_alpha1-cli/mkdocs.yml index c44d71ef8c..7b05bda185 100644 --- a/gen/sasportal1_alpha1-cli/mkdocs.yml +++ b/gen/sasportal1_alpha1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: sasportal v4.0.1+20220301 +site_name: sasportal v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-sasportal1_alpha1-cli site_description: A complete library to interact with sasportal (protocol v1alpha1) @@ -7,85 +7,90 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/sasportal1_alpha docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['customers_deployments-create.md', 'Customers', 'Deployments Create'] -- ['customers_deployments-delete.md', 'Customers', 'Deployments Delete'] -- ['customers_deployments-devices-create.md', 'Customers', 'Deployments Devices Create'] -- ['customers_deployments-devices-create-signed.md', 'Customers', 'Deployments Devices Create Signed'] -- ['customers_deployments-devices-list.md', 'Customers', 'Deployments Devices List'] -- ['customers_deployments-get.md', 'Customers', 'Deployments Get'] -- ['customers_deployments-list.md', 'Customers', 'Deployments List'] -- ['customers_deployments-move.md', 'Customers', 'Deployments Move'] -- ['customers_deployments-patch.md', 'Customers', 'Deployments Patch'] -- ['customers_devices-create.md', 'Customers', 'Devices Create'] -- ['customers_devices-create-signed.md', 'Customers', 'Devices Create Signed'] -- ['customers_devices-delete.md', 'Customers', 'Devices Delete'] -- ['customers_devices-get.md', 'Customers', 'Devices Get'] -- 
['customers_devices-list.md', 'Customers', 'Devices List'] -- ['customers_devices-move.md', 'Customers', 'Devices Move'] -- ['customers_devices-patch.md', 'Customers', 'Devices Patch'] -- ['customers_devices-sign-device.md', 'Customers', 'Devices Sign Device'] -- ['customers_devices-update-signed.md', 'Customers', 'Devices Update Signed'] -- ['customers_get.md', 'Customers', 'Get'] -- ['customers_list.md', 'Customers', 'List'] -- ['customers_nodes-create.md', 'Customers', 'Nodes Create'] -- ['customers_nodes-delete.md', 'Customers', 'Nodes Delete'] -- ['customers_nodes-deployments-create.md', 'Customers', 'Nodes Deployments Create'] -- ['customers_nodes-deployments-list.md', 'Customers', 'Nodes Deployments List'] -- ['customers_nodes-devices-create.md', 'Customers', 'Nodes Devices Create'] -- ['customers_nodes-devices-create-signed.md', 'Customers', 'Nodes Devices Create Signed'] -- ['customers_nodes-devices-list.md', 'Customers', 'Nodes Devices List'] -- ['customers_nodes-get.md', 'Customers', 'Nodes Get'] -- ['customers_nodes-list.md', 'Customers', 'Nodes List'] -- ['customers_nodes-move.md', 'Customers', 'Nodes Move'] -- ['customers_nodes-nodes-create.md', 'Customers', 'Nodes Nodes Create'] -- ['customers_nodes-nodes-list.md', 'Customers', 'Nodes Nodes List'] -- ['customers_nodes-patch.md', 'Customers', 'Nodes Patch'] -- ['customers_patch.md', 'Customers', 'Patch'] -- ['deployments_devices-delete.md', 'Deployments', 'Devices Delete'] -- ['deployments_devices-get.md', 'Deployments', 'Devices Get'] -- ['deployments_devices-move.md', 'Deployments', 'Devices Move'] -- ['deployments_devices-patch.md', 'Deployments', 'Devices Patch'] -- ['deployments_devices-sign-device.md', 'Deployments', 'Devices Sign Device'] -- ['deployments_devices-update-signed.md', 'Deployments', 'Devices Update Signed'] -- ['deployments_get.md', 'Deployments', 'Get'] -- ['installer_generate-secret.md', 'Installer', 'Generate Secret'] -- ['installer_validate.md', 'Installer', 'Validate'] -- 
['nodes_deployments-delete.md', 'Nodes', 'Deployments Delete'] -- ['nodes_deployments-devices-create.md', 'Nodes', 'Deployments Devices Create'] -- ['nodes_deployments-devices-create-signed.md', 'Nodes', 'Deployments Devices Create Signed'] -- ['nodes_deployments-devices-list.md', 'Nodes', 'Deployments Devices List'] -- ['nodes_deployments-get.md', 'Nodes', 'Deployments Get'] -- ['nodes_deployments-list.md', 'Nodes', 'Deployments List'] -- ['nodes_deployments-move.md', 'Nodes', 'Deployments Move'] -- ['nodes_deployments-patch.md', 'Nodes', 'Deployments Patch'] -- ['nodes_devices-create.md', 'Nodes', 'Devices Create'] -- ['nodes_devices-create-signed.md', 'Nodes', 'Devices Create Signed'] -- ['nodes_devices-delete.md', 'Nodes', 'Devices Delete'] -- ['nodes_devices-get.md', 'Nodes', 'Devices Get'] -- ['nodes_devices-list.md', 'Nodes', 'Devices List'] -- ['nodes_devices-move.md', 'Nodes', 'Devices Move'] -- ['nodes_devices-patch.md', 'Nodes', 'Devices Patch'] -- ['nodes_devices-sign-device.md', 'Nodes', 'Devices Sign Device'] -- ['nodes_devices-update-signed.md', 'Nodes', 'Devices Update Signed'] -- ['nodes_get.md', 'Nodes', 'Get'] -- ['nodes_nodes-create.md', 'Nodes', 'Nodes Create'] -- ['nodes_nodes-delete.md', 'Nodes', 'Nodes Delete'] -- ['nodes_nodes-deployments-create.md', 'Nodes', 'Nodes Deployments Create'] -- ['nodes_nodes-deployments-list.md', 'Nodes', 'Nodes Deployments List'] -- ['nodes_nodes-devices-create.md', 'Nodes', 'Nodes Devices Create'] -- ['nodes_nodes-devices-create-signed.md', 'Nodes', 'Nodes Devices Create Signed'] -- ['nodes_nodes-devices-list.md', 'Nodes', 'Nodes Devices List'] -- ['nodes_nodes-get.md', 'Nodes', 'Nodes Get'] -- ['nodes_nodes-list.md', 'Nodes', 'Nodes List'] -- ['nodes_nodes-move.md', 'Nodes', 'Nodes Move'] -- ['nodes_nodes-nodes-create.md', 'Nodes', 'Nodes Nodes Create'] -- ['nodes_nodes-nodes-list.md', 'Nodes', 'Nodes Nodes List'] -- ['nodes_nodes-patch.md', 'Nodes', 'Nodes Patch'] -- ['policies_get.md', 'Policies', 'Get'] -- 
['policies_set.md', 'Policies', 'Set'] -- ['policies_test.md', 'Policies', 'Test'] +nav: +- Home: 'index.md' +- 'Customers': + - 'Deployments Create': 'customers_deployments-create.md' + - 'Deployments Delete': 'customers_deployments-delete.md' + - 'Deployments Devices Create': 'customers_deployments-devices-create.md' + - 'Deployments Devices Create Signed': 'customers_deployments-devices-create-signed.md' + - 'Deployments Devices List': 'customers_deployments-devices-list.md' + - 'Deployments Get': 'customers_deployments-get.md' + - 'Deployments List': 'customers_deployments-list.md' + - 'Deployments Move': 'customers_deployments-move.md' + - 'Deployments Patch': 'customers_deployments-patch.md' + - 'Devices Create': 'customers_devices-create.md' + - 'Devices Create Signed': 'customers_devices-create-signed.md' + - 'Devices Delete': 'customers_devices-delete.md' + - 'Devices Get': 'customers_devices-get.md' + - 'Devices List': 'customers_devices-list.md' + - 'Devices Move': 'customers_devices-move.md' + - 'Devices Patch': 'customers_devices-patch.md' + - 'Devices Sign Device': 'customers_devices-sign-device.md' + - 'Devices Update Signed': 'customers_devices-update-signed.md' + - 'Get': 'customers_get.md' + - 'List': 'customers_list.md' + - 'Nodes Create': 'customers_nodes-create.md' + - 'Nodes Delete': 'customers_nodes-delete.md' + - 'Nodes Deployments Create': 'customers_nodes-deployments-create.md' + - 'Nodes Deployments List': 'customers_nodes-deployments-list.md' + - 'Nodes Devices Create': 'customers_nodes-devices-create.md' + - 'Nodes Devices Create Signed': 'customers_nodes-devices-create-signed.md' + - 'Nodes Devices List': 'customers_nodes-devices-list.md' + - 'Nodes Get': 'customers_nodes-get.md' + - 'Nodes List': 'customers_nodes-list.md' + - 'Nodes Move': 'customers_nodes-move.md' + - 'Nodes Nodes Create': 'customers_nodes-nodes-create.md' + - 'Nodes Nodes List': 'customers_nodes-nodes-list.md' + - 'Nodes Patch': 'customers_nodes-patch.md' + - 
'Patch': 'customers_patch.md' +- 'Deployments': + - 'Devices Delete': 'deployments_devices-delete.md' + - 'Devices Get': 'deployments_devices-get.md' + - 'Devices Move': 'deployments_devices-move.md' + - 'Devices Patch': 'deployments_devices-patch.md' + - 'Devices Sign Device': 'deployments_devices-sign-device.md' + - 'Devices Update Signed': 'deployments_devices-update-signed.md' + - 'Get': 'deployments_get.md' +- 'Installer': + - 'Generate Secret': 'installer_generate-secret.md' + - 'Validate': 'installer_validate.md' +- 'Nodes': + - 'Deployments Delete': 'nodes_deployments-delete.md' + - 'Deployments Devices Create': 'nodes_deployments-devices-create.md' + - 'Deployments Devices Create Signed': 'nodes_deployments-devices-create-signed.md' + - 'Deployments Devices List': 'nodes_deployments-devices-list.md' + - 'Deployments Get': 'nodes_deployments-get.md' + - 'Deployments List': 'nodes_deployments-list.md' + - 'Deployments Move': 'nodes_deployments-move.md' + - 'Deployments Patch': 'nodes_deployments-patch.md' + - 'Devices Create': 'nodes_devices-create.md' + - 'Devices Create Signed': 'nodes_devices-create-signed.md' + - 'Devices Delete': 'nodes_devices-delete.md' + - 'Devices Get': 'nodes_devices-get.md' + - 'Devices List': 'nodes_devices-list.md' + - 'Devices Move': 'nodes_devices-move.md' + - 'Devices Patch': 'nodes_devices-patch.md' + - 'Devices Sign Device': 'nodes_devices-sign-device.md' + - 'Devices Update Signed': 'nodes_devices-update-signed.md' + - 'Get': 'nodes_get.md' + - 'Nodes Create': 'nodes_nodes-create.md' + - 'Nodes Delete': 'nodes_nodes-delete.md' + - 'Nodes Deployments Create': 'nodes_nodes-deployments-create.md' + - 'Nodes Deployments List': 'nodes_nodes-deployments-list.md' + - 'Nodes Devices Create': 'nodes_nodes-devices-create.md' + - 'Nodes Devices Create Signed': 'nodes_nodes-devices-create-signed.md' + - 'Nodes Devices List': 'nodes_nodes-devices-list.md' + - 'Nodes Get': 'nodes_nodes-get.md' + - 'Nodes List': 'nodes_nodes-list.md' + - 
'Nodes Move': 'nodes_nodes-move.md' + - 'Nodes Nodes Create': 'nodes_nodes-nodes-create.md' + - 'Nodes Nodes List': 'nodes_nodes-nodes-list.md' + - 'Nodes Patch': 'nodes_nodes-patch.md' +- 'Policies': + - 'Get': 'policies_get.md' + - 'Set': 'policies_set.md' + - 'Test': 'policies_test.md' theme: readthedocs diff --git a/gen/sasportal1_alpha1-cli/src/client.rs b/gen/sasportal1_alpha1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/sasportal1_alpha1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/sasportal1_alpha1-cli/src/main.rs b/gen/sasportal1_alpha1-cli/src/main.rs index 3248431dd6..940faa7bc0 100644 --- a/gen/sasportal1_alpha1-cli/src/main.rs +++ b/gen/sasportal1_alpha1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_sasportal1_alpha1::{api, Error, oauth2}; +use google_sasportal1_alpha1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -222,9 +221,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -250,6 +251,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -261,9 +263,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => 
Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ 
-284,7 +288,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } 
@@ -440,7 +444,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -554,7 +558,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -734,7 +738,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -814,9 +818,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => 
Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -842,6 +848,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ 
-853,9 +860,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => 
Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -876,7 +885,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", 
"vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1136,7 +1145,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1304,9 +1313,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), 
"active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -1332,6 +1343,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1343,9 +1355,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => 
Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -1366,7 +1380,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", 
"antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1381,7 +1395,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1461,9 +1475,11 @@ where 
"device.active-config.installation-params.antenna-beamwidth" => Some(("device.activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-downtilt" => Some(("device.activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-gain" => Some(("device.activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.antenna-gain-new-field" => Some(("device.activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-model" => Some(("device.activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.cpe-cbsd-indication" => Some(("device.activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.active-config.installation-params.eirp-capability" => Some(("device.activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.eirp-capability-new-field" => Some(("device.activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height" => Some(("device.activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height-type" => Some(("device.activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"device.active-config.installation-params.horizontal-accuracy" => Some(("device.activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -1489,6 +1505,7 @@ where "device.device-metadata.nrqz-validation.cpi-id" => Some(("device.deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.latitude" => Some(("device.deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.longitude" => Some(("device.deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device.device-metadata.nrqz-validation.state" => Some(("device.deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.display-name" => Some(("device.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.fcc-id" => Some(("device.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.name" => Some(("device.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1500,9 +1517,11 @@ where "device.preloaded-config.installation-params.antenna-beamwidth" => Some(("device.preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-downtilt" => Some(("device.preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-gain" => Some(("device.preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.antenna-gain-new-field" => 
Some(("device.preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-model" => Some(("device.preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.cpe-cbsd-indication" => Some(("device.preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.eirp-capability" => Some(("device.preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.eirp-capability-new-field" => Some(("device.preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height" => Some(("device.preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height-type" => Some(("device.preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.horizontal-accuracy" => Some(("device.preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -1523,7 +1542,7 @@ where "device.serial-number" => Some(("device.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.state" => Some(("device.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", 
"call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1731,7 +1750,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2017,7 +2036,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2100,9 +2119,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => 
Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -2128,6 +2149,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2139,9 +2161,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { 
jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -2162,7 +2186,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", 
"height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2318,7 +2342,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2432,7 +2456,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); 
}, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2666,7 +2690,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2760,7 +2784,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2851,7 +2875,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3120,9 +3144,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3148,6 +3174,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3159,9 +3186,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" 
=> Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3182,7 +3211,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", 
"vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3197,7 +3226,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3277,9 +3306,11 @@ where "device.active-config.installation-params.antenna-beamwidth" => Some(("device.activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-downtilt" => Some(("device.activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-gain" => Some(("device.activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.antenna-gain-new-field" => Some(("device.activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-model" => Some(("device.activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.cpe-cbsd-indication" => Some(("device.activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.active-config.installation-params.eirp-capability" => Some(("device.activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.eirp-capability-new-field" => 
Some(("device.activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height" => Some(("device.activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height-type" => Some(("device.activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.horizontal-accuracy" => Some(("device.activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3305,6 +3336,7 @@ where "device.device-metadata.nrqz-validation.cpi-id" => Some(("device.deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.latitude" => Some(("device.deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.longitude" => Some(("device.deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device.device-metadata.nrqz-validation.state" => Some(("device.deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.display-name" => Some(("device.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.fcc-id" => Some(("device.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.name" => Some(("device.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3316,9 +3348,11 @@ where "device.preloaded-config.installation-params.antenna-beamwidth" => Some(("device.preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"device.preloaded-config.installation-params.antenna-downtilt" => Some(("device.preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-gain" => Some(("device.preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.antenna-gain-new-field" => Some(("device.preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-model" => Some(("device.preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.cpe-cbsd-indication" => Some(("device.preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.eirp-capability" => Some(("device.preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.eirp-capability-new-field" => Some(("device.preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height" => Some(("device.preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height-type" => Some(("device.preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.horizontal-accuracy" => Some(("device.preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod 
})), @@ -3339,7 +3373,7 @@ where "device.serial-number" => Some(("device.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.state" => Some(("device.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3791,9 +3825,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3819,6 +3855,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3830,9 +3867,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => 
Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -3853,7 +3892,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", 
"is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4009,7 +4048,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4123,7 +4162,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4303,7 +4342,7 @@ where let (key, value) = 
parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4383,9 +4422,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => 
Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -4411,6 +4452,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4422,9 +4464,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { 
jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -4445,7 +4489,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", 
"height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4705,7 +4749,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -4873,9 +4917,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => 
Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -4901,6 +4947,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => 
Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4912,9 +4959,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => 
Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -4935,7 +4984,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", 
"antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4950,7 +4999,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5030,9 +5079,11 @@ where "device.active-config.installation-params.antenna-beamwidth" => Some(("device.activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-downtilt" => Some(("device.activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.active-config.installation-params.antenna-gain" => Some(("device.activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.antenna-gain-new-field" => Some(("device.activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: 
ComplexType::Pod })), "device.active-config.installation-params.antenna-model" => Some(("device.activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.cpe-cbsd-indication" => Some(("device.activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.active-config.installation-params.eirp-capability" => Some(("device.activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.active-config.installation-params.eirp-capability-new-field" => Some(("device.activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height" => Some(("device.activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.active-config.installation-params.height-type" => Some(("device.activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.active-config.installation-params.horizontal-accuracy" => Some(("device.activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -5058,6 +5109,7 @@ where "device.device-metadata.nrqz-validation.cpi-id" => Some(("device.deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.latitude" => Some(("device.deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.device-metadata.nrqz-validation.longitude" => Some(("device.deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device.device-metadata.nrqz-validation.state" => 
Some(("device.deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.display-name" => Some(("device.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.fcc-id" => Some(("device.fccId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.name" => Some(("device.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5069,9 +5121,11 @@ where "device.preloaded-config.installation-params.antenna-beamwidth" => Some(("device.preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-downtilt" => Some(("device.preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-gain" => Some(("device.preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.antenna-gain-new-field" => Some(("device.preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.antenna-model" => Some(("device.preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.cpe-cbsd-indication" => Some(("device.preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.eirp-capability" => Some(("device.preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "device.preloaded-config.installation-params.eirp-capability-new-field" => 
Some(("device.preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height" => Some(("device.preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.height-type" => Some(("device.preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.preloaded-config.installation-params.horizontal-accuracy" => Some(("device.preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -5092,7 +5146,7 @@ where "device.serial-number" => Some(("device.serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device.state" => Some(("device.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", 
"call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5527,7 +5581,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5610,9 +5664,11 @@ where "active-config.installation-params.antenna-beamwidth" => Some(("activeConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-downtilt" => Some(("activeConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-gain" => Some(("activeConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.antenna-gain-new-field" => Some(("activeConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.antenna-model" => Some(("activeConfig.installationParams.antennaModel", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.cpe-cbsd-indication" => Some(("activeConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "active-config.installation-params.eirp-capability" => Some(("activeConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "active-config.installation-params.eirp-capability-new-field" => Some(("activeConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height" => Some(("activeConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "active-config.installation-params.height-type" => Some(("activeConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "active-config.installation-params.horizontal-accuracy" => Some(("activeConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -5638,6 +5694,7 @@ where "device-metadata.nrqz-validation.cpi-id" => Some(("deviceMetadata.nrqzValidation.cpiId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.latitude" => Some(("deviceMetadata.nrqzValidation.latitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "device-metadata.nrqz-validation.longitude" => Some(("deviceMetadata.nrqzValidation.longitude", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "device-metadata.nrqz-validation.state" => Some(("deviceMetadata.nrqzValidation.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "fcc-id" => Some(("fccId", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -5649,9 +5706,11 @@ where "preloaded-config.installation-params.antenna-beamwidth" => Some(("preloadedConfig.installationParams.antennaBeamwidth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-downtilt" => Some(("preloadedConfig.installationParams.antennaDowntilt", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-gain" => Some(("preloadedConfig.installationParams.antennaGain", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.antenna-gain-new-field" => Some(("preloadedConfig.installationParams.antennaGainNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.antenna-model" => Some(("preloadedConfig.installationParams.antennaModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "preloaded-config.installation-params.cpe-cbsd-indication" => Some(("preloadedConfig.installationParams.cpeCbsdIndication", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "preloaded-config.installation-params.eirp-capability" => Some(("preloadedConfig.installationParams.eirpCapability", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "preloaded-config.installation-params.eirp-capability-new-field" => Some(("preloadedConfig.installationParams.eirpCapabilityNewField", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height" => Some(("preloadedConfig.installationParams.height", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "preloaded-config.installation-params.height-type" => Some(("preloadedConfig.installationParams.heightType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "preloaded-config.installation-params.horizontal-accuracy" => Some(("preloadedConfig.installationParams.horizontalAccuracy", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), @@ -5672,7 +5731,7 @@ where "serial-number" => Some(("serialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", "supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["active-config", "air-interface", "antenna-azimuth", "antenna-beamwidth", "antenna-downtilt", "antenna-gain", "antenna-gain-new-field", "antenna-model", "call-sign", "case-id", "category", "common-channel-group", "cpe-cbsd-indication", "cpi-id", "device-metadata", "display-name", "eirp-capability", "eirp-capability-new-field", "fcc-id", "firmware-version", "hardware-version", "height", "height-type", "horizontal-accuracy", "indoor-deployment", "installation-params", "interference-coordination-group", "is-signed", "latitude", "longitude", "measurement-capabilities", "model", "name", "nrqz-validated", "nrqz-validation", "preloaded-config", "radio-technology", "serial-number", "software-version", "state", 
"supported-spec", "update-time", "user-id", "vendor", "vertical-accuracy"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5828,7 +5887,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -5942,7 +6001,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6176,7 +6235,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -6270,7 +6329,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -8873,7 +8932,7 @@ async fn main() { let mut app = App::new("sasportal1-alpha1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230121") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_sasportal1_alpha1_cli") .arg(Arg::with_name("url") diff --git a/gen/sasportal1_alpha1/Cargo.toml b/gen/sasportal1_alpha1/Cargo.toml index 96b21a0211..c039d73b54 
100644 --- a/gen/sasportal1_alpha1/Cargo.toml +++ b/gen/sasportal1_alpha1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-sasportal1_alpha1" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with sasportal (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sasportal1_alpha1" homepage = "https://developers.google.com/spectrum-access-system/" -documentation = "https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121" license = "MIT" keywords = ["sasportal", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/sasportal1_alpha1/README.md b/gen/sasportal1_alpha1/README.md index 8c489706d7..bb770dff7c 100644 --- a/gen/sasportal1_alpha1/README.md +++ b/gen/sasportal1_alpha1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-sasportal1_alpha1` library allows access to all features of the *Google sasportal* service. -This documentation was generated from *sasportal* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *sasportal:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *sasportal* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *sasportal:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *sasportal* *v1_alpha1* API can be found at the [official documentation site](https://developers.google.com/spectrum-access-system/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/Sasportal) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/Sasportal) ... * customers - * [*deployments create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentCreateCall), [*deployments delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentDeleteCall), [*deployments devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentDeviceCreateCall), [*deployments devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentDeviceCreateSignedCall), [*deployments devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentDeviceListCall), [*deployments get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentGetCall), [*deployments list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentListCall), [*deployments move*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentMoveCall), [*deployments patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeploymentPatchCall), [*devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeviceCreateCall), [*devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeviceCreateSignedCall), [*devices delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeviceDeleteCall), [*devices 
get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeviceGetCall), [*devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeviceListCall), [*devices move*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeviceMoveCall), [*devices patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDevicePatchCall), [*devices sign device*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerDeviceUpdateSignedCall), [*get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerGetCall), [*list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerListCall), [*nodes create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeCreateCall), [*nodes delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeDeleteCall), [*nodes deployments create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeDeploymentCreateCall), [*nodes deployments list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeDeploymentListCall), [*nodes devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeDeviceCreateCall), [*nodes devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeDeviceCreateSignedCall), [*nodes devices 
list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeDeviceListCall), [*nodes get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeGetCall), [*nodes list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeListCall), [*nodes move*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeMoveCall), [*nodes nodes create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeNodeCreateCall), [*nodes nodes list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodeNodeListCall), [*nodes patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerNodePatchCall) and [*patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::CustomerPatchCall) + * [*deployments create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentCreateCall), [*deployments delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentDeleteCall), [*deployments devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentDeviceCreateCall), [*deployments devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentDeviceCreateSignedCall), [*deployments devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentDeviceListCall), [*deployments get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentGetCall), [*deployments 
list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentListCall), [*deployments move*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentMoveCall), [*deployments patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeploymentPatchCall), [*devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeviceCreateCall), [*devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeviceCreateSignedCall), [*devices delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeviceDeleteCall), [*devices get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeviceGetCall), [*devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeviceListCall), [*devices move*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeviceMoveCall), [*devices patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDevicePatchCall), [*devices sign device*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerDeviceUpdateSignedCall), [*get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerGetCall), [*list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerListCall), [*nodes create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeCreateCall), [*nodes 
delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeDeleteCall), [*nodes deployments create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeDeploymentCreateCall), [*nodes deployments list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeDeploymentListCall), [*nodes devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeDeviceCreateCall), [*nodes devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeDeviceCreateSignedCall), [*nodes devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeDeviceListCall), [*nodes get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeGetCall), [*nodes list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeListCall), [*nodes move*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeMoveCall), [*nodes nodes create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeNodeCreateCall), [*nodes nodes list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodeNodeListCall), [*nodes patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerNodePatchCall) and [*patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::CustomerPatchCall) * deployments - * [*devices delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::DeploymentDeviceDeleteCall), [*devices 
get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::DeploymentDeviceGetCall), [*devices move*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::DeploymentDeviceMoveCall), [*devices patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::DeploymentDevicePatchCall), [*devices sign device*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::DeploymentDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::DeploymentDeviceUpdateSignedCall) and [*get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::DeploymentGetCall) + * [*devices delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::DeploymentDeviceDeleteCall), [*devices get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::DeploymentDeviceGetCall), [*devices move*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::DeploymentDeviceMoveCall), [*devices patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::DeploymentDevicePatchCall), [*devices sign device*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::DeploymentDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::DeploymentDeviceUpdateSignedCall) and [*get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::DeploymentGetCall) * installer - * [*generate secret*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::InstallerGenerateSecretCall) and 
[*validate*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::InstallerValidateCall) + * [*generate secret*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::InstallerGenerateSecretCall) and [*validate*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::InstallerValidateCall) * nodes - * [*deployments delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeploymentDeleteCall), [*deployments devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeploymentDeviceCreateCall), [*deployments devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeploymentDeviceCreateSignedCall), [*deployments devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeploymentDeviceListCall), [*deployments get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeploymentGetCall), [*deployments list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeploymentListCall), [*deployments move*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeploymentMoveCall), [*deployments patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeploymentPatchCall), [*devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeviceCreateCall), [*devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeviceCreateSignedCall), [*devices 
delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeviceDeleteCall), [*devices get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeviceGetCall), [*devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeviceListCall), [*devices move*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeviceMoveCall), [*devices patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDevicePatchCall), [*devices sign device*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeDeviceUpdateSignedCall), [*get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeGetCall), [*nodes create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeCreateCall), [*nodes delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeDeleteCall), [*nodes deployments create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeDeploymentCreateCall), [*nodes deployments list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeDeploymentListCall), [*nodes devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeDeviceCreateCall), [*nodes devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeDeviceCreateSignedCall), [*nodes devices 
list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeDeviceListCall), [*nodes get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeGetCall), [*nodes list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeListCall), [*nodes move*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeMoveCall), [*nodes nodes create*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeNodeCreateCall), [*nodes nodes list*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodeNodeListCall) and [*nodes patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::NodeNodePatchCall) + * [*deployments delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeploymentDeleteCall), [*deployments devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeploymentDeviceCreateCall), [*deployments devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeploymentDeviceCreateSignedCall), [*deployments devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeploymentDeviceListCall), [*deployments get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeploymentGetCall), [*deployments list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeploymentListCall), [*deployments move*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeploymentMoveCall), [*deployments 
patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeploymentPatchCall), [*devices create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeviceCreateCall), [*devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeviceCreateSignedCall), [*devices delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeviceDeleteCall), [*devices get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeviceGetCall), [*devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeviceListCall), [*devices move*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeviceMoveCall), [*devices patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDevicePatchCall), [*devices sign device*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeviceSignDeviceCall), [*devices update signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeDeviceUpdateSignedCall), [*get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeGetCall), [*nodes create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeCreateCall), [*nodes delete*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeDeleteCall), [*nodes deployments create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeDeploymentCreateCall), [*nodes deployments list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeDeploymentListCall), [*nodes devices 
create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeDeviceCreateCall), [*nodes devices create signed*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeDeviceCreateSignedCall), [*nodes devices list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeDeviceListCall), [*nodes get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeGetCall), [*nodes list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeListCall), [*nodes move*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeMoveCall), [*nodes nodes create*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeNodeCreateCall), [*nodes nodes list*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodeNodeListCall) and [*nodes patch*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::NodeNodePatchCall) * policies - * [*get*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::PolicyGetCall), [*set*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::PolicySetCall) and [*test*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/api::PolicyTestCall) + * [*get*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::PolicyGetCall), [*set*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::PolicySetCall) and [*test*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/api::PolicyTestCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the 
following primary items: -* **[Hub](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/Sasportal)** +* **[Hub](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/Sasportal)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::CallBuilder) -* **[Resources](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::CallBuilder) +* **[Resources](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::Part)** + * **[Parts](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -153,17 +153,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -173,29 +173,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::Delegate) to the -[Method Builder](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::Delegate) to the +[Method Builder](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::RequestValue) and -[decodable](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::RequestValue) and +[decodable](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-sasportal1_alpha1/5.0.2-beta-1+20230121/google_sasportal1_alpha1/client::RequestValue) are moved +* [request values](https://docs.rs/google-sasportal1_alpha1/5.0.2+20230121/google_sasportal1_alpha1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/sasportal1_alpha1/src/api.rs b/gen/sasportal1_alpha1/src/api.rs index d10712539d..0cf85a4b6e 100644 --- a/gen/sasportal1_alpha1/src/api.rs +++ b/gen/sasportal1_alpha1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Sasportal { Sasportal { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://sasportal.googleapis.com/".to_string(), _root_url: "https://sasportal.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> Sasportal { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/sasportal1_alpha1/src/client.rs b/gen/sasportal1_alpha1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/sasportal1_alpha1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/sasportal1_alpha1/src/lib.rs b/gen/sasportal1_alpha1/src/lib.rs index 2d4a4edfac..d845efdb1e 100644 --- a/gen/sasportal1_alpha1/src/lib.rs +++ b/gen/sasportal1_alpha1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *sasportal* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *sasportal:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *sasportal* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *sasportal:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *sasportal* *v1_alpha1* API can be found at the //! [official documentation site](https://developers.google.com/spectrum-access-system/). diff --git a/gen/searchconsole1-cli/Cargo.toml b/gen/searchconsole1-cli/Cargo.toml index 05228aa8d5..9a9ad235f7 100644 --- a/gen/searchconsole1-cli/Cargo.toml +++ b/gen/searchconsole1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-searchconsole1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Search Console (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/searchconsole1-cli" @@ -20,13 +20,13 @@ name = "searchconsole1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-searchconsole1] path = "../searchconsole1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/searchconsole1-cli/README.md b/gen/searchconsole1-cli/README.md index e6185cbf98..eb7f990fae 100644 --- a/gen/searchconsole1-cli/README.md +++ b/gen/searchconsole1-cli/README.md @@ -25,7 +25,7 @@ Find the 
source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Search Console* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Search Console* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash searchconsole1 [options] diff --git a/gen/searchconsole1-cli/mkdocs.yml b/gen/searchconsole1-cli/mkdocs.yml index 324a782686..72f7096115 100644 --- a/gen/searchconsole1-cli/mkdocs.yml +++ b/gen/searchconsole1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Search Console v4.0.1+20220305 +site_name: Search Console v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-searchconsole1-cli site_description: A complete library to interact with Search Console (protocol v1) @@ -7,19 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/searchconsole1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['searchanalytics_query.md', 'Searchanalytics', 'Query'] -- ['sitemaps_delete.md', 'Sitemaps', 'Delete'] -- ['sitemaps_get.md', 'Sitemaps', 'Get'] -- ['sitemaps_list.md', 'Sitemaps', 'List'] -- ['sitemaps_submit.md', 'Sitemaps', 'Submit'] -- ['sites_add.md', 'Sites', 'Add'] -- ['sites_delete.md', 'Sites', 'Delete'] -- ['sites_get.md', 'Sites', 'Get'] -- ['sites_list.md', 'Sites', 'List'] -- ['url-inspection_index-inspect.md', 'Url Inspection', 'Index Inspect'] -- ['url-testing-tools_mobile-friendly-test-run.md', 'Url Testing Tools', 'Mobile Friendly Test Run'] +nav: +- Home: 'index.md' +- 'Searchanalytics': + - 'Query': 'searchanalytics_query.md' +- 'Sitemaps': + - 'Delete': 'sitemaps_delete.md' + - 'Get': 'sitemaps_get.md' + - 'List': 'sitemaps_list.md' + - 'Submit': 'sitemaps_submit.md' +- 'Sites': + - 'Add': 'sites_add.md' + - 'Delete': 'sites_delete.md' + - 'Get': 'sites_get.md' + - 'List': 'sites_list.md' +- 'Url Inspection': + - 'Index Inspect': 'url-inspection_index-inspect.md' +- 'Url Testing 
Tools': + - 'Mobile Friendly Test Run': 'url-testing-tools_mobile-friendly-test-run.md' theme: readthedocs diff --git a/gen/searchconsole1-cli/src/client.rs b/gen/searchconsole1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/searchconsole1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/searchconsole1-cli/src/main.rs b/gen/searchconsole1-cli/src/main.rs index ea6dc05b0f..72dc0e8f3c 100644 --- a/gen/searchconsole1-cli/src/main.rs +++ b/gen/searchconsole1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_searchconsole1::{api, Error, oauth2}; +use google_searchconsole1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -1108,7 +1107,7 @@ async fn main() { let mut app = App::new("searchconsole1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("The Search Console API provides access to both Search Console data (verified users only) and to public information on an URL basis (anyone)") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_searchconsole1_cli") .arg(Arg::with_name("url") diff --git a/gen/searchconsole1/Cargo.toml b/gen/searchconsole1/Cargo.toml index 4b3a1967a8..98633ca688 100644 --- a/gen/searchconsole1/Cargo.toml +++ b/gen/searchconsole1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-searchconsole1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Search Console (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/searchconsole1" homepage = "https://developers.google.com/webmaster-tools/search-console-api/" -documentation = 
"https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-searchconsole1/5.0.2+20230123" license = "MIT" keywords = ["searchconsole", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/searchconsole1/README.md b/gen/searchconsole1/README.md index 5061c9dda4..2c20cb913c 100644 --- a/gen/searchconsole1/README.md +++ b/gen/searchconsole1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-searchconsole1` library allows access to all features of the *Google Search Console* service. -This documentation was generated from *Search Console* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *searchconsole:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Search Console* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *searchconsole:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Search Console* *v1* API can be found at the [official documentation site](https://developers.google.com/webmaster-tools/search-console-api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/SearchConsole) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/SearchConsole) ... 
* searchanalytics - * [*query*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SearchanalyticQueryCall) + * [*query*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SearchanalyticQueryCall) * sitemaps - * [*delete*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SitemapDeleteCall), [*get*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SitemapGetCall), [*list*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SitemapListCall) and [*submit*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SitemapSubmitCall) + * [*delete*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SitemapDeleteCall), [*get*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SitemapGetCall), [*list*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SitemapListCall) and [*submit*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SitemapSubmitCall) * sites - * [*add*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SiteAddCall), [*delete*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SiteDeleteCall), [*get*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SiteGetCall) and [*list*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::SiteListCall) + * [*add*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SiteAddCall), [*delete*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SiteDeleteCall), [*get*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SiteGetCall) and 
[*list*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::SiteListCall) * url inspection - * [*index inspect*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::UrlInspectionIndexInspectCall) + * [*index inspect*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::UrlInspectionIndexInspectCall) * url testing tools - * [*mobile friendly test run*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/api::UrlTestingToolMobileFriendlyTestRunCall) + * [*mobile friendly test run*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/api::UrlTestingToolMobileFriendlyTestRunCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/SearchConsole)** +* **[Hub](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/SearchConsole)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::CallBuilder) -* **[Resources](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::CallBuilder) +* **[Resources](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::Resource)** * 
primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::Part)** + * **[Parts](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -132,17 +132,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -152,29 +152,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::Delegate) to the -[Method Builder](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::Delegate) to the +[Method Builder](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::RequestValue) and -[decodable](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::RequestValue) and +[decodable](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-searchconsole1/5.0.2-beta-1+20230123/google_searchconsole1/client::RequestValue) are moved +* [request values](https://docs.rs/google-searchconsole1/5.0.2+20230123/google_searchconsole1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/searchconsole1/src/api.rs b/gen/searchconsole1/src/api.rs index f79e102025..cf1edf613e 100644 --- a/gen/searchconsole1/src/api.rs +++ b/gen/searchconsole1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> SearchConsole { SearchConsole { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://searchconsole.googleapis.com/".to_string(), _root_url: "https://searchconsole.googleapis.com/".to_string(), } @@ -152,7 +152,7 @@ impl<'a, S> SearchConsole { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/searchconsole1/src/client.rs b/gen/searchconsole1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/searchconsole1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/searchconsole1/src/lib.rs b/gen/searchconsole1/src/lib.rs index 922bbe26a5..d73ab5198d 100644 --- a/gen/searchconsole1/src/lib.rs +++ b/gen/searchconsole1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Search Console* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *searchconsole:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Search Console* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *searchconsole:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Search Console* *v1* API can be found at the //! [official documentation site](https://developers.google.com/webmaster-tools/search-console-api/). diff --git a/gen/secretmanager1-cli/Cargo.toml b/gen/secretmanager1-cli/Cargo.toml index 906a81ddd1..2f72524b10 100644 --- a/gen/secretmanager1-cli/Cargo.toml +++ b/gen/secretmanager1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-secretmanager1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Secret Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/secretmanager1-cli" @@ -20,13 +20,13 @@ name = "secretmanager1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-secretmanager1] path = "../secretmanager1" -version = "4.0.1+20220226" +version = "5.0.2+20230114" + diff --git a/gen/secretmanager1-cli/README.md b/gen/secretmanager1-cli/README.md index ea05e36f28..0d795660da 100644 --- a/gen/secretmanager1-cli/README.md +++ b/gen/secretmanager1-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Secret Manager* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *Secret Manager* API at revision *20230114*. The CLI is at version *5.0.2*. ```bash secretmanager1 [options] diff --git a/gen/secretmanager1-cli/mkdocs.yml b/gen/secretmanager1-cli/mkdocs.yml index eccf943dd6..3dd80b1337 100644 --- a/gen/secretmanager1-cli/mkdocs.yml +++ b/gen/secretmanager1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Secret Manager v4.0.1+20220226 +site_name: Secret Manager v5.0.2+20230114 site_url: http://byron.github.io/google-apis-rs/google-secretmanager1-cli site_description: A complete library to interact with Secret Manager (protocol v1) @@ -7,25 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/secretmanager1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_secrets-add-version.md', 'Projects', 'Secrets Add Version'] -- ['projects_secrets-create.md', 'Projects', 'Secrets Create'] -- ['projects_secrets-delete.md', 'Projects', 'Secrets Delete'] -- ['projects_secrets-get.md', 'Projects', 'Secrets Get'] -- ['projects_secrets-get-iam-policy.md', 'Projects', 'Secrets Get Iam Policy'] -- ['projects_secrets-list.md', 'Projects', 'Secrets List'] -- ['projects_secrets-patch.md', 'Projects', 'Secrets Patch'] -- ['projects_secrets-set-iam-policy.md', 'Projects', 'Secrets Set Iam Policy'] -- ['projects_secrets-test-iam-permissions.md', 'Projects', 'Secrets Test Iam Permissions'] -- ['projects_secrets-versions-access.md', 'Projects', 'Secrets Versions Access'] -- ['projects_secrets-versions-destroy.md', 'Projects', 'Secrets Versions Destroy'] -- ['projects_secrets-versions-disable.md', 'Projects', 'Secrets Versions 
Disable'] -- ['projects_secrets-versions-enable.md', 'Projects', 'Secrets Versions Enable'] -- ['projects_secrets-versions-get.md', 'Projects', 'Secrets Versions Get'] -- ['projects_secrets-versions-list.md', 'Projects', 'Secrets Versions List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Secrets Add Version': 'projects_secrets-add-version.md' + - 'Secrets Create': 'projects_secrets-create.md' + - 'Secrets Delete': 'projects_secrets-delete.md' + - 'Secrets Get': 'projects_secrets-get.md' + - 'Secrets Get Iam Policy': 'projects_secrets-get-iam-policy.md' + - 'Secrets List': 'projects_secrets-list.md' + - 'Secrets Patch': 'projects_secrets-patch.md' + - 'Secrets Set Iam Policy': 'projects_secrets-set-iam-policy.md' + - 'Secrets Test Iam Permissions': 'projects_secrets-test-iam-permissions.md' + - 'Secrets Versions Access': 'projects_secrets-versions-access.md' + - 'Secrets Versions Destroy': 'projects_secrets-versions-destroy.md' + - 'Secrets Versions Disable': 'projects_secrets-versions-disable.md' + - 'Secrets Versions Enable': 'projects_secrets-versions-enable.md' + - 'Secrets Versions Get': 'projects_secrets-versions-get.md' + - 'Secrets Versions List': 'projects_secrets-versions-list.md' theme: readthedocs diff --git a/gen/secretmanager1-cli/src/client.rs b/gen/secretmanager1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/secretmanager1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use 
std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/secretmanager1-cli/src/main.rs b/gen/secretmanager1-cli/src/main.rs index 09b4437b43..acc054ee49 100644 --- a/gen/secretmanager1-cli/src/main.rs +++ b/gen/secretmanager1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_secretmanager1::{api, Error, oauth2}; +use google_secretmanager1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -274,6 +273,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "expire-time" => Some(("expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -283,8 +283,9 @@ where "rotation.next-rotation-time" => Some(("rotation.nextRotationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rotation.rotation-period" => Some(("rotation.rotationPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ttl" => Some(("ttl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "version-aliases" => Some(("versionAliases", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["automatic", "create-time", "customer-managed-encryption", "etag", "expire-time", "kms-key-name", "labels", "name", "next-rotation-time", "replication", "rotation", "rotation-period", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "automatic", "create-time", "customer-managed-encryption", "etag", "expire-time", "kms-key-name", "labels", "name", "next-rotation-time", "replication", "rotation", "rotation-period", "ttl", "version-aliases"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -463,7 +464,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); 
}, _ => { let mut found = false; @@ -522,7 +523,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -597,6 +598,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "expire-time" => Some(("expireTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -606,8 +608,9 @@ where "rotation.next-rotation-time" => Some(("rotation.nextRotationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "rotation.rotation-period" => Some(("rotation.rotationPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ttl" => Some(("ttl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "version-aliases" => Some(("versionAliases", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["automatic", "create-time", "customer-managed-encryption", "etag", "expire-time", "kms-key-name", "labels", "name", "next-rotation-time", "replication", "rotation", "rotation-period", "ttl"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "automatic", "create-time", "customer-managed-encryption", "etag", "expire-time", "kms-key-name", "labels", "name", "next-rotation-time", "replication", "rotation", "rotation-period", "ttl", "version-aliases"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -622,7 +625,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1212,7 +1215,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1552,7 +1555,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1624,7 +1627,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1652,7 +1655,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1830,7 +1833,7 @@ async fn main() { let mut app = App::new("secretmanager1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230114") .about("Stores sensitive data such as API keys, passwords, and certificates. Provides convenience while improving security. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_secretmanager1_cli") .arg(Arg::with_name("url") diff --git a/gen/secretmanager1/Cargo.toml b/gen/secretmanager1/Cargo.toml index 813f598fb0..7e23fcd7c4 100644 --- a/gen/secretmanager1/Cargo.toml +++ b/gen/secretmanager1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-secretmanager1" -version = "5.0.2-beta-1+20230114" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Secret Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/secretmanager1" homepage = "https://cloud.google.com/secret-manager/" -documentation = "https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114" +documentation = "https://docs.rs/google-secretmanager1/5.0.2+20230114" license = "MIT" keywords = ["secretmanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/secretmanager1/README.md b/gen/secretmanager1/README.md index 01b96f3605..95c5203150 100644 --- a/gen/secretmanager1/README.md +++ b/gen/secretmanager1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-secretmanager1` library allows access to all features of the *Google Secret Manager* service. 
-This documentation was generated from *Secret Manager* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *secretmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Secret Manager* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *secretmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Secret Manager* *v1* API can be found at the [official documentation site](https://cloud.google.com/secret-manager/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/SecretManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/SecretManager) ... * projects - * [*locations get*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectLocationListCall), [*secrets add version*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretAddVersionCall), [*secrets create*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretCreateCall), [*secrets delete*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretDeleteCall), [*secrets get*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretGetCall), [*secrets get iam policy*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretGetIamPolicyCall), [*secrets 
list*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretListCall), [*secrets patch*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretPatchCall), [*secrets set iam policy*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretSetIamPolicyCall), [*secrets test iam permissions*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretTestIamPermissionCall), [*secrets versions access*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretVersionAccesCall), [*secrets versions destroy*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretVersionDestroyCall), [*secrets versions disable*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretVersionDisableCall), [*secrets versions enable*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretVersionEnableCall), [*secrets versions get*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretVersionGetCall) and [*secrets versions list*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/api::ProjectSecretVersionListCall) + * [*locations get*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectLocationListCall), [*secrets add version*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretAddVersionCall), [*secrets create*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretCreateCall), [*secrets 
delete*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretDeleteCall), [*secrets get*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretGetCall), [*secrets get iam policy*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretGetIamPolicyCall), [*secrets list*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretListCall), [*secrets patch*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretPatchCall), [*secrets set iam policy*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretSetIamPolicyCall), [*secrets test iam permissions*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretTestIamPermissionCall), [*secrets versions access*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretVersionAccesCall), [*secrets versions destroy*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretVersionDestroyCall), [*secrets versions disable*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretVersionDisableCall), [*secrets versions enable*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretVersionEnableCall), [*secrets versions get*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretVersionGetCall) and [*secrets versions list*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/api::ProjectSecretVersionListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/SecretManager)** +* **[Hub](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/SecretManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::CallBuilder) -* **[Resources](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::CallBuilder) +* **[Resources](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::Part)** + * **[Parts](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::Delegate) to the -[Method Builder](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::Delegate) to the +[Method Builder](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::RequestValue) and -[decodable](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::RequestValue) and +[decodable](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-secretmanager1/5.0.2-beta-1+20230114/google_secretmanager1/client::RequestValue) are moved +* [request values](https://docs.rs/google-secretmanager1/5.0.2+20230114/google_secretmanager1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/secretmanager1/src/api.rs b/gen/secretmanager1/src/api.rs index 748b9edf08..f96a614c3f 100644 --- a/gen/secretmanager1/src/api.rs +++ b/gen/secretmanager1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> SecretManager { SecretManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://secretmanager.googleapis.com/".to_string(), _root_url: "https://secretmanager.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> SecretManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/secretmanager1/src/client.rs b/gen/secretmanager1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/secretmanager1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/secretmanager1/src/lib.rs b/gen/secretmanager1/src/lib.rs index cde7e6d5c4..1b98edbb12 100644 --- a/gen/secretmanager1/src/lib.rs +++ b/gen/secretmanager1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Secret Manager* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *secretmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Secret Manager* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *secretmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Secret Manager* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/secret-manager/). diff --git a/gen/secretmanager1_beta1-cli/Cargo.toml b/gen/secretmanager1_beta1-cli/Cargo.toml index 6c24878f3a..cd1870560c 100644 --- a/gen/secretmanager1_beta1-cli/Cargo.toml +++ b/gen/secretmanager1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-secretmanager1_beta1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Secret Manager (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/secretmanager1_beta1-cli" @@ -20,13 +20,13 @@ name = "secretmanager1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-secretmanager1_beta1] path = "../secretmanager1_beta1" -version = "4.0.1+20220226" +version = "5.0.2+20230114" + diff --git a/gen/secretmanager1_beta1-cli/README.md b/gen/secretmanager1_beta1-cli/README.md index 02165e7006..d5e9ffe718 100644 --- a/gen/secretmanager1_beta1-cli/README.md 
+++ b/gen/secretmanager1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Secret Manager* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *Secret Manager* API at revision *20230114*. The CLI is at version *5.0.2*. ```bash secretmanager1-beta1 [options] diff --git a/gen/secretmanager1_beta1-cli/mkdocs.yml b/gen/secretmanager1_beta1-cli/mkdocs.yml index 61ea71d3d6..530a186a5a 100644 --- a/gen/secretmanager1_beta1-cli/mkdocs.yml +++ b/gen/secretmanager1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Secret Manager v4.0.1+20220226 +site_name: Secret Manager v5.0.2+20230114 site_url: http://byron.github.io/google-apis-rs/google-secretmanager1_beta1-cli site_description: A complete library to interact with Secret Manager (protocol v1beta1) @@ -7,25 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/secretmanager1_b docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_secrets-add-version.md', 'Projects', 'Secrets Add Version'] -- ['projects_secrets-create.md', 'Projects', 'Secrets Create'] -- ['projects_secrets-delete.md', 'Projects', 'Secrets Delete'] -- ['projects_secrets-get.md', 'Projects', 'Secrets Get'] -- ['projects_secrets-get-iam-policy.md', 'Projects', 'Secrets Get Iam Policy'] -- ['projects_secrets-list.md', 'Projects', 'Secrets List'] -- ['projects_secrets-patch.md', 'Projects', 'Secrets Patch'] -- ['projects_secrets-set-iam-policy.md', 'Projects', 'Secrets Set Iam Policy'] -- ['projects_secrets-test-iam-permissions.md', 'Projects', 'Secrets Test Iam Permissions'] -- ['projects_secrets-versions-access.md', 'Projects', 'Secrets Versions Access'] -- ['projects_secrets-versions-destroy.md', 'Projects', 'Secrets 
Versions Destroy'] -- ['projects_secrets-versions-disable.md', 'Projects', 'Secrets Versions Disable'] -- ['projects_secrets-versions-enable.md', 'Projects', 'Secrets Versions Enable'] -- ['projects_secrets-versions-get.md', 'Projects', 'Secrets Versions Get'] -- ['projects_secrets-versions-list.md', 'Projects', 'Secrets Versions List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Secrets Add Version': 'projects_secrets-add-version.md' + - 'Secrets Create': 'projects_secrets-create.md' + - 'Secrets Delete': 'projects_secrets-delete.md' + - 'Secrets Get': 'projects_secrets-get.md' + - 'Secrets Get Iam Policy': 'projects_secrets-get-iam-policy.md' + - 'Secrets List': 'projects_secrets-list.md' + - 'Secrets Patch': 'projects_secrets-patch.md' + - 'Secrets Set Iam Policy': 'projects_secrets-set-iam-policy.md' + - 'Secrets Test Iam Permissions': 'projects_secrets-test-iam-permissions.md' + - 'Secrets Versions Access': 'projects_secrets-versions-access.md' + - 'Secrets Versions Destroy': 'projects_secrets-versions-destroy.md' + - 'Secrets Versions Disable': 'projects_secrets-versions-disable.md' + - 'Secrets Versions Enable': 'projects_secrets-versions-enable.md' + - 'Secrets Versions Get': 'projects_secrets-versions-get.md' + - 'Secrets Versions List': 'projects_secrets-versions-list.md' theme: readthedocs diff --git a/gen/secretmanager1_beta1-cli/src/client.rs b/gen/secretmanager1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/secretmanager1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use 
std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/secretmanager1_beta1-cli/src/main.rs b/gen/secretmanager1_beta1-cli/src/main.rs index 018ed90ff7..9a11b1ea40 100644 --- a/gen/secretmanager1_beta1-cli/src/main.rs +++ b/gen/secretmanager1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_secretmanager1_beta1::{api, Error, oauth2}; +use google_secretmanager1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -452,7 +451,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -511,7 +510,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -602,7 +601,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1189,7 +1188,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1526,7 +1525,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1598,7 +1597,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1626,7 +1625,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1804,7 +1803,7 @@ async fn main() { let mut app = App::new("secretmanager1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230114") .about("Stores sensitive data such as API keys, passwords, and certificates. Provides convenience while improving security. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_secretmanager1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/secretmanager1_beta1/Cargo.toml b/gen/secretmanager1_beta1/Cargo.toml index cb507cddbc..58ed5d3b42 100644 --- a/gen/secretmanager1_beta1/Cargo.toml +++ b/gen/secretmanager1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-secretmanager1_beta1" -version = "5.0.2-beta-1+20230114" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Secret Manager (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/secretmanager1_beta1" homepage = "https://cloud.google.com/secret-manager/" -documentation = "https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114" +documentation = "https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114" license = "MIT" keywords = ["secretmanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/secretmanager1_beta1/README.md b/gen/secretmanager1_beta1/README.md index 3c16a6a0e6..b0b55934b8 100644 --- a/gen/secretmanager1_beta1/README.md +++ b/gen/secretmanager1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-secretmanager1_beta1` library allows access to all features of the *Google Secret Manager* service. 
-This documentation was generated from *Secret Manager* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *secretmanager:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Secret Manager* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *secretmanager:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Secret Manager* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/secret-manager/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/SecretManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/SecretManager) ... 
* projects - * [*locations get*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectLocationListCall), [*secrets add version*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretAddVersionCall), [*secrets create*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretCreateCall), [*secrets delete*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretDeleteCall), [*secrets get*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretGetCall), [*secrets get iam policy*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretGetIamPolicyCall), [*secrets list*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretListCall), [*secrets patch*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretPatchCall), [*secrets set iam policy*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretSetIamPolicyCall), [*secrets test iam permissions*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretTestIamPermissionCall), [*secrets versions access*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionAccesCall), [*secrets versions destroy*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionDestroyCall), [*secrets versions 
disable*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionDisableCall), [*secrets versions enable*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionEnableCall), [*secrets versions get*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionGetCall) and [*secrets versions list*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionListCall) + * [*locations get*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectLocationListCall), [*secrets add version*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretAddVersionCall), [*secrets create*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretCreateCall), [*secrets delete*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretDeleteCall), [*secrets get*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretGetCall), [*secrets get iam policy*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretGetIamPolicyCall), [*secrets list*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretListCall), [*secrets patch*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretPatchCall), [*secrets set iam 
policy*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretSetIamPolicyCall), [*secrets test iam permissions*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretTestIamPermissionCall), [*secrets versions access*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionAccesCall), [*secrets versions destroy*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionDestroyCall), [*secrets versions disable*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionDisableCall), [*secrets versions enable*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionEnableCall), [*secrets versions get*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionGetCall) and [*secrets versions list*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/api::ProjectSecretVersionListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/SecretManager)** +* **[Hub](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/SecretManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-secretmanager1_beta1/5.0.2-beta-1+20230114/google_secretmanager1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-secretmanager1_beta1/5.0.2+20230114/google_secretmanager1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/secretmanager1_beta1/src/api.rs b/gen/secretmanager1_beta1/src/api.rs index 4d561f11f3..170c9a7eda 100644 --- a/gen/secretmanager1_beta1/src/api.rs +++ b/gen/secretmanager1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> SecretManager { SecretManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://secretmanager.googleapis.com/".to_string(), _root_url: "https://secretmanager.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> SecretManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/secretmanager1_beta1/src/client.rs b/gen/secretmanager1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/secretmanager1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/secretmanager1_beta1/src/lib.rs b/gen/secretmanager1_beta1/src/lib.rs index 34c22e0a2f..a9ab65d80e 100644 --- a/gen/secretmanager1_beta1/src/lib.rs +++ b/gen/secretmanager1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Secret Manager* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *secretmanager:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Secret Manager* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *secretmanager:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Secret Manager* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/secret-manager/). diff --git a/gen/securitycenter1-cli/Cargo.toml b/gen/securitycenter1-cli/Cargo.toml index 400567900e..44f1f85386 100644 --- a/gen/securitycenter1-cli/Cargo.toml +++ b/gen/securitycenter1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-securitycenter1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Security Command Center (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/securitycenter1-cli" @@ -20,13 +20,13 @@ name = "securitycenter1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-securitycenter1] path = "../securitycenter1" -version = "4.0.1+20220224" +version = "5.0.2+20230123" + diff --git a/gen/securitycenter1-cli/README.md b/gen/securitycenter1-cli/README.md index 7e1867d346..f4263cef39 100644 --- a/gen/securitycenter1-cli/README.md +++ b/gen/securitycenter1-cli/README.md 
@@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Security Command Center* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Security Command Center* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash securitycenter1 [options] @@ -44,6 +44,11 @@ securitycenter1 [options] mute-configs-get [-p ]... [-o ] mute-configs-list [-p ]... [-o ] mute-configs-patch (-r )... [-p ]... [-o ] + notification-configs-create (-r )... [-p ]... [-o ] + notification-configs-delete [-p ]... [-o ] + notification-configs-get [-p ]... [-o ] + notification-configs-list [-p ]... [-o ] + notification-configs-patch (-r )... [-p ]... [-o ] sources-findings-external-systems-patch (-r )... [-p ]... [-o ] sources-findings-group (-r )... [-p ]... [-o ] sources-findings-list [-p ]... [-o ] @@ -109,6 +114,11 @@ securitycenter1 [options] mute-configs-get [-p ]... [-o ] mute-configs-list [-p ]... [-o ] mute-configs-patch (-r )... [-p ]... [-o ] + notification-configs-create (-r )... [-p ]... [-o ] + notification-configs-delete [-p ]... [-o ] + notification-configs-get [-p ]... [-o ] + notification-configs-list [-p ]... [-o ] + notification-configs-patch (-r )... [-p ]... [-o ] sources-findings-external-systems-patch (-r )... [-p ]... [-o ] sources-findings-group (-r )... [-p ]... [-o ] sources-findings-list [-p ]... 
[-o ] diff --git a/gen/securitycenter1-cli/mkdocs.yml b/gen/securitycenter1-cli/mkdocs.yml index e34b803070..a7c5c9703a 100644 --- a/gen/securitycenter1-cli/mkdocs.yml +++ b/gen/securitycenter1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Security Command Center v4.0.1+20220224 +site_name: Security Command Center v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-securitycenter1-cli site_description: A complete library to interact with Security Command Center (protocol v1) @@ -7,93 +7,106 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/securitycenter1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['folders_assets-group.md', 'Folders', 'Assets Group'] -- ['folders_assets-list.md', 'Folders', 'Assets List'] -- ['folders_assets-update-security-marks.md', 'Folders', 'Assets Update Security Marks'] -- ['folders_big-query-exports-create.md', 'Folders', 'Big Query Exports Create'] -- ['folders_big-query-exports-delete.md', 'Folders', 'Big Query Exports Delete'] -- ['folders_big-query-exports-get.md', 'Folders', 'Big Query Exports Get'] -- ['folders_big-query-exports-list.md', 'Folders', 'Big Query Exports List'] -- ['folders_big-query-exports-patch.md', 'Folders', 'Big Query Exports Patch'] -- ['folders_findings-bulk-mute.md', 'Folders', 'Findings Bulk Mute'] -- ['folders_mute-configs-create.md', 'Folders', 'Mute Configs Create'] -- ['folders_mute-configs-delete.md', 'Folders', 'Mute Configs Delete'] -- ['folders_mute-configs-get.md', 'Folders', 'Mute Configs Get'] -- ['folders_mute-configs-list.md', 'Folders', 'Mute Configs List'] -- ['folders_mute-configs-patch.md', 'Folders', 'Mute Configs Patch'] -- ['folders_sources-findings-external-systems-patch.md', 'Folders', 'Sources Findings External Systems Patch'] -- ['folders_sources-findings-group.md', 'Folders', 'Sources Findings Group'] -- ['folders_sources-findings-list.md', 'Folders', 'Sources Findings List'] -- ['folders_sources-findings-patch.md', 
'Folders', 'Sources Findings Patch'] -- ['folders_sources-findings-set-mute.md', 'Folders', 'Sources Findings Set Mute'] -- ['folders_sources-findings-set-state.md', 'Folders', 'Sources Findings Set State'] -- ['folders_sources-findings-update-security-marks.md', 'Folders', 'Sources Findings Update Security Marks'] -- ['folders_sources-list.md', 'Folders', 'Sources List'] -- ['organizations_assets-group.md', 'Organizations', 'Assets Group'] -- ['organizations_assets-list.md', 'Organizations', 'Assets List'] -- ['organizations_assets-run-discovery.md', 'Organizations', 'Assets Run Discovery'] -- ['organizations_assets-update-security-marks.md', 'Organizations', 'Assets Update Security Marks'] -- ['organizations_big-query-exports-create.md', 'Organizations', 'Big Query Exports Create'] -- ['organizations_big-query-exports-delete.md', 'Organizations', 'Big Query Exports Delete'] -- ['organizations_big-query-exports-get.md', 'Organizations', 'Big Query Exports Get'] -- ['organizations_big-query-exports-list.md', 'Organizations', 'Big Query Exports List'] -- ['organizations_big-query-exports-patch.md', 'Organizations', 'Big Query Exports Patch'] -- ['organizations_findings-bulk-mute.md', 'Organizations', 'Findings Bulk Mute'] -- ['organizations_get-organization-settings.md', 'Organizations', 'Get Organization Settings'] -- ['organizations_mute-configs-create.md', 'Organizations', 'Mute Configs Create'] -- ['organizations_mute-configs-delete.md', 'Organizations', 'Mute Configs Delete'] -- ['organizations_mute-configs-get.md', 'Organizations', 'Mute Configs Get'] -- ['organizations_mute-configs-list.md', 'Organizations', 'Mute Configs List'] -- ['organizations_mute-configs-patch.md', 'Organizations', 'Mute Configs Patch'] -- ['organizations_notification-configs-create.md', 'Organizations', 'Notification Configs Create'] -- ['organizations_notification-configs-delete.md', 'Organizations', 'Notification Configs Delete'] -- ['organizations_notification-configs-get.md', 
'Organizations', 'Notification Configs Get'] -- ['organizations_notification-configs-list.md', 'Organizations', 'Notification Configs List'] -- ['organizations_notification-configs-patch.md', 'Organizations', 'Notification Configs Patch'] -- ['organizations_operations-cancel.md', 'Organizations', 'Operations Cancel'] -- ['organizations_operations-delete.md', 'Organizations', 'Operations Delete'] -- ['organizations_operations-get.md', 'Organizations', 'Operations Get'] -- ['organizations_operations-list.md', 'Organizations', 'Operations List'] -- ['organizations_sources-create.md', 'Organizations', 'Sources Create'] -- ['organizations_sources-findings-create.md', 'Organizations', 'Sources Findings Create'] -- ['organizations_sources-findings-external-systems-patch.md', 'Organizations', 'Sources Findings External Systems Patch'] -- ['organizations_sources-findings-group.md', 'Organizations', 'Sources Findings Group'] -- ['organizations_sources-findings-list.md', 'Organizations', 'Sources Findings List'] -- ['organizations_sources-findings-patch.md', 'Organizations', 'Sources Findings Patch'] -- ['organizations_sources-findings-set-mute.md', 'Organizations', 'Sources Findings Set Mute'] -- ['organizations_sources-findings-set-state.md', 'Organizations', 'Sources Findings Set State'] -- ['organizations_sources-findings-update-security-marks.md', 'Organizations', 'Sources Findings Update Security Marks'] -- ['organizations_sources-get.md', 'Organizations', 'Sources Get'] -- ['organizations_sources-get-iam-policy.md', 'Organizations', 'Sources Get Iam Policy'] -- ['organizations_sources-list.md', 'Organizations', 'Sources List'] -- ['organizations_sources-patch.md', 'Organizations', 'Sources Patch'] -- ['organizations_sources-set-iam-policy.md', 'Organizations', 'Sources Set Iam Policy'] -- ['organizations_sources-test-iam-permissions.md', 'Organizations', 'Sources Test Iam Permissions'] -- ['organizations_update-organization-settings.md', 'Organizations', 'Update 
Organization Settings'] -- ['projects_assets-group.md', 'Projects', 'Assets Group'] -- ['projects_assets-list.md', 'Projects', 'Assets List'] -- ['projects_assets-update-security-marks.md', 'Projects', 'Assets Update Security Marks'] -- ['projects_big-query-exports-create.md', 'Projects', 'Big Query Exports Create'] -- ['projects_big-query-exports-delete.md', 'Projects', 'Big Query Exports Delete'] -- ['projects_big-query-exports-get.md', 'Projects', 'Big Query Exports Get'] -- ['projects_big-query-exports-list.md', 'Projects', 'Big Query Exports List'] -- ['projects_big-query-exports-patch.md', 'Projects', 'Big Query Exports Patch'] -- ['projects_findings-bulk-mute.md', 'Projects', 'Findings Bulk Mute'] -- ['projects_mute-configs-create.md', 'Projects', 'Mute Configs Create'] -- ['projects_mute-configs-delete.md', 'Projects', 'Mute Configs Delete'] -- ['projects_mute-configs-get.md', 'Projects', 'Mute Configs Get'] -- ['projects_mute-configs-list.md', 'Projects', 'Mute Configs List'] -- ['projects_mute-configs-patch.md', 'Projects', 'Mute Configs Patch'] -- ['projects_sources-findings-external-systems-patch.md', 'Projects', 'Sources Findings External Systems Patch'] -- ['projects_sources-findings-group.md', 'Projects', 'Sources Findings Group'] -- ['projects_sources-findings-list.md', 'Projects', 'Sources Findings List'] -- ['projects_sources-findings-patch.md', 'Projects', 'Sources Findings Patch'] -- ['projects_sources-findings-set-mute.md', 'Projects', 'Sources Findings Set Mute'] -- ['projects_sources-findings-set-state.md', 'Projects', 'Sources Findings Set State'] -- ['projects_sources-findings-update-security-marks.md', 'Projects', 'Sources Findings Update Security Marks'] -- ['projects_sources-list.md', 'Projects', 'Sources List'] +nav: +- Home: 'index.md' +- 'Folders': + - 'Assets Group': 'folders_assets-group.md' + - 'Assets List': 'folders_assets-list.md' + - 'Assets Update Security Marks': 'folders_assets-update-security-marks.md' + - 'Big Query 
Exports Create': 'folders_big-query-exports-create.md' + - 'Big Query Exports Delete': 'folders_big-query-exports-delete.md' + - 'Big Query Exports Get': 'folders_big-query-exports-get.md' + - 'Big Query Exports List': 'folders_big-query-exports-list.md' + - 'Big Query Exports Patch': 'folders_big-query-exports-patch.md' + - 'Findings Bulk Mute': 'folders_findings-bulk-mute.md' + - 'Mute Configs Create': 'folders_mute-configs-create.md' + - 'Mute Configs Delete': 'folders_mute-configs-delete.md' + - 'Mute Configs Get': 'folders_mute-configs-get.md' + - 'Mute Configs List': 'folders_mute-configs-list.md' + - 'Mute Configs Patch': 'folders_mute-configs-patch.md' + - 'Notification Configs Create': 'folders_notification-configs-create.md' + - 'Notification Configs Delete': 'folders_notification-configs-delete.md' + - 'Notification Configs Get': 'folders_notification-configs-get.md' + - 'Notification Configs List': 'folders_notification-configs-list.md' + - 'Notification Configs Patch': 'folders_notification-configs-patch.md' + - 'Sources Findings External Systems Patch': 'folders_sources-findings-external-systems-patch.md' + - 'Sources Findings Group': 'folders_sources-findings-group.md' + - 'Sources Findings List': 'folders_sources-findings-list.md' + - 'Sources Findings Patch': 'folders_sources-findings-patch.md' + - 'Sources Findings Set Mute': 'folders_sources-findings-set-mute.md' + - 'Sources Findings Set State': 'folders_sources-findings-set-state.md' + - 'Sources Findings Update Security Marks': 'folders_sources-findings-update-security-marks.md' + - 'Sources List': 'folders_sources-list.md' +- 'Organizations': + - 'Assets Group': 'organizations_assets-group.md' + - 'Assets List': 'organizations_assets-list.md' + - 'Assets Run Discovery': 'organizations_assets-run-discovery.md' + - 'Assets Update Security Marks': 'organizations_assets-update-security-marks.md' + - 'Big Query Exports Create': 'organizations_big-query-exports-create.md' + - 'Big Query Exports 
Delete': 'organizations_big-query-exports-delete.md' + - 'Big Query Exports Get': 'organizations_big-query-exports-get.md' + - 'Big Query Exports List': 'organizations_big-query-exports-list.md' + - 'Big Query Exports Patch': 'organizations_big-query-exports-patch.md' + - 'Findings Bulk Mute': 'organizations_findings-bulk-mute.md' + - 'Get Organization Settings': 'organizations_get-organization-settings.md' + - 'Mute Configs Create': 'organizations_mute-configs-create.md' + - 'Mute Configs Delete': 'organizations_mute-configs-delete.md' + - 'Mute Configs Get': 'organizations_mute-configs-get.md' + - 'Mute Configs List': 'organizations_mute-configs-list.md' + - 'Mute Configs Patch': 'organizations_mute-configs-patch.md' + - 'Notification Configs Create': 'organizations_notification-configs-create.md' + - 'Notification Configs Delete': 'organizations_notification-configs-delete.md' + - 'Notification Configs Get': 'organizations_notification-configs-get.md' + - 'Notification Configs List': 'organizations_notification-configs-list.md' + - 'Notification Configs Patch': 'organizations_notification-configs-patch.md' + - 'Operations Cancel': 'organizations_operations-cancel.md' + - 'Operations Delete': 'organizations_operations-delete.md' + - 'Operations Get': 'organizations_operations-get.md' + - 'Operations List': 'organizations_operations-list.md' + - 'Sources Create': 'organizations_sources-create.md' + - 'Sources Findings Create': 'organizations_sources-findings-create.md' + - 'Sources Findings External Systems Patch': 'organizations_sources-findings-external-systems-patch.md' + - 'Sources Findings Group': 'organizations_sources-findings-group.md' + - 'Sources Findings List': 'organizations_sources-findings-list.md' + - 'Sources Findings Patch': 'organizations_sources-findings-patch.md' + - 'Sources Findings Set Mute': 'organizations_sources-findings-set-mute.md' + - 'Sources Findings Set State': 'organizations_sources-findings-set-state.md' + - 'Sources Findings 
Update Security Marks': 'organizations_sources-findings-update-security-marks.md' + - 'Sources Get': 'organizations_sources-get.md' + - 'Sources Get Iam Policy': 'organizations_sources-get-iam-policy.md' + - 'Sources List': 'organizations_sources-list.md' + - 'Sources Patch': 'organizations_sources-patch.md' + - 'Sources Set Iam Policy': 'organizations_sources-set-iam-policy.md' + - 'Sources Test Iam Permissions': 'organizations_sources-test-iam-permissions.md' + - 'Update Organization Settings': 'organizations_update-organization-settings.md' +- 'Projects': + - 'Assets Group': 'projects_assets-group.md' + - 'Assets List': 'projects_assets-list.md' + - 'Assets Update Security Marks': 'projects_assets-update-security-marks.md' + - 'Big Query Exports Create': 'projects_big-query-exports-create.md' + - 'Big Query Exports Delete': 'projects_big-query-exports-delete.md' + - 'Big Query Exports Get': 'projects_big-query-exports-get.md' + - 'Big Query Exports List': 'projects_big-query-exports-list.md' + - 'Big Query Exports Patch': 'projects_big-query-exports-patch.md' + - 'Findings Bulk Mute': 'projects_findings-bulk-mute.md' + - 'Mute Configs Create': 'projects_mute-configs-create.md' + - 'Mute Configs Delete': 'projects_mute-configs-delete.md' + - 'Mute Configs Get': 'projects_mute-configs-get.md' + - 'Mute Configs List': 'projects_mute-configs-list.md' + - 'Mute Configs Patch': 'projects_mute-configs-patch.md' + - 'Notification Configs Create': 'projects_notification-configs-create.md' + - 'Notification Configs Delete': 'projects_notification-configs-delete.md' + - 'Notification Configs Get': 'projects_notification-configs-get.md' + - 'Notification Configs List': 'projects_notification-configs-list.md' + - 'Notification Configs Patch': 'projects_notification-configs-patch.md' + - 'Sources Findings External Systems Patch': 'projects_sources-findings-external-systems-patch.md' + - 'Sources Findings Group': 'projects_sources-findings-group.md' + - 'Sources Findings 
List': 'projects_sources-findings-list.md' + - 'Sources Findings Patch': 'projects_sources-findings-patch.md' + - 'Sources Findings Set Mute': 'projects_sources-findings-set-mute.md' + - 'Sources Findings Set State': 'projects_sources-findings-set-state.md' + - 'Sources Findings Update Security Marks': 'projects_sources-findings-update-security-marks.md' + - 'Sources List': 'projects_sources-list.md' theme: readthedocs diff --git a/gen/securitycenter1-cli/src/client.rs b/gen/securitycenter1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/securitycenter1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/securitycenter1-cli/src/main.rs b/gen/securitycenter1-cli/src/main.rs index 0cac78a41f..64e80ab8dc 100644 --- a/gen/securitycenter1-cli/src/main.rs +++ b/gen/securitycenter1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_securitycenter1::{api, Error, oauth2}; +use google_securitycenter1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -148,13 +147,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -163,10 +162,10 @@ where call = call.filter(value.unwrap_or("")); }, "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "compare-duration" => { - call = call.compare_duration(value.unwrap_or("")); + call = call.compare_duration( value.map(|v| arg_from_str(v, err, "compare-duration", 
"google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -257,10 +256,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -519,7 +518,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -615,7 +614,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -959,7 +958,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1054,7 +1053,356 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _folders_notification_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match 
&temp_cursor.to_string()[..] { + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pubsub-topic" => Some(("pubsubTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "streaming-config.filter" => Some(("streamingConfig.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "filter", "name", "pubsub-topic", "service-account", "streaming-config"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NotificationConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.folders().notification_configs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "config-id" => { + call = call.config_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["config-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + 
assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _folders_notification_configs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.folders().notification_configs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + 
}; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _folders_notification_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.folders().notification_configs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, 
&value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _folders_notification_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.folders().notification_configs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn 
_folders_notification_configs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pubsub-topic" => Some(("pubsubTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "streaming-config.filter" => Some(("streamingConfig.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "filter", "name", "pubsub-topic", "service-account", "streaming-config"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NotificationConfig = json::value::from_value(object).unwrap(); + let mut call = 
self.hub.folders().notification_configs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1147,7 +1495,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1293,13 +1641,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1308,10 +1656,10 @@ where call = call.filter(value.unwrap_or("")); }, "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "compare-duration" => { - call = call.compare_duration(value.unwrap_or("")); + call = call.compare_duration( value.map(|v| arg_from_str(v, err, "compare-duration", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -1387,16 +1735,35 @@ where "access.caller-ip-geo.region-code" => 
Some(("access.callerIpGeo.regionCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.method-name" => Some(("access.methodName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.principal-email" => Some(("access.principalEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.principal-subject" => Some(("access.principalSubject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.service-account-key-name" => Some(("access.serviceAccountKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.service-name" => Some(("access.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.user-agent-family" => Some(("access.userAgentFamily", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.user-name" => Some(("access.userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "canonical-name" => Some(("canonicalName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "category" => Some(("category", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.display-name" => Some(("database.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.grantees" => Some(("database.grantees", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "database.name" => Some(("database.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.query" => Some(("database.query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.user-name" => Some(("database.userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "event-time" => Some(("eventTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-uri" => Some(("externalUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "finding-class" => Some(("findingClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "indicator.domains" => Some(("indicator.domains", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "indicator.ip-addresses" => Some(("indicator.ipAddresses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "indicator.uris" => Some(("indicator.uris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "kernel-rootkit.name" => Some(("kernelRootkit.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-code-modification" => Some(("kernelRootkit.unexpectedCodeModification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-ftrace-handler" => Some(("kernelRootkit.unexpectedFtraceHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-interrupt-handler" => Some(("kernelRootkit.unexpectedInterruptHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-kernel-code-pages" => Some(("kernelRootkit.unexpectedKernelCodePages", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-kprobe-handler" => Some(("kernelRootkit.unexpectedKprobeHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-processes-in-runqueue" => Some(("kernelRootkit.unexpectedProcessesInRunqueue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-read-only-data-modification" => Some(("kernelRootkit.unexpectedReadOnlyDataModification", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-system-call-handler" => Some(("kernelRootkit.unexpectedSystemCallHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "mitre-attack.additional-tactics" => Some(("mitreAttack.additionalTactics", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mitre-attack.additional-techniques" => Some(("mitreAttack.additionalTechniques", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mitre-attack.primary-tactic" => Some(("mitreAttack.primaryTactic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1406,7 +1773,9 @@ where "mute-initiator" => Some(("muteInitiator", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mute-update-time" => Some(("muteUpdateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "next-steps" => Some(("nextSteps", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent-display-name" => Some(("parentDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-marks.canonical-name" => Some(("securityMarks.canonicalName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-marks.marks" => Some(("securityMarks.marks", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -1425,7 +1794,7 @@ where "vulnerability.cve.id" => Some(("vulnerability.cve.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cve.upstream-fix-available" => Some(("vulnerability.cve.upstreamFixAvailable", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "additional-tactics", "additional-techniques", "attack-complexity", "attack-vector", "availability-impact", "base-score", "caller-ip", "caller-ip-geo", "canonical-name", "category", "confidentiality-impact", "create-time", "cve", "cvssv3", "domains", "event-time", "external-uri", "finding-class", "id", "indicator", "integrity-impact", "ip-addresses", "marks", "method-name", "mitre-attack", "mute", "mute-initiator", "mute-update-time", "name", "parent", "primary-tactic", "primary-techniques", "principal-email", "privileges-required", "region-code", "resource-name", "scope", "security-marks", "service-name", "severity", "state", "upstream-fix-available", "user-agent-family", "user-interaction", "version", "vulnerability"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "additional-tactics", "additional-techniques", "attack-complexity", "attack-vector", "availability-impact", "base-score", "caller-ip", "caller-ip-geo", "canonical-name", "category", "confidentiality-impact", "create-time", "cve", "cvssv3", "database", "description", "display-name", "domains", "event-time", "external-uri", "finding-class", "grantees", "id", "indicator", "integrity-impact", "ip-addresses", "kernel-rootkit", "marks", "method-name", "mitre-attack", "mute", "mute-initiator", "mute-update-time", "name", "next-steps", "parent", "parent-display-name", "primary-tactic", "primary-techniques", "principal-email", "principal-subject", "privileges-required", "query", "region-code", "resource-name", "scope", "security-marks", "service-account-key-name", "service-name", "severity", "state", "unexpected-code-modification", "unexpected-ftrace-handler", "unexpected-interrupt-handler", "unexpected-kernel-code-pages", "unexpected-kprobe-handler", "unexpected-processes-in-runqueue", "unexpected-read-only-data-modification", "unexpected-system-call-handler", 
"upstream-fix-available", "uris", "user-agent-family", "user-interaction", "user-name", "version", "vulnerability"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1440,7 +1809,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1702,10 +2071,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -1764,7 +2133,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1910,13 +2279,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = 
call.order_by(value.unwrap_or("")); @@ -1925,10 +2294,10 @@ where call = call.filter(value.unwrap_or("")); }, "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "compare-duration" => { - call = call.compare_duration(value.unwrap_or("")); + call = call.compare_duration( value.map(|v| arg_from_str(v, err, "compare-duration", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -2103,10 +2472,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -2365,7 +2734,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2461,7 +2830,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2857,7 +3226,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", 
"int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2952,7 +3321,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3208,7 +3577,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3301,7 +3670,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3516,7 +3885,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3683,16 +4052,35 @@ where "access.caller-ip-geo.region-code" => Some(("access.callerIpGeo.regionCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.method-name" => Some(("access.methodName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.principal-email" => Some(("access.principalEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.principal-subject" => Some(("access.principalSubject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.service-account-key-name" => 
Some(("access.serviceAccountKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.service-name" => Some(("access.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.user-agent-family" => Some(("access.userAgentFamily", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.user-name" => Some(("access.userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "canonical-name" => Some(("canonicalName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "category" => Some(("category", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.display-name" => Some(("database.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.grantees" => Some(("database.grantees", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "database.name" => Some(("database.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.query" => Some(("database.query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.user-name" => Some(("database.userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event-time" => Some(("eventTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-uri" => Some(("externalUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "finding-class" => Some(("findingClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "indicator.domains" => Some(("indicator.domains", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "indicator.ip-addresses" => 
Some(("indicator.ipAddresses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "indicator.uris" => Some(("indicator.uris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "kernel-rootkit.name" => Some(("kernelRootkit.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-code-modification" => Some(("kernelRootkit.unexpectedCodeModification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-ftrace-handler" => Some(("kernelRootkit.unexpectedFtraceHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-interrupt-handler" => Some(("kernelRootkit.unexpectedInterruptHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-kernel-code-pages" => Some(("kernelRootkit.unexpectedKernelCodePages", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-kprobe-handler" => Some(("kernelRootkit.unexpectedKprobeHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-processes-in-runqueue" => Some(("kernelRootkit.unexpectedProcessesInRunqueue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-read-only-data-modification" => Some(("kernelRootkit.unexpectedReadOnlyDataModification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-system-call-handler" => Some(("kernelRootkit.unexpectedSystemCallHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "mitre-attack.additional-tactics" => Some(("mitreAttack.additionalTactics", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mitre-attack.additional-techniques" => Some(("mitreAttack.additionalTechniques", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Vec })), "mitre-attack.primary-tactic" => Some(("mitreAttack.primaryTactic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3702,7 +4090,9 @@ where "mute-initiator" => Some(("muteInitiator", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mute-update-time" => Some(("muteUpdateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "next-steps" => Some(("nextSteps", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent-display-name" => Some(("parentDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-marks.canonical-name" => Some(("securityMarks.canonicalName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-marks.marks" => Some(("securityMarks.marks", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -3721,7 +4111,7 @@ where "vulnerability.cve.id" => Some(("vulnerability.cve.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cve.upstream-fix-available" => Some(("vulnerability.cve.upstreamFixAvailable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "additional-tactics", "additional-techniques", "attack-complexity", "attack-vector", "availability-impact", "base-score", "caller-ip", "caller-ip-geo", "canonical-name", "category", "confidentiality-impact", "create-time", "cve", "cvssv3", "domains", "event-time", "external-uri", "finding-class", "id", "indicator", "integrity-impact", "ip-addresses", "marks", "method-name", "mitre-attack", "mute", 
"mute-initiator", "mute-update-time", "name", "parent", "primary-tactic", "primary-techniques", "principal-email", "privileges-required", "region-code", "resource-name", "scope", "security-marks", "service-name", "severity", "state", "upstream-fix-available", "user-agent-family", "user-interaction", "version", "vulnerability"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "additional-tactics", "additional-techniques", "attack-complexity", "attack-vector", "availability-impact", "base-score", "caller-ip", "caller-ip-geo", "canonical-name", "category", "confidentiality-impact", "create-time", "cve", "cvssv3", "database", "description", "display-name", "domains", "event-time", "external-uri", "finding-class", "grantees", "id", "indicator", "integrity-impact", "ip-addresses", "kernel-rootkit", "marks", "method-name", "mitre-attack", "mute", "mute-initiator", "mute-update-time", "name", "next-steps", "parent", "parent-display-name", "primary-tactic", "primary-techniques", "principal-email", "principal-subject", "privileges-required", "query", "region-code", "resource-name", "scope", "security-marks", "service-account-key-name", "service-name", "severity", "state", "unexpected-code-modification", "unexpected-ftrace-handler", "unexpected-interrupt-handler", "unexpected-kernel-code-pages", "unexpected-kprobe-handler", "unexpected-processes-in-runqueue", "unexpected-read-only-data-modification", "unexpected-system-call-handler", "upstream-fix-available", "uris", "user-agent-family", "user-interaction", "user-name", "version", "vulnerability"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3829,7 +4219,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", 
"google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -3975,13 +4365,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3990,10 +4380,10 @@ where call = call.filter(value.unwrap_or("")); }, "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "compare-duration" => { - call = call.compare_duration(value.unwrap_or("")); + call = call.compare_duration( value.map(|v| arg_from_str(v, err, "compare-duration", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -4069,16 +4459,35 @@ where "access.caller-ip-geo.region-code" => Some(("access.callerIpGeo.regionCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.method-name" => Some(("access.methodName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.principal-email" => Some(("access.principalEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.principal-subject" => Some(("access.principalSubject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.service-account-key-name" => Some(("access.serviceAccountKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.service-name" => Some(("access.serviceName", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.user-agent-family" => Some(("access.userAgentFamily", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.user-name" => Some(("access.userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "canonical-name" => Some(("canonicalName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "category" => Some(("category", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.display-name" => Some(("database.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.grantees" => Some(("database.grantees", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "database.name" => Some(("database.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.query" => Some(("database.query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.user-name" => Some(("database.userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event-time" => Some(("eventTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-uri" => Some(("externalUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "finding-class" => Some(("findingClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "indicator.domains" => Some(("indicator.domains", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "indicator.ip-addresses" => Some(("indicator.ipAddresses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "indicator.uris" => Some(("indicator.uris", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Vec })), + "kernel-rootkit.name" => Some(("kernelRootkit.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-code-modification" => Some(("kernelRootkit.unexpectedCodeModification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-ftrace-handler" => Some(("kernelRootkit.unexpectedFtraceHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-interrupt-handler" => Some(("kernelRootkit.unexpectedInterruptHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-kernel-code-pages" => Some(("kernelRootkit.unexpectedKernelCodePages", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-kprobe-handler" => Some(("kernelRootkit.unexpectedKprobeHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-processes-in-runqueue" => Some(("kernelRootkit.unexpectedProcessesInRunqueue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-read-only-data-modification" => Some(("kernelRootkit.unexpectedReadOnlyDataModification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-system-call-handler" => Some(("kernelRootkit.unexpectedSystemCallHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "mitre-attack.additional-tactics" => Some(("mitreAttack.additionalTactics", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mitre-attack.additional-techniques" => Some(("mitreAttack.additionalTechniques", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mitre-attack.primary-tactic" => Some(("mitreAttack.primaryTactic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -4088,7 +4497,9 @@ where 
"mute-initiator" => Some(("muteInitiator", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mute-update-time" => Some(("muteUpdateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "next-steps" => Some(("nextSteps", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent-display-name" => Some(("parentDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-marks.canonical-name" => Some(("securityMarks.canonicalName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-marks.marks" => Some(("securityMarks.marks", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -4107,7 +4518,7 @@ where "vulnerability.cve.id" => Some(("vulnerability.cve.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cve.upstream-fix-available" => Some(("vulnerability.cve.upstreamFixAvailable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "additional-tactics", "additional-techniques", "attack-complexity", "attack-vector", "availability-impact", "base-score", "caller-ip", "caller-ip-geo", "canonical-name", "category", "confidentiality-impact", "create-time", "cve", "cvssv3", "domains", "event-time", "external-uri", "finding-class", "id", "indicator", "integrity-impact", "ip-addresses", "marks", "method-name", "mitre-attack", "mute", "mute-initiator", "mute-update-time", "name", "parent", "primary-tactic", "primary-techniques", "principal-email", "privileges-required", "region-code", "resource-name", "scope", 
"security-marks", "service-name", "severity", "state", "upstream-fix-available", "user-agent-family", "user-interaction", "version", "vulnerability"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "additional-tactics", "additional-techniques", "attack-complexity", "attack-vector", "availability-impact", "base-score", "caller-ip", "caller-ip-geo", "canonical-name", "category", "confidentiality-impact", "create-time", "cve", "cvssv3", "database", "description", "display-name", "domains", "event-time", "external-uri", "finding-class", "grantees", "id", "indicator", "integrity-impact", "ip-addresses", "kernel-rootkit", "marks", "method-name", "mitre-attack", "mute", "mute-initiator", "mute-update-time", "name", "next-steps", "parent", "parent-display-name", "primary-tactic", "primary-techniques", "principal-email", "principal-subject", "privileges-required", "query", "region-code", "resource-name", "scope", "security-marks", "service-account-key-name", "service-name", "severity", "state", "unexpected-code-modification", "unexpected-ftrace-handler", "unexpected-interrupt-handler", "unexpected-kernel-code-pages", "unexpected-kprobe-handler", "unexpected-processes-in-runqueue", "unexpected-read-only-data-modification", "unexpected-system-call-handler", "upstream-fix-available", "uris", "user-agent-family", "user-interaction", "user-name", "version", "vulnerability"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4122,7 +4533,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4384,10 +4795,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = 
call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -4583,7 +4994,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4675,7 +5086,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -4940,7 +5351,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5086,13 +5497,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -5101,10 
+5512,10 @@ where call = call.filter(value.unwrap_or("")); }, "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "compare-duration" => { - call = call.compare_duration(value.unwrap_or("")); + call = call.compare_duration( value.map(|v| arg_from_str(v, err, "compare-duration", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -5195,10 +5606,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -5457,7 +5868,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5553,7 +5964,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -5897,7 +6308,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; 
@@ -5992,7 +6403,356 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_notification_configs_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, 
value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pubsub-topic" => Some(("pubsubTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "streaming-config.filter" => Some(("streamingConfig.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "filter", "name", "pubsub-topic", "service-account", "streaming-config"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NotificationConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().notification_configs_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "config-id" => { + call = call.config_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["config-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_notification_configs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().notification_configs_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = 
CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_notification_configs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().notification_configs_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_notification_configs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().notification_configs_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match 
match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_notification_configs_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "pubsub-topic" => Some(("pubsubTopic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "service-account" => Some(("serviceAccount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "streaming-config.filter" => Some(("streamingConfig.filter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "filter", "name", "pubsub-topic", "service-account", "streaming-config"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::NotificationConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().notification_configs_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6085,7 +6845,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6231,13 +6991,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { 
"read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -6246,10 +7006,10 @@ where call = call.filter(value.unwrap_or("")); }, "field-mask" => { - call = call.field_mask(value.unwrap_or("")); + call = call.field_mask( value.map(|v| arg_from_str(v, err, "field-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "compare-duration" => { - call = call.compare_duration(value.unwrap_or("")); + call = call.compare_duration( value.map(|v| arg_from_str(v, err, "compare-duration", "google-duration")).unwrap_or(chrono::Duration::seconds(0))); }, _ => { let mut found = false; @@ -6325,16 +7085,35 @@ where "access.caller-ip-geo.region-code" => Some(("access.callerIpGeo.regionCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.method-name" => Some(("access.methodName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.principal-email" => Some(("access.principalEmail", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.principal-subject" => Some(("access.principalSubject", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "access.service-account-key-name" => Some(("access.serviceAccountKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.service-name" => Some(("access.serviceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "access.user-agent-family" => Some(("access.userAgentFamily", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "access.user-name" => Some(("access.userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "canonical-name" => Some(("canonicalName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "category" => Some(("category", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.display-name" => Some(("database.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.grantees" => Some(("database.grantees", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "database.name" => Some(("database.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.query" => Some(("database.query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "database.user-name" => Some(("database.userName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "event-time" => Some(("eventTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-uri" => Some(("externalUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "finding-class" => Some(("findingClass", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "indicator.domains" => Some(("indicator.domains", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "indicator.ip-addresses" => Some(("indicator.ipAddresses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "indicator.uris" => Some(("indicator.uris", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "kernel-rootkit.name" => Some(("kernelRootkit.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"kernel-rootkit.unexpected-code-modification" => Some(("kernelRootkit.unexpectedCodeModification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-ftrace-handler" => Some(("kernelRootkit.unexpectedFtraceHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-interrupt-handler" => Some(("kernelRootkit.unexpectedInterruptHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-kernel-code-pages" => Some(("kernelRootkit.unexpectedKernelCodePages", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-kprobe-handler" => Some(("kernelRootkit.unexpectedKprobeHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-processes-in-runqueue" => Some(("kernelRootkit.unexpectedProcessesInRunqueue", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-read-only-data-modification" => Some(("kernelRootkit.unexpectedReadOnlyDataModification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "kernel-rootkit.unexpected-system-call-handler" => Some(("kernelRootkit.unexpectedSystemCallHandler", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "mitre-attack.additional-tactics" => Some(("mitreAttack.additionalTactics", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mitre-attack.additional-techniques" => Some(("mitreAttack.additionalTechniques", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mitre-attack.primary-tactic" => Some(("mitreAttack.primaryTactic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -6344,7 +7123,9 @@ where "mute-initiator" => Some(("muteInitiator", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "mute-update-time" => Some(("muteUpdateTime", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "next-steps" => Some(("nextSteps", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "parent-display-name" => Some(("parentDisplayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "resource-name" => Some(("resourceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-marks.canonical-name" => Some(("securityMarks.canonicalName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "security-marks.marks" => Some(("securityMarks.marks", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -6363,7 +7144,7 @@ where "vulnerability.cve.id" => Some(("vulnerability.cve.id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vulnerability.cve.upstream-fix-available" => Some(("vulnerability.cve.upstreamFixAvailable", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["access", "additional-tactics", "additional-techniques", "attack-complexity", "attack-vector", "availability-impact", "base-score", "caller-ip", "caller-ip-geo", "canonical-name", "category", "confidentiality-impact", "create-time", "cve", "cvssv3", "domains", "event-time", "external-uri", "finding-class", "id", "indicator", "integrity-impact", "ip-addresses", "marks", "method-name", "mitre-attack", "mute", "mute-initiator", "mute-update-time", "name", "parent", "primary-tactic", "primary-techniques", "principal-email", "privileges-required", "region-code", "resource-name", "scope", "security-marks", "service-name", "severity", "state", "upstream-fix-available", "user-agent-family", "user-interaction", "version", "vulnerability"]); + let 
suggestion = FieldCursor::did_you_mean(key, &vec!["access", "additional-tactics", "additional-techniques", "attack-complexity", "attack-vector", "availability-impact", "base-score", "caller-ip", "caller-ip-geo", "canonical-name", "category", "confidentiality-impact", "create-time", "cve", "cvssv3", "database", "description", "display-name", "domains", "event-time", "external-uri", "finding-class", "grantees", "id", "indicator", "integrity-impact", "ip-addresses", "kernel-rootkit", "marks", "method-name", "mitre-attack", "mute", "mute-initiator", "mute-update-time", "name", "next-steps", "parent", "parent-display-name", "primary-tactic", "primary-techniques", "principal-email", "principal-subject", "privileges-required", "query", "region-code", "resource-name", "scope", "security-marks", "service-account-key-name", "service-name", "severity", "state", "unexpected-code-modification", "unexpected-ftrace-handler", "unexpected-interrupt-handler", "unexpected-kernel-code-pages", "unexpected-kprobe-handler", "unexpected-processes-in-runqueue", "unexpected-read-only-data-modification", "unexpected-system-call-handler", "upstream-fix-available", "uris", "user-agent-family", "user-interaction", "user-name", "version", "vulnerability"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6378,7 +7159,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -6640,10 +7421,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, 
"start-time" => { - call = call.start_time(value.unwrap_or("")); + call = call.start_time( value.map(|v| arg_from_str(v, err, "start-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -6702,7 +7483,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6800,6 +7581,21 @@ where ("mute-configs-patch", Some(opt)) => { call_result = self._folders_mute_configs_patch(opt, dry_run, &mut err).await; }, + ("notification-configs-create", Some(opt)) => { + call_result = self._folders_notification_configs_create(opt, dry_run, &mut err).await; + }, + ("notification-configs-delete", Some(opt)) => { + call_result = self._folders_notification_configs_delete(opt, dry_run, &mut err).await; + }, + ("notification-configs-get", Some(opt)) => { + call_result = self._folders_notification_configs_get(opt, dry_run, &mut err).await; + }, + ("notification-configs-list", Some(opt)) => { + call_result = self._folders_notification_configs_list(opt, dry_run, &mut err).await; + }, + ("notification-configs-patch", Some(opt)) => { + call_result = self._folders_notification_configs_patch(opt, dry_run, &mut err).await; + }, ("sources-findings-external-systems-patch", Some(opt)) => { call_result = self._folders_sources_findings_external_systems_patch(opt, dry_run, &mut err).await; }, @@ -7005,6 +7801,21 @@ where ("mute-configs-patch", Some(opt)) => { call_result = self._projects_mute_configs_patch(opt, dry_run, &mut err).await; }, + ("notification-configs-create", Some(opt)) => { + call_result = self._projects_notification_configs_create(opt, dry_run, &mut err).await; + }, + ("notification-configs-delete", Some(opt)) => { + call_result = self._projects_notification_configs_delete(opt, dry_run, &mut err).await; + }, 
+ ("notification-configs-get", Some(opt)) => { + call_result = self._projects_notification_configs_get(opt, dry_run, &mut err).await; + }, + ("notification-configs-list", Some(opt)) => { + call_result = self._projects_notification_configs_list(opt, dry_run, &mut err).await; + }, + ("notification-configs-patch", Some(opt)) => { + call_result = self._projects_notification_configs_patch(opt, dry_run, &mut err).await; + }, ("sources-findings-external-systems-patch", Some(opt)) => { call_result = self._projects_sources_findings_external_systems_patch(opt, dry_run, &mut err).await; }, @@ -7108,14 +7919,14 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("folders", "methods: 'assets-group', 'assets-list', 'assets-update-security-marks', 'big-query-exports-create', 'big-query-exports-delete', 'big-query-exports-get', 'big-query-exports-list', 'big-query-exports-patch', 'findings-bulk-mute', 'mute-configs-create', 'mute-configs-delete', 'mute-configs-get', 'mute-configs-list', 'mute-configs-patch', 'sources-findings-external-systems-patch', 'sources-findings-group', 'sources-findings-list', 'sources-findings-patch', 'sources-findings-set-mute', 'sources-findings-set-state', 'sources-findings-update-security-marks' and 'sources-list'", vec![ + ("folders", "methods: 'assets-group', 'assets-list', 'assets-update-security-marks', 'big-query-exports-create', 'big-query-exports-delete', 'big-query-exports-get', 'big-query-exports-list', 'big-query-exports-patch', 'findings-bulk-mute', 'mute-configs-create', 'mute-configs-delete', 'mute-configs-get', 'mute-configs-list', 'mute-configs-patch', 'notification-configs-create', 'notification-configs-delete', 'notification-configs-get', 'notification-configs-list', 'notification-configs-patch', 'sources-findings-external-systems-patch', 'sources-findings-group', 'sources-findings-list', 'sources-findings-patch', 'sources-findings-set-mute', 'sources-findings-set-state', 'sources-findings-update-security-marks' 
and 'sources-list'", vec![ ("assets-group", Some(r##"Filters an organization's assets and groups them by their specified properties."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_assets-group", vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the organization to groupBy. Its format is "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. The name of the parent to group the assets by. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -7143,7 +7954,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the organization assets should belong to. Its format is "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. The name of the parent resource that contains the assets. The value that you can specify on parent depends on the method in which you specify parent. You can specify one of the following values: "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -7188,12 +7999,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-create", - Some(r##"Creates a big query export."##), + Some(r##"Creates a BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_big-query-exports-create", vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name of the new big query export's parent. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), + Some(r##"Required. The name of the parent resource of the new BigQuery export. 
Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -7216,12 +8027,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-delete", - Some(r##"Deletes an existing big query export."##), + Some(r##"Deletes an existing BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_big-query-exports-delete", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the big query export to delete. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), + Some(r##"Required. The name of the BigQuery export to delete. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), Some(true), Some(false)), @@ -7238,12 +8049,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-get", - Some(r##"Gets a big query export."##), + Some(r##"Gets a BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_big-query-exports-get", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the big query export to retrieve. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), + Some(r##"Required. Name of the BigQuery export to retrieve. 
Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), Some(true), Some(false)), @@ -7453,6 +8264,128 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-create", + Some(r##"Creates a notification config."##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_notification-configs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Resource name of the new notification config's parent. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-delete", + Some(r##"Deletes a notification config."##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_notification-configs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the notification config to delete. 
Its format is "organizations/[organization_id]/notificationConfigs/[config_id]", "folders/[folder_id]/notificationConfigs/[config_id]", or "projects/[project_id]/notificationConfigs/[config_id]"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-get", + Some(r##"Gets a notification config."##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_notification-configs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the notification config to get. Its format is "organizations/[organization_id]/notificationConfigs/[config_id]", "folders/[folder_id]/notificationConfigs/[config_id]", or "projects/[project_id]/notificationConfigs/[config_id]"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-list", + Some(r##"Lists notification configs."##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_notification-configs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The name of the parent in which to list the notification configurations. 
Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-patch", + Some(r##" Updates a notification config. The following update fields are allowed: description, pubsub_topic, streaming_config.filter"##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/folders_notification-configs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"The relative resource name of this notification config. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/notificationConfigs/notify_public_bucket", "folders/{folder_id}/notificationConfigs/notify_public_bucket", or "projects/{project_id}/notificationConfigs/notify_public_bucket"."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -7465,7 +8398,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"External System Name e.g. jira, demisto, etc. 
e.g.: organizations/1234/sources/5678/findings/123456/externalSystems/jira folders/1234/sources/5678/findings/123456/externalSystems/jira projects/1234/sources/5678/findings/123456/externalSystems/jira"##), + Some(r##"Full resource name of the external system, for example: "organizations/1234/sources/5678/findings/123456/externalSystems/jira", "folders/1234/sources/5678/findings/123456/externalSystems/jira", "projects/1234/sources/5678/findings/123456/externalSystems/jira""##), Some(true), Some(false)), @@ -7571,7 +8504,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The relative resource name of the finding. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}", "folders/{folder_id}/sources/{source_id}/finding/{finding_id}", "projects/{project_id}/sources/{source_id}/finding/{finding_id}"."##), + Some(r##"Required. The [relative resource name](https://cloud.google.com/apis/design/resource_names#relative_resource_name) of the finding. Example: "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}", "folders/{folder_id}/sources/{source_id}/findings/{finding_id}", "projects/{project_id}/sources/{source_id}/findings/{finding_id}"."##), Some(true), Some(false)), @@ -7599,7 +8532,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The relative resource name of the finding. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}"."##), + Some(r##"Required. The [relative resource name](https://cloud.google.com/apis/design/resource_names#relative_resource_name) of the finding. 
Example: "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}", "folders/{folder_id}/sources/{source_id}/findings/{finding_id}", "projects/{project_id}/sources/{source_id}/findings/{finding_id}"."##), Some(true), Some(false)), @@ -7655,7 +8588,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name of the parent of sources to list. Its format should be "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. Resource name of the parent of sources to list. Its format should be "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -7680,7 +8613,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the organization to groupBy. Its format is "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. The name of the parent to group the assets by. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -7708,7 +8641,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the organization assets should belong to. Its format is "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. The name of the parent resource that contains the assets. The value that you can specify on parent depends on the method in which you specify parent. 
You can specify one of the following values: "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -7781,12 +8714,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-create", - Some(r##"Creates a big query export."##), + Some(r##"Creates a BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/organizations_big-query-exports-create", vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name of the new big query export's parent. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), + Some(r##"Required. The name of the parent resource of the new BigQuery export. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -7809,12 +8742,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-delete", - Some(r##"Deletes an existing big query export."##), + Some(r##"Deletes an existing BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/organizations_big-query-exports-delete", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the big query export to delete. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), + Some(r##"Required. The name of the BigQuery export to delete. 
Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), Some(true), Some(false)), @@ -7831,12 +8764,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-get", - Some(r##"Gets a big query export."##), + Some(r##"Gets a BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/organizations_big-query-exports-get", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the big query export to retrieve. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), + Some(r##"Required. Name of the BigQuery export to retrieve. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), Some(true), Some(false)), @@ -8080,7 +9013,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name of the new notification config's parent. Its format is "organizations/[organization_id]"."##), + Some(r##"Required. Resource name of the new notification config's parent. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -8108,7 +9041,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the notification config to delete. Its format is "organizations/[organization_id]/notificationConfigs/[config_id]"."##), + Some(r##"Required. Name of the notification config to delete. 
Its format is "organizations/[organization_id]/notificationConfigs/[config_id]", "folders/[folder_id]/notificationConfigs/[config_id]", or "projects/[project_id]/notificationConfigs/[config_id]"."##), Some(true), Some(false)), @@ -8130,7 +9063,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the notification config to get. Its format is "organizations/[organization_id]/notificationConfigs/[config_id]"."##), + Some(r##"Required. Name of the notification config to get. Its format is "organizations/[organization_id]/notificationConfigs/[config_id]", "folders/[folder_id]/notificationConfigs/[config_id]", or "projects/[project_id]/notificationConfigs/[config_id]"."##), Some(true), Some(false)), @@ -8152,7 +9085,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the organization to list notification configs. Its format is "organizations/[organization_id]"."##), + Some(r##"Required. The name of the parent in which to list the notification configurations. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -8174,7 +9107,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"The relative resource name of this notification config. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/notificationConfigs/notify_public_bucket"."##), + Some(r##"The relative resource name of this notification config. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/notificationConfigs/notify_public_bucket", "folders/{folder_id}/notificationConfigs/notify_public_bucket", or "projects/{project_id}/notificationConfigs/notify_public_bucket"."##), Some(true), Some(false)), @@ -8346,7 +9279,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"External System Name e.g. jira, demisto, etc. 
e.g.: organizations/1234/sources/5678/findings/123456/externalSystems/jira folders/1234/sources/5678/findings/123456/externalSystems/jira projects/1234/sources/5678/findings/123456/externalSystems/jira"##), + Some(r##"Full resource name of the external system, for example: "organizations/1234/sources/5678/findings/123456/externalSystems/jira", "folders/1234/sources/5678/findings/123456/externalSystems/jira", "projects/1234/sources/5678/findings/123456/externalSystems/jira""##), Some(true), Some(false)), @@ -8452,7 +9385,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The relative resource name of the finding. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}", "folders/{folder_id}/sources/{source_id}/finding/{finding_id}", "projects/{project_id}/sources/{source_id}/finding/{finding_id}"."##), + Some(r##"Required. The [relative resource name](https://cloud.google.com/apis/design/resource_names#relative_resource_name) of the finding. Example: "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}", "folders/{folder_id}/sources/{source_id}/findings/{finding_id}", "projects/{project_id}/sources/{source_id}/findings/{finding_id}"."##), Some(true), Some(false)), @@ -8480,7 +9413,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The relative resource name of the finding. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}"."##), + Some(r##"Required. The [relative resource name](https://cloud.google.com/apis/design/resource_names#relative_resource_name) of the finding. 
Example: "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}", "folders/{folder_id}/sources/{source_id}/findings/{finding_id}", "projects/{project_id}/sources/{source_id}/findings/{finding_id}"."##), Some(true), Some(false)), @@ -8558,7 +9491,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8586,7 +9519,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name of the parent of sources to list. Its format should be "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. Resource name of the parent of sources to list. Its format should be "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -8636,7 +9569,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8664,7 +9597,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -8716,14 +9649,14 @@ async fn main() { ]), ]), - ("projects", "methods: 'assets-group', 'assets-list', 'assets-update-security-marks', 'big-query-exports-create', 'big-query-exports-delete', 'big-query-exports-get', 'big-query-exports-list', 'big-query-exports-patch', 'findings-bulk-mute', 'mute-configs-create', 'mute-configs-delete', 'mute-configs-get', 'mute-configs-list', 'mute-configs-patch', 'sources-findings-external-systems-patch', 'sources-findings-group', 'sources-findings-list', 'sources-findings-patch', 'sources-findings-set-mute', 'sources-findings-set-state', 'sources-findings-update-security-marks' and 'sources-list'", vec![ + ("projects", "methods: 'assets-group', 'assets-list', 'assets-update-security-marks', 'big-query-exports-create', 'big-query-exports-delete', 'big-query-exports-get', 'big-query-exports-list', 'big-query-exports-patch', 'findings-bulk-mute', 'mute-configs-create', 'mute-configs-delete', 'mute-configs-get', 'mute-configs-list', 'mute-configs-patch', 'notification-configs-create', 'notification-configs-delete', 'notification-configs-get', 'notification-configs-list', 'notification-configs-patch', 'sources-findings-external-systems-patch', 'sources-findings-group', 'sources-findings-list', 'sources-findings-patch', 'sources-findings-set-mute', 'sources-findings-set-state', 'sources-findings-update-security-marks' and 'sources-list'", vec![ ("assets-group", Some(r##"Filters an organization's assets and groups them by their specified properties."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_assets-group", vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the organization to groupBy. Its format is "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. 
The name of the parent to group the assets by. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -8751,7 +9684,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Name of the organization assets should belong to. Its format is "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. The name of the parent resource that contains the assets. The value that you can specify on parent depends on the method in which you specify parent. You can specify one of the following values: "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -8796,12 +9729,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-create", - Some(r##"Creates a big query export."##), + Some(r##"Creates a BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_big-query-exports-create", vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name of the new big query export's parent. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), + Some(r##"Required. The name of the parent resource of the new BigQuery export. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -8824,12 +9757,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-delete", - Some(r##"Deletes an existing big query export."##), + Some(r##"Deletes an existing BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_big-query-exports-delete", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the big query export to delete. 
Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), + Some(r##"Required. The name of the BigQuery export to delete. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), Some(true), Some(false)), @@ -8846,12 +9779,12 @@ async fn main() { Some(false)), ]), ("big-query-exports-get", - Some(r##"Gets a big query export."##), + Some(r##"Gets a BigQuery export."##), "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_big-query-exports-get", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the big query export to retrieve. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), + Some(r##"Required. Name of the BigQuery export to retrieve. Its format is organizations/{organization}/bigQueryExports/{export_id}, folders/{folder}/bigQueryExports/{export_id}, or projects/{project}/bigQueryExports/{export_id}"##), Some(true), Some(false)), @@ -9061,6 +9994,128 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-create", + Some(r##"Creates a notification config."##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_notification-configs-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Resource name of the new notification config's parent. 
Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-delete", + Some(r##"Deletes a notification config."##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_notification-configs-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the notification config to delete. Its format is "organizations/[organization_id]/notificationConfigs/[config_id]", "folders/[folder_id]/notificationConfigs/[config_id]", or "projects/[project_id]/notificationConfigs/[config_id]"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-get", + Some(r##"Gets a notification config."##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_notification-configs-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. Name of the notification config to get. 
Its format is "organizations/[organization_id]/notificationConfigs/[config_id]", "folders/[folder_id]/notificationConfigs/[config_id]", or "projects/[project_id]/notificationConfigs/[config_id]"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-list", + Some(r##"Lists notification configs."##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_notification-configs-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The name of the parent in which to list the notification configurations. Its format is "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("notification-configs-patch", + Some(r##" Updates a notification config. The following update fields are allowed: description, pubsub_topic, streaming_config.filter"##), + "Details at http://byron.github.io/google-apis-rs/google_securitycenter1_cli/projects_notification-configs-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"The relative resource name of this notification config. 
See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/notificationConfigs/notify_public_bucket", "folders/{folder_id}/notificationConfigs/notify_public_bucket", or "projects/{project_id}/notificationConfigs/notify_public_bucket"."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -9073,7 +10128,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"External System Name e.g. jira, demisto, etc. e.g.: organizations/1234/sources/5678/findings/123456/externalSystems/jira folders/1234/sources/5678/findings/123456/externalSystems/jira projects/1234/sources/5678/findings/123456/externalSystems/jira"##), + Some(r##"Full resource name of the external system, for example: "organizations/1234/sources/5678/findings/123456/externalSystems/jira", "folders/1234/sources/5678/findings/123456/externalSystems/jira", "projects/1234/sources/5678/findings/123456/externalSystems/jira""##), Some(true), Some(false)), @@ -9179,7 +10234,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The relative resource name of the finding. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}", "folders/{folder_id}/sources/{source_id}/finding/{finding_id}", "projects/{project_id}/sources/{source_id}/finding/{finding_id}"."##), + Some(r##"Required. The [relative resource name](https://cloud.google.com/apis/design/resource_names#relative_resource_name) of the finding. 
Example: "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}", "folders/{folder_id}/sources/{source_id}/findings/{finding_id}", "projects/{project_id}/sources/{source_id}/findings/{finding_id}"."##), Some(true), Some(false)), @@ -9207,7 +10262,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The relative resource name of the finding. See: https://cloud.google.com/apis/design/resource_names#relative_resource_name Example: "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}"."##), + Some(r##"Required. The [relative resource name](https://cloud.google.com/apis/design/resource_names#relative_resource_name) of the finding. Example: "organizations/{organization_id}/sources/{source_id}/findings/{finding_id}", "folders/{folder_id}/sources/{source_id}/findings/{finding_id}", "projects/{project_id}/sources/{source_id}/findings/{finding_id}"."##), Some(true), Some(false)), @@ -9263,7 +10318,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. Resource name of the parent of sources to list. Its format should be "organizations/[organization_id], folders/[folder_id], or projects/[project_id]"."##), + Some(r##"Required. Resource name of the parent of sources to list. 
Its format should be "organizations/[organization_id]", "folders/[folder_id]", or "projects/[project_id]"."##), Some(true), Some(false)), @@ -9285,7 +10340,7 @@ async fn main() { let mut app = App::new("securitycenter1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230123") .about("Security Command Center API provides access to temporal views of assets and findings within an organization.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_securitycenter1_cli") .arg(Arg::with_name("url") diff --git a/gen/securitycenter1/Cargo.toml b/gen/securitycenter1/Cargo.toml index 01ad023f6b..24e4af7e25 100644 --- a/gen/securitycenter1/Cargo.toml +++ b/gen/securitycenter1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-securitycenter1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Security Command Center (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/securitycenter1" homepage = "https://cloud.google.com/security-command-center" -documentation = "https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-securitycenter1/5.0.2+20230123" license = "MIT" keywords = ["securitycenter", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/securitycenter1/README.md b/gen/securitycenter1/README.md index a4c53e833e..a5a454031f 100644 --- a/gen/securitycenter1/README.md +++ b/gen/securitycenter1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-securitycenter1` library allows access to all features of the *Google Security Command Center* service. -This documentation was generated from *Security Command Center* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *securitycenter:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Security Command Center* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *securitycenter:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Security Command Center* *v1* API can be found at the [official documentation site](https://cloud.google.com/security-command-center). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/SecurityCommandCenter) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/SecurityCommandCenter) ... -* [folders](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::Folder) - * [*assets group*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderAssetGroupCall), [*assets list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderAssetListCall), [*assets update security marks*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderAssetUpdateSecurityMarkCall), [*big query exports create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderBigQueryExportCreateCall), [*big query exports delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderBigQueryExportDeleteCall), [*big query exports get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderBigQueryExportGetCall), [*big query exports list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderBigQueryExportListCall), [*big query exports 
patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderBigQueryExportPatchCall), [*findings bulk mute*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderFindingBulkMuteCall), [*mute configs create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderMuteConfigCreateCall), [*mute configs delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderMuteConfigDeleteCall), [*mute configs get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderMuteConfigGetCall), [*mute configs list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderMuteConfigListCall), [*mute configs patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderMuteConfigPatchCall), [*notification configs create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderNotificationConfigCreateCall), [*notification configs delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderNotificationConfigDeleteCall), [*notification configs get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderNotificationConfigGetCall), [*notification configs list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderNotificationConfigListCall), [*notification configs patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderNotificationConfigPatchCall), [*sources findings external systems patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderSourceFindingExternalSystemPatchCall), [*sources findings 
group*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderSourceFindingGroupCall), [*sources findings list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderSourceFindingListCall), [*sources findings patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderSourceFindingPatchCall), [*sources findings set mute*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderSourceFindingSetMuteCall), [*sources findings set state*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderSourceFindingSetStateCall), [*sources findings update security marks*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderSourceFindingUpdateSecurityMarkCall) and [*sources list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::FolderSourceListCall) +* [folders](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::Folder) + * [*assets group*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderAssetGroupCall), [*assets list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderAssetListCall), [*assets update security marks*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderAssetUpdateSecurityMarkCall), [*big query exports create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderBigQueryExportCreateCall), [*big query exports delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderBigQueryExportDeleteCall), [*big query exports get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderBigQueryExportGetCall), [*big query exports 
list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderBigQueryExportListCall), [*big query exports patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderBigQueryExportPatchCall), [*findings bulk mute*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderFindingBulkMuteCall), [*mute configs create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderMuteConfigCreateCall), [*mute configs delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderMuteConfigDeleteCall), [*mute configs get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderMuteConfigGetCall), [*mute configs list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderMuteConfigListCall), [*mute configs patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderMuteConfigPatchCall), [*notification configs create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderNotificationConfigCreateCall), [*notification configs delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderNotificationConfigDeleteCall), [*notification configs get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderNotificationConfigGetCall), [*notification configs list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderNotificationConfigListCall), [*notification configs patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderNotificationConfigPatchCall), [*sources findings external systems patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderSourceFindingExternalSystemPatchCall), [*sources findings 
group*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderSourceFindingGroupCall), [*sources findings list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderSourceFindingListCall), [*sources findings patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderSourceFindingPatchCall), [*sources findings set mute*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderSourceFindingSetMuteCall), [*sources findings set state*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderSourceFindingSetStateCall), [*sources findings update security marks*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderSourceFindingUpdateSecurityMarkCall) and [*sources list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::FolderSourceListCall) * organizations - * [*assets group*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationAssetGroupCall), [*assets list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationAssetListCall), [*assets run discovery*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationAssetRunDiscoveryCall), [*assets update security marks*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationAssetUpdateSecurityMarkCall), [*big query exports create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationBigQueryExportCreateCall), [*big query exports delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationBigQueryExportDeleteCall), [*big query exports 
get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationBigQueryExportGetCall), [*big query exports list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationBigQueryExportListCall), [*big query exports patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationBigQueryExportPatchCall), [*findings bulk mute*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationFindingBulkMuteCall), [*get organization settings*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationGetOrganizationSettingCall), [*mute configs create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationMuteConfigCreateCall), [*mute configs delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationMuteConfigDeleteCall), [*mute configs get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationMuteConfigGetCall), [*mute configs list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationMuteConfigListCall), [*mute configs patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationMuteConfigPatchCall), [*notification configs create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationNotificationConfigCreateCall), [*notification configs delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationNotificationConfigDeleteCall), [*notification configs get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationNotificationConfigGetCall), [*notification configs 
list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationNotificationConfigListCall), [*notification configs patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationNotificationConfigPatchCall), [*operations cancel*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationOperationCancelCall), [*operations delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationOperationDeleteCall), [*operations get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationOperationGetCall), [*operations list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationOperationListCall), [*sources create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceCreateCall), [*sources findings create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceFindingCreateCall), [*sources findings external systems patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceFindingExternalSystemPatchCall), [*sources findings group*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceFindingGroupCall), [*sources findings list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceFindingListCall), [*sources findings patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceFindingPatchCall), [*sources findings set mute*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceFindingSetMuteCall), [*sources findings set 
state*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceFindingSetStateCall), [*sources findings update security marks*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceFindingUpdateSecurityMarkCall), [*sources get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceGetCall), [*sources get iam policy*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceGetIamPolicyCall), [*sources list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceListCall), [*sources patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourcePatchCall), [*sources set iam policy*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceSetIamPolicyCall), [*sources test iam permissions*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationSourceTestIamPermissionCall) and [*update organization settings*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::OrganizationUpdateOrganizationSettingCall) + * [*assets group*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationAssetGroupCall), [*assets list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationAssetListCall), [*assets run discovery*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationAssetRunDiscoveryCall), [*assets update security marks*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationAssetUpdateSecurityMarkCall), [*big query exports 
create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationBigQueryExportCreateCall), [*big query exports delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationBigQueryExportDeleteCall), [*big query exports get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationBigQueryExportGetCall), [*big query exports list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationBigQueryExportListCall), [*big query exports patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationBigQueryExportPatchCall), [*findings bulk mute*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationFindingBulkMuteCall), [*get organization settings*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationGetOrganizationSettingCall), [*mute configs create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationMuteConfigCreateCall), [*mute configs delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationMuteConfigDeleteCall), [*mute configs get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationMuteConfigGetCall), [*mute configs list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationMuteConfigListCall), [*mute configs patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationMuteConfigPatchCall), [*notification configs create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationNotificationConfigCreateCall), [*notification configs 
delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationNotificationConfigDeleteCall), [*notification configs get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationNotificationConfigGetCall), [*notification configs list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationNotificationConfigListCall), [*notification configs patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationNotificationConfigPatchCall), [*operations cancel*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationOperationCancelCall), [*operations delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationOperationDeleteCall), [*operations get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationOperationGetCall), [*operations list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationOperationListCall), [*sources create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceCreateCall), [*sources findings create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceFindingCreateCall), [*sources findings external systems patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceFindingExternalSystemPatchCall), [*sources findings group*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceFindingGroupCall), [*sources findings list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceFindingListCall), [*sources findings 
patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceFindingPatchCall), [*sources findings set mute*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceFindingSetMuteCall), [*sources findings set state*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceFindingSetStateCall), [*sources findings update security marks*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceFindingUpdateSecurityMarkCall), [*sources get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceGetCall), [*sources get iam policy*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceGetIamPolicyCall), [*sources list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceListCall), [*sources patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourcePatchCall), [*sources set iam policy*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceSetIamPolicyCall), [*sources test iam permissions*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationSourceTestIamPermissionCall) and [*update organization settings*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::OrganizationUpdateOrganizationSettingCall) * projects - * [*assets group*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectAssetGroupCall), [*assets list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectAssetListCall), [*assets update security 
marks*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectAssetUpdateSecurityMarkCall), [*big query exports create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectBigQueryExportCreateCall), [*big query exports delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectBigQueryExportDeleteCall), [*big query exports get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectBigQueryExportGetCall), [*big query exports list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectBigQueryExportListCall), [*big query exports patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectBigQueryExportPatchCall), [*findings bulk mute*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectFindingBulkMuteCall), [*mute configs create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectMuteConfigCreateCall), [*mute configs delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectMuteConfigDeleteCall), [*mute configs get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectMuteConfigGetCall), [*mute configs list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectMuteConfigListCall), [*mute configs patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectMuteConfigPatchCall), [*notification configs create*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectNotificationConfigCreateCall), [*notification configs 
delete*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectNotificationConfigDeleteCall), [*notification configs get*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectNotificationConfigGetCall), [*notification configs list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectNotificationConfigListCall), [*notification configs patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectNotificationConfigPatchCall), [*sources findings external systems patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectSourceFindingExternalSystemPatchCall), [*sources findings group*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectSourceFindingGroupCall), [*sources findings list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectSourceFindingListCall), [*sources findings patch*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectSourceFindingPatchCall), [*sources findings set mute*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectSourceFindingSetMuteCall), [*sources findings set state*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectSourceFindingSetStateCall), [*sources findings update security marks*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectSourceFindingUpdateSecurityMarkCall) and [*sources list*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/api::ProjectSourceListCall) + * [*assets group*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectAssetGroupCall), [*assets 
list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectAssetListCall), [*assets update security marks*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectAssetUpdateSecurityMarkCall), [*big query exports create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectBigQueryExportCreateCall), [*big query exports delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectBigQueryExportDeleteCall), [*big query exports get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectBigQueryExportGetCall), [*big query exports list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectBigQueryExportListCall), [*big query exports patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectBigQueryExportPatchCall), [*findings bulk mute*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectFindingBulkMuteCall), [*mute configs create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectMuteConfigCreateCall), [*mute configs delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectMuteConfigDeleteCall), [*mute configs get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectMuteConfigGetCall), [*mute configs list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectMuteConfigListCall), [*mute configs patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectMuteConfigPatchCall), [*notification configs create*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectNotificationConfigCreateCall), [*notification configs 
delete*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectNotificationConfigDeleteCall), [*notification configs get*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectNotificationConfigGetCall), [*notification configs list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectNotificationConfigListCall), [*notification configs patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectNotificationConfigPatchCall), [*sources findings external systems patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectSourceFindingExternalSystemPatchCall), [*sources findings group*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectSourceFindingGroupCall), [*sources findings list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectSourceFindingListCall), [*sources findings patch*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectSourceFindingPatchCall), [*sources findings set mute*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectSourceFindingSetMuteCall), [*sources findings set state*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectSourceFindingSetStateCall), [*sources findings update security marks*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectSourceFindingUpdateSecurityMarkCall) and [*sources list*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/api::ProjectSourceListCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/SecurityCommandCenter)** +* **[Hub](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/SecurityCommandCenter)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::CallBuilder) -* **[Resources](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::CallBuilder) +* **[Resources](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::Part)** + * **[Parts](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -155,17 +155,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -175,29 +175,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::Delegate) to the -[Method Builder](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::Delegate) to the +[Method Builder](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::RequestValue) and -[decodable](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::RequestValue) and +[decodable](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-securitycenter1/5.0.2-beta-1+20230123/google_securitycenter1/client::RequestValue) are moved +* [request values](https://docs.rs/google-securitycenter1/5.0.2+20230123/google_securitycenter1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/securitycenter1/src/api.rs b/gen/securitycenter1/src/api.rs index af39c522fc..19df3f57a7 100644 --- a/gen/securitycenter1/src/api.rs +++ b/gen/securitycenter1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> SecurityCommandCenter { SecurityCommandCenter { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://securitycenter.googleapis.com/".to_string(), _root_url: "https://securitycenter.googleapis.com/".to_string(), } @@ -143,7 +143,7 @@ impl<'a, S> SecurityCommandCenter { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/securitycenter1/src/client.rs b/gen/securitycenter1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/securitycenter1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/securitycenter1/src/lib.rs b/gen/securitycenter1/src/lib.rs index aa7fe10ef6..4ef4cd21b0 100644 --- a/gen/securitycenter1/src/lib.rs +++ b/gen/securitycenter1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Security Command Center* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *securitycenter:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Security Command Center* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *securitycenter:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Security Command Center* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/security-command-center). diff --git a/gen/servicebroker1-cli/Cargo.toml b/gen/servicebroker1-cli/Cargo.toml index d9cc346739..92a97d1d1c 100644 --- a/gen/servicebroker1-cli/Cargo.toml +++ b/gen/servicebroker1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-servicebroker1-cli" -version = "4.0.1+20190624" +version = "5.0.2+20190624" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Broker (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicebroker1-cli" @@ -20,13 +20,13 @@ name = "servicebroker1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-servicebroker1] path = "../servicebroker1" -version = "4.0.1+20190624" +version = "5.0.2+20190624" + diff --git a/gen/servicebroker1-cli/README.md b/gen/servicebroker1-cli/README.md index a1f15cb0db..0f833011a7 100644 --- a/gen/servicebroker1-cli/README.md +++ b/gen/servicebroker1-cli/README.md 
@@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Service Broker* API at revision *20190624*. The CLI is at version *4.0.1*. +This documentation was generated from the *Service Broker* API at revision *20190624*. The CLI is at version *5.0.2*. ```bash servicebroker1 [options] diff --git a/gen/servicebroker1-cli/mkdocs.yml b/gen/servicebroker1-cli/mkdocs.yml index d0a32c3ca0..fb7adfb175 100644 --- a/gen/servicebroker1-cli/mkdocs.yml +++ b/gen/servicebroker1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Service Broker v4.0.1+20190624 +site_name: Service Broker v5.0.2+20190624 site_url: http://byron.github.io/google-apis-rs/google-servicebroker1-cli site_description: A complete library to interact with Service Broker (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/servicebroker1-c docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['methods_get-iam-policy.md', 'Methods', 'Get Iam Policy'] -- ['methods_set-iam-policy.md', 'Methods', 'Set Iam Policy'] -- ['methods_test-iam-permissions.md', 'Methods', 'Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Methods': + - 'Get Iam Policy': 'methods_get-iam-policy.md' + - 'Set Iam Policy': 'methods_set-iam-policy.md' + - 'Test Iam Permissions': 'methods_test-iam-permissions.md' theme: readthedocs diff --git a/gen/servicebroker1-cli/src/client.rs b/gen/servicebroker1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/servicebroker1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; 
-use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/servicebroker1-cli/src/main.rs b/gen/servicebroker1-cli/src/main.rs index da9e2e5ffe..d45253ab32 100644 --- a/gen/servicebroker1-cli/src/main.rs +++ b/gen/servicebroker1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_servicebroker1::{api, Error, oauth2}; +use google_servicebroker1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -466,7 +465,7 @@ async fn main() { let mut app = App::new("servicebroker1") .author("Sebastian Thiel ") - .version("4.0.1+20190624") + .version("5.0.2+20190624") .about("The Google Cloud Platform Service Broker API provides Google hosted implementation of the Open Service Broker API (https://www.openservicebrokerapi.org/). 
diff --git a/gen/servicebroker1/Cargo.toml b/gen/servicebroker1/Cargo.toml index 99656a271b..4ab0b601d3 100644 --- a/gen/servicebroker1/Cargo.toml +++ b/gen/servicebroker1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-servicebroker1" -version = "5.0.2-beta-1+20190624" +version = "5.0.2+20190624" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Broker (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicebroker1" homepage = "https://cloud.google.com/kubernetes-engine/docs/concepts/add-on/service-broker" -documentation = "https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624" +documentation = "https://docs.rs/google-servicebroker1/5.0.2+20190624" license = "MIT" keywords = ["servicebroker", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/servicebroker1/README.md b/gen/servicebroker1/README.md index ecfc8a3072..4f4f93ccba 100644 --- a/gen/servicebroker1/README.md +++ b/gen/servicebroker1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-servicebroker1` library allows access to all features of the *Google Service Broker* service. -This documentation was generated from *Service Broker* crate version *5.0.2-beta-1+20190624*, where *20190624* is the exact revision of the *servicebroker:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Service Broker* crate version *5.0.2+20190624*, where *20190624* is the exact revision of the *servicebroker:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Service Broker* *v1* API can be found at the [official documentation site](https://cloud.google.com/kubernetes-engine/docs/concepts/add-on/service-broker). 
# Features -Use the following functionality with ease from the central [hub](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/ServiceBroker) ... +Use the following functionality with ease from the central [hub](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/ServiceBroker) ... -* [get iam policy](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/api::MethodGetIamPolicyCall) -* [set iam policy](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/api::MethodSetIamPolicyCall) -* [test iam permissions](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/api::MethodTestIamPermissionCall) +* [get iam policy](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/api::MethodGetIamPolicyCall) +* [set iam policy](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/api::MethodSetIamPolicyCall) +* [test iam permissions](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/api::MethodTestIamPermissionCall) @@ -24,17 +24,17 @@ Use the following functionality with ease from the central [hub](https://docs.rs The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/ServiceBroker)** +* **[Hub](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/ServiceBroker)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::CallBuilder) -* **[Resources](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::Resource)** + * 
creates [*Method Builders*](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::CallBuilder) +* **[Resources](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::Part)** + * **[Parts](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::Delegate) to the -[Method Builder](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::Delegate) to the +[Method Builder](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::RequestValue) and -[decodable](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::RequestValue) and +[decodable](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-servicebroker1/5.0.2-beta-1+20190624/google_servicebroker1/client::RequestValue) are moved +* [request values](https://docs.rs/google-servicebroker1/5.0.2+20190624/google_servicebroker1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/servicebroker1/src/api.rs b/gen/servicebroker1/src/api.rs index 2551866ad0..e9f99c820e 100644 --- a/gen/servicebroker1/src/api.rs +++ b/gen/servicebroker1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> ServiceBroker { ServiceBroker { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://servicebroker.googleapis.com/".to_string(), _root_url: "https://servicebroker.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> ServiceBroker { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/servicebroker1/src/client.rs b/gen/servicebroker1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/servicebroker1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/servicebroker1/src/lib.rs b/gen/servicebroker1/src/lib.rs index 5cb6d1118e..2190b62fc1 100644 --- a/gen/servicebroker1/src/lib.rs +++ b/gen/servicebroker1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Service Broker* crate version *5.0.2-beta-1+20190624*, where *20190624* is the exact revision of the *servicebroker:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Service Broker* crate version *5.0.2+20190624*, where *20190624* is the exact revision of the *servicebroker:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Service Broker* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/kubernetes-engine/docs/concepts/add-on/service-broker). diff --git a/gen/servicecontrol1-cli/Cargo.toml b/gen/servicecontrol1-cli/Cargo.toml index ad9ebede06..41ff5db3a5 100644 --- a/gen/servicecontrol1-cli/Cargo.toml +++ b/gen/servicecontrol1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-servicecontrol1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Control (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicecontrol1-cli" @@ -20,13 +20,13 @@ name = "servicecontrol1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-servicecontrol1] path = "../servicecontrol1" -version = "4.0.1+20220227" +version = "5.0.2+20230106" + diff --git a/gen/servicecontrol1-cli/README.md b/gen/servicecontrol1-cli/README.md index 3b289a65c7..4d9e226bed 100644 --- a/gen/servicecontrol1-cli/README.md +++ 
b/gen/servicecontrol1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Service Control* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *Service Control* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash servicecontrol1 [options] diff --git a/gen/servicecontrol1-cli/mkdocs.yml b/gen/servicecontrol1-cli/mkdocs.yml index e24702a0ba..8b2790d800 100644 --- a/gen/servicecontrol1-cli/mkdocs.yml +++ b/gen/servicecontrol1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Service Control v4.0.1+20220227 +site_name: Service Control v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-servicecontrol1-cli site_description: A complete library to interact with Service Control (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/servicecontrol1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['services_allocate-quota.md', 'Services', 'Allocate Quota'] -- ['services_check.md', 'Services', 'Check'] -- ['services_report.md', 'Services', 'Report'] +nav: +- Home: 'index.md' +- 'Services': + - 'Allocate Quota': 'services_allocate-quota.md' + - 'Check': 'services_check.md' + - 'Report': 'services_report.md' theme: readthedocs diff --git a/gen/servicecontrol1-cli/src/client.rs b/gen/servicecontrol1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/servicecontrol1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; 
-use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/servicecontrol1-cli/src/main.rs b/gen/servicecontrol1-cli/src/main.rs index ac1e130298..078541b397 100644 --- a/gen/servicecontrol1-cli/src/main.rs +++ b/gen/servicecontrol1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_servicecontrol1::{api, Error, oauth2}; +use google_servicecontrol1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -508,7 +507,7 @@ async fn main() { let mut app = App::new("servicecontrol1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230106") .about("Provides admission control and telemetry reporting for services integrated with Service Infrastructure. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_servicecontrol1_cli") .arg(Arg::with_name("url") diff --git a/gen/servicecontrol1/Cargo.toml b/gen/servicecontrol1/Cargo.toml index 3e72ba09a3..c3921b5f48 100644 --- a/gen/servicecontrol1/Cargo.toml +++ b/gen/servicecontrol1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-servicecontrol1" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Control (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicecontrol1" homepage = "https://cloud.google.com/service-control/" -documentation = "https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-servicecontrol1/5.0.2+20230106" license = "MIT" keywords = ["servicecontrol", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/servicecontrol1/README.md b/gen/servicecontrol1/README.md index 388d116e37..43238586e4 100644 --- a/gen/servicecontrol1/README.md +++ b/gen/servicecontrol1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-servicecontrol1` library allows access to all features of the *Google Service Control* service. -This documentation was generated from *Service Control* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *servicecontrol:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Service Control* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *servicecontrol:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Service Control* *v1* API can be found at the [official documentation site](https://cloud.google.com/service-control/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/ServiceControl) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/ServiceControl) ... * services - * [*allocate quota*](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/api::ServiceAllocateQuotaCall), [*check*](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/api::ServiceCheckCall) and [*report*](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/api::ServiceReportCall) + * [*allocate quota*](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/api::ServiceAllocateQuotaCall), [*check*](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/api::ServiceCheckCall) and [*report*](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/api::ServiceReportCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/ServiceControl)** +* **[Hub](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/ServiceControl)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::CallBuilder) -* **[Resources](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::CallBuilder) +* **[Resources](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::Part)** + * **[Parts](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::Delegate) to the -[Method Builder](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::Delegate) to the +[Method Builder](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::RequestValue) and -[decodable](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::RequestValue) and +[decodable](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-servicecontrol1/5.0.2-beta-1+20230106/google_servicecontrol1/client::RequestValue) are moved +* [request values](https://docs.rs/google-servicecontrol1/5.0.2+20230106/google_servicecontrol1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/servicecontrol1/src/api.rs b/gen/servicecontrol1/src/api.rs index 567d738796..675b78b900 100644 --- a/gen/servicecontrol1/src/api.rs +++ b/gen/servicecontrol1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> ServiceControl { ServiceControl { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://servicecontrol.googleapis.com/".to_string(), _root_url: "https://servicecontrol.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> ServiceControl { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/servicecontrol1/src/client.rs b/gen/servicecontrol1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/servicecontrol1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/servicecontrol1/src/lib.rs b/gen/servicecontrol1/src/lib.rs index 54c23c2c60..fae986f1bf 100644 --- a/gen/servicecontrol1/src/lib.rs +++ b/gen/servicecontrol1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Service Control* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *servicecontrol:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Service Control* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *servicecontrol:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Service Control* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/service-control/). diff --git a/gen/servicecontrol2-cli/Cargo.toml b/gen/servicecontrol2-cli/Cargo.toml index dbc8cd2412..e3a9261640 100644 --- a/gen/servicecontrol2-cli/Cargo.toml +++ b/gen/servicecontrol2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-servicecontrol2-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Control (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicecontrol2-cli" @@ -20,13 +20,13 @@ name = "servicecontrol2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-servicecontrol2] path = "../servicecontrol2" -version = "4.0.1+20220227" +version = "5.0.2+20230106" + diff --git a/gen/servicecontrol2-cli/README.md b/gen/servicecontrol2-cli/README.md index 6edccd0299..932e42ed28 100644 --- a/gen/servicecontrol2-cli/README.md +++ b/gen/servicecontrol2-cli/README.md @@ -25,7 +25,7 @@ 
Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Service Control* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *Service Control* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash servicecontrol2 [options] diff --git a/gen/servicecontrol2-cli/mkdocs.yml b/gen/servicecontrol2-cli/mkdocs.yml index 8a3b9bc291..8bd7ccc928 100644 --- a/gen/servicecontrol2-cli/mkdocs.yml +++ b/gen/servicecontrol2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Service Control v4.0.1+20220227 +site_name: Service Control v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-servicecontrol2-cli site_description: A complete library to interact with Service Control (protocol v2) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/servicecontrol2- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['services_check.md', 'Services', 'Check'] -- ['services_report.md', 'Services', 'Report'] +nav: +- Home: 'index.md' +- 'Services': + - 'Check': 'services_check.md' + - 'Report': 'services_report.md' theme: readthedocs diff --git a/gen/servicecontrol2-cli/src/client.rs b/gen/servicecontrol2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/servicecontrol2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// 
Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/servicecontrol2-cli/src/main.rs b/gen/servicecontrol2-cli/src/main.rs index e32e61cdb6..2d57ccace9 100644 --- a/gen/servicecontrol2-cli/src/main.rs +++ b/gen/servicecontrol2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_servicecontrol2::{api, Error, oauth2}; +use google_servicecontrol2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -367,7 +366,7 @@ async fn main() { let arg_data = [ ("services", "methods: 'check' and 'report'", vec![ ("check", - Some(r##"Private Preview. This feature is only available for approved services. This method provides admission control for services that are integrated with [Service Infrastructure](/service-infrastructure). It checks whether an operation should be allowed based on the service configuration and relevant policies. It must be called before the operation is executed. For more information, see [Admission Control](/service-infrastructure/docs/admission-control). NOTE: The admission control has an expected policy propagation delay of 60s. The caller **must** not depend on the most recent policy changes. NOTE: The admission control has a hard limit of 1 referenced resources per call. If an operation refers to more than 1 resources, the caller must call the Check method multiple times. This method requires the `servicemanagement.services.check` permission on the specified service. 
For more information, see [Service Control API Access Control](https://cloud.google.com/service-infrastructure/docs/service-control/access-control)."##), + Some(r##"Private Preview. This feature is only available for approved services. This method provides admission control for services that are integrated with [Service Infrastructure](https://cloud.google.com/service-infrastructure). It checks whether an operation should be allowed based on the service configuration and relevant policies. It must be called before the operation is executed. For more information, see [Admission Control](https://cloud.google.com/service-infrastructure/docs/admission-control). NOTE: The admission control has an expected policy propagation delay of 60s. The caller **must** not depend on the most recent policy changes. NOTE: The admission control has a hard limit of 1 referenced resources per call. If an operation refers to more than 1 resources, the caller must call the Check method multiple times. This method requires the `servicemanagement.services.check` permission on the specified service. For more information, see [Service Control API Access Control](https://cloud.google.com/service-infrastructure/docs/service-control/access-control)."##), "Details at http://byron.github.io/google-apis-rs/google_servicecontrol2_cli/services_check", vec![ (Some(r##"service-name"##), @@ -395,7 +394,7 @@ async fn main() { Some(false)), ]), ("report", - Some(r##"Private Preview. This feature is only available for approved services. This method provides telemetry reporting for services that are integrated with [Service Infrastructure](/service-infrastructure). It reports a list of operations that have occurred on a service. It must be called after the operations have been executed. For more information, see [Telemetry Reporting](/service-infrastructure/docs/telemetry-reporting). NOTE: The telemetry reporting has a hard limit of 1000 operations and 1MB per Report call. 
It is recommended to have no more than 100 operations per call. This method requires the `servicemanagement.services.report` permission on the specified service. For more information, see [Service Control API Access Control](https://cloud.google.com/service-infrastructure/docs/service-control/access-control)."##), + Some(r##"Private Preview. This feature is only available for approved services. This method provides telemetry reporting for services that are integrated with [Service Infrastructure](https://cloud.google.com/service-infrastructure). It reports a list of operations that have occurred on a service. It must be called after the operations have been executed. For more information, see [Telemetry Reporting](https://cloud.google.com/service-infrastructure/docs/telemetry-reporting). NOTE: The telemetry reporting has a hard limit of 1000 operations and 1MB per Report call. It is recommended to have no more than 100 operations per call. This method requires the `servicemanagement.services.report` permission on the specified service. For more information, see [Service Control API Access Control](https://cloud.google.com/service-infrastructure/docs/service-control/access-control)."##), "Details at http://byron.github.io/google-apis-rs/google_servicecontrol2_cli/services_report", vec![ (Some(r##"service-name"##), @@ -428,7 +427,7 @@ async fn main() { let mut app = App::new("servicecontrol2") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230106") .about("Provides admission control and telemetry reporting for services integrated with Service Infrastructure. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_servicecontrol2_cli") .arg(Arg::with_name("url") diff --git a/gen/servicecontrol2/Cargo.toml b/gen/servicecontrol2/Cargo.toml index b2d18f79e4..8d2628a431 100644 --- a/gen/servicecontrol2/Cargo.toml +++ b/gen/servicecontrol2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-servicecontrol2" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Control (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicecontrol2" homepage = "https://cloud.google.com/service-control/" -documentation = "https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-servicecontrol2/5.0.2+20230106" license = "MIT" keywords = ["servicecontrol", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/servicecontrol2/README.md b/gen/servicecontrol2/README.md index 5d3bed5b80..e52ed932a8 100644 --- a/gen/servicecontrol2/README.md +++ b/gen/servicecontrol2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-servicecontrol2` library allows access to all features of the *Google Service Control* service. -This documentation was generated from *Service Control* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *servicecontrol:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Service Control* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *servicecontrol:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Service Control* *v2* API can be found at the [official documentation site](https://cloud.google.com/service-control/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/ServiceControl) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/ServiceControl) ... * services - * [*check*](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/api::ServiceCheckCall) and [*report*](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/api::ServiceReportCall) + * [*check*](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/api::ServiceCheckCall) and [*report*](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/api::ServiceReportCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/ServiceControl)** +* **[Hub](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/ServiceControl)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::CallBuilder) -* **[Resources](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::CallBuilder) 
+* **[Resources](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::Part)** + * **[Parts](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. 
This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::Delegate) to the -[Method Builder](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::Delegate) to the +[Method Builder](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::RequestValue) and -[decodable](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::RequestValue) and +[decodable](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-servicecontrol2/5.0.2-beta-1+20230106/google_servicecontrol2/client::RequestValue) are moved +* [request values](https://docs.rs/google-servicecontrol2/5.0.2+20230106/google_servicecontrol2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/servicecontrol2/src/api.rs b/gen/servicecontrol2/src/api.rs index 61cf8f571c..f296fdd396 100644 --- a/gen/servicecontrol2/src/api.rs +++ b/gen/servicecontrol2/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> ServiceControl { ServiceControl { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://servicecontrol.googleapis.com/".to_string(), _root_url: "https://servicecontrol.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> ServiceControl { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/servicecontrol2/src/client.rs b/gen/servicecontrol2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/servicecontrol2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/servicecontrol2/src/lib.rs b/gen/servicecontrol2/src/lib.rs index 7335affa6f..b46040f1bf 100644 --- a/gen/servicecontrol2/src/lib.rs +++ b/gen/servicecontrol2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Service Control* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *servicecontrol:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Service Control* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *servicecontrol:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Service Control* *v2* API can be found at the //! [official documentation site](https://cloud.google.com/service-control/). diff --git a/gen/servicedirectory1-cli/Cargo.toml b/gen/servicedirectory1-cli/Cargo.toml index 4f3f32375e..65bfcee186 100644 --- a/gen/servicedirectory1-cli/Cargo.toml +++ b/gen/servicedirectory1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-servicedirectory1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Directory (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicedirectory1-cli" @@ -20,13 +20,13 @@ name = "servicedirectory1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-servicedirectory1] path = "../servicedirectory1" -version = "4.0.1+20220224" +version = "5.0.2+20230105" + diff --git a/gen/servicedirectory1-cli/README.md b/gen/servicedirectory1-cli/README.md index 09dbdd3f05..6656fa6588 100644 --- a/gen/servicedirectory1-cli/README.md +++ 
b/gen/servicedirectory1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Service Directory* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Service Directory* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash servicedirectory1 [options] diff --git a/gen/servicedirectory1-cli/mkdocs.yml b/gen/servicedirectory1-cli/mkdocs.yml index cc5f439493..d31edccc43 100644 --- a/gen/servicedirectory1-cli/mkdocs.yml +++ b/gen/servicedirectory1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Service Directory v4.0.1+20220224 +site_name: Service Directory v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-servicedirectory1-cli site_description: A complete library to interact with Service Directory (protocol v1) @@ -7,32 +7,33 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/servicedirectory docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-namespaces-create.md', 'Projects', 'Locations Namespaces Create'] -- ['projects_locations-namespaces-delete.md', 'Projects', 'Locations Namespaces Delete'] -- ['projects_locations-namespaces-get.md', 'Projects', 'Locations Namespaces Get'] -- ['projects_locations-namespaces-get-iam-policy.md', 'Projects', 'Locations Namespaces Get Iam Policy'] -- ['projects_locations-namespaces-list.md', 'Projects', 'Locations Namespaces List'] -- ['projects_locations-namespaces-patch.md', 'Projects', 'Locations Namespaces Patch'] -- ['projects_locations-namespaces-services-create.md', 'Projects', 'Locations Namespaces Services Create'] -- ['projects_locations-namespaces-services-delete.md', 'Projects', 'Locations Namespaces Services Delete'] -- 
['projects_locations-namespaces-services-endpoints-create.md', 'Projects', 'Locations Namespaces Services Endpoints Create'] -- ['projects_locations-namespaces-services-endpoints-delete.md', 'Projects', 'Locations Namespaces Services Endpoints Delete'] -- ['projects_locations-namespaces-services-endpoints-get.md', 'Projects', 'Locations Namespaces Services Endpoints Get'] -- ['projects_locations-namespaces-services-endpoints-list.md', 'Projects', 'Locations Namespaces Services Endpoints List'] -- ['projects_locations-namespaces-services-endpoints-patch.md', 'Projects', 'Locations Namespaces Services Endpoints Patch'] -- ['projects_locations-namespaces-services-get.md', 'Projects', 'Locations Namespaces Services Get'] -- ['projects_locations-namespaces-services-get-iam-policy.md', 'Projects', 'Locations Namespaces Services Get Iam Policy'] -- ['projects_locations-namespaces-services-list.md', 'Projects', 'Locations Namespaces Services List'] -- ['projects_locations-namespaces-services-patch.md', 'Projects', 'Locations Namespaces Services Patch'] -- ['projects_locations-namespaces-services-resolve.md', 'Projects', 'Locations Namespaces Services Resolve'] -- ['projects_locations-namespaces-services-set-iam-policy.md', 'Projects', 'Locations Namespaces Services Set Iam Policy'] -- ['projects_locations-namespaces-services-test-iam-permissions.md', 'Projects', 'Locations Namespaces Services Test Iam Permissions'] -- ['projects_locations-namespaces-set-iam-policy.md', 'Projects', 'Locations Namespaces Set Iam Policy'] -- ['projects_locations-namespaces-test-iam-permissions.md', 'Projects', 'Locations Namespaces Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Namespaces Create': 'projects_locations-namespaces-create.md' + - 'Locations Namespaces Delete': 'projects_locations-namespaces-delete.md' + - 'Locations Namespaces Get': 
'projects_locations-namespaces-get.md' + - 'Locations Namespaces Get Iam Policy': 'projects_locations-namespaces-get-iam-policy.md' + - 'Locations Namespaces List': 'projects_locations-namespaces-list.md' + - 'Locations Namespaces Patch': 'projects_locations-namespaces-patch.md' + - 'Locations Namespaces Services Create': 'projects_locations-namespaces-services-create.md' + - 'Locations Namespaces Services Delete': 'projects_locations-namespaces-services-delete.md' + - 'Locations Namespaces Services Endpoints Create': 'projects_locations-namespaces-services-endpoints-create.md' + - 'Locations Namespaces Services Endpoints Delete': 'projects_locations-namespaces-services-endpoints-delete.md' + - 'Locations Namespaces Services Endpoints Get': 'projects_locations-namespaces-services-endpoints-get.md' + - 'Locations Namespaces Services Endpoints List': 'projects_locations-namespaces-services-endpoints-list.md' + - 'Locations Namespaces Services Endpoints Patch': 'projects_locations-namespaces-services-endpoints-patch.md' + - 'Locations Namespaces Services Get': 'projects_locations-namespaces-services-get.md' + - 'Locations Namespaces Services Get Iam Policy': 'projects_locations-namespaces-services-get-iam-policy.md' + - 'Locations Namespaces Services List': 'projects_locations-namespaces-services-list.md' + - 'Locations Namespaces Services Patch': 'projects_locations-namespaces-services-patch.md' + - 'Locations Namespaces Services Resolve': 'projects_locations-namespaces-services-resolve.md' + - 'Locations Namespaces Services Set Iam Policy': 'projects_locations-namespaces-services-set-iam-policy.md' + - 'Locations Namespaces Services Test Iam Permissions': 'projects_locations-namespaces-services-test-iam-permissions.md' + - 'Locations Namespaces Set Iam Policy': 'projects_locations-namespaces-set-iam-policy.md' + - 'Locations Namespaces Test Iam Permissions': 'projects_locations-namespaces-test-iam-permissions.md' theme: readthedocs diff --git 
a/gen/servicedirectory1-cli/src/client.rs b/gen/servicedirectory1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/servicedirectory1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/servicedirectory1-cli/src/main.rs b/gen/servicedirectory1-cli/src/main.rs index 50e0d3c5f2..22f340cd18 100644 --- a/gen/servicedirectory1-cli/src/main.rs +++ b/gen/servicedirectory1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_servicedirectory1::{api, Error, oauth2}; +use google_servicedirectory1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -190,8 +189,9 @@ where match &temp_cursor.to_string()[..] 
{ "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["labels", "name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["labels", "name", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -454,7 +454,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -534,8 +534,9 @@ where match &temp_cursor.to_string()[..] { "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["labels", "name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["labels", "name", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -550,7 +551,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -624,8 +625,9 @@ where match &temp_cursor.to_string()[..] 
{ "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "name", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -769,8 +771,9 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "port" => Some(("port", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "annotations", "name", "network", "port"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "annotations", "name", "network", "port", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -948,7 +951,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1031,8 +1034,9 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "network" => Some(("network", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "port" => Some(("port", JsonTypeInfo { jtype: JsonType::Int, ctype: 
ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "annotations", "name", "network", "port"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["address", "annotations", "name", "network", "port", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1047,7 +1051,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1243,7 +1247,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1323,8 +1327,9 @@ where match &temp_cursor.to_string()[..] 
{ "annotations" => Some(("annotations", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uid" => Some(("uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "name"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["annotations", "name", "uid"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1339,7 +1344,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2097,7 +2102,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2369,7 +2374,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2475,7 +2480,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2503,7 +2508,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2531,7 +2536,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2559,7 +2564,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2587,7 +2592,7 @@ async fn main() { let mut app = App::new("servicedirectory1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230105") .about("Service Directory is a platform for discovering, publishing, and connecting services. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_servicedirectory1_cli") .arg(Arg::with_name("url") diff --git a/gen/servicedirectory1/Cargo.toml b/gen/servicedirectory1/Cargo.toml index 75d912b0b2..2daaa5138c 100644 --- a/gen/servicedirectory1/Cargo.toml +++ b/gen/servicedirectory1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-servicedirectory1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Directory (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicedirectory1" homepage = "https://cloud.google.com/service-directory" -documentation = "https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-servicedirectory1/5.0.2+20230105" license = "MIT" keywords = ["servicedirectory", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/servicedirectory1/README.md b/gen/servicedirectory1/README.md index 2d6dc09952..c93408e261 100644 --- a/gen/servicedirectory1/README.md +++ b/gen/servicedirectory1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-servicedirectory1` library allows access to all features of the *Google Service Directory* service. -This documentation was generated from *Service Directory* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *servicedirectory:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Service Directory* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *servicedirectory:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Service Directory* *v1* API can be found at the [official documentation site](https://cloud.google.com/service-directory). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/ServiceDirectory) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/ServiceDirectory) ... * projects - * [*locations get*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationListCall), [*locations namespaces create*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceCreateCall), [*locations namespaces delete*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceDeleteCall), [*locations namespaces get*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceGetCall), [*locations namespaces get iam policy*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceGetIamPolicyCall), [*locations namespaces list*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceListCall), [*locations namespaces patch*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespacePatchCall), [*locations namespaces services create*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceCreateCall), [*locations namespaces services delete*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceDeleteCall), [*locations namespaces 
services endpoints create*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointCreateCall), [*locations namespaces services endpoints delete*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointDeleteCall), [*locations namespaces services endpoints get*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointGetCall), [*locations namespaces services endpoints list*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointListCall), [*locations namespaces services endpoints patch*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointPatchCall), [*locations namespaces services get*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceGetCall), [*locations namespaces services get iam policy*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceGetIamPolicyCall), [*locations namespaces services list*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceListCall), [*locations namespaces services patch*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServicePatchCall), [*locations namespaces services resolve*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceResolveCall), [*locations namespaces services set iam 
policy*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceSetIamPolicyCall), [*locations namespaces services test iam permissions*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceTestIamPermissionCall), [*locations namespaces set iam policy*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceSetIamPolicyCall) and [*locations namespaces test iam permissions*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/api::ProjectLocationNamespaceTestIamPermissionCall) + * [*locations get*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationListCall), [*locations namespaces create*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceCreateCall), [*locations namespaces delete*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceDeleteCall), [*locations namespaces get*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceGetCall), [*locations namespaces get iam policy*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceGetIamPolicyCall), [*locations namespaces list*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceListCall), [*locations namespaces patch*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespacePatchCall), [*locations namespaces services 
create*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceCreateCall), [*locations namespaces services delete*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceDeleteCall), [*locations namespaces services endpoints create*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointCreateCall), [*locations namespaces services endpoints delete*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointDeleteCall), [*locations namespaces services endpoints get*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointGetCall), [*locations namespaces services endpoints list*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointListCall), [*locations namespaces services endpoints patch*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceEndpointPatchCall), [*locations namespaces services get*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceGetCall), [*locations namespaces services get iam policy*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceGetIamPolicyCall), [*locations namespaces services list*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceListCall), [*locations namespaces services patch*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServicePatchCall), [*locations namespaces services 
resolve*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceResolveCall), [*locations namespaces services set iam policy*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceSetIamPolicyCall), [*locations namespaces services test iam permissions*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceServiceTestIamPermissionCall), [*locations namespaces set iam policy*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceSetIamPolicyCall) and [*locations namespaces test iam permissions*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/api::ProjectLocationNamespaceTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/ServiceDirectory)** +* **[Hub](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/ServiceDirectory)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::CallBuilder) -* **[Resources](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::CallBuilder) +* **[Resources](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::Part)** + * **[Parts](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::Delegate) to the -[Method Builder](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::Delegate) to the +[Method Builder](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::RequestValue) and -[decodable](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::RequestValue) and +[decodable](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-servicedirectory1/5.0.2-beta-1+20230105/google_servicedirectory1/client::RequestValue) are moved +* [request values](https://docs.rs/google-servicedirectory1/5.0.2+20230105/google_servicedirectory1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/servicedirectory1/src/api.rs b/gen/servicedirectory1/src/api.rs index ddf60f22c9..d64e366b4f 100644 --- a/gen/servicedirectory1/src/api.rs +++ b/gen/servicedirectory1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> ServiceDirectory { ServiceDirectory { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://servicedirectory.googleapis.com/".to_string(), _root_url: "https://servicedirectory.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> ServiceDirectory { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/servicedirectory1/src/client.rs b/gen/servicedirectory1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/servicedirectory1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/servicedirectory1/src/lib.rs b/gen/servicedirectory1/src/lib.rs index 06482bee2d..0febdd97c0 100644 --- a/gen/servicedirectory1/src/lib.rs +++ b/gen/servicedirectory1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Service Directory* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *servicedirectory:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Service Directory* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *servicedirectory:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Service Directory* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/service-directory). diff --git a/gen/servicedirectory1_beta1-cli/Cargo.toml b/gen/servicedirectory1_beta1-cli/Cargo.toml index f0d095f741..8097798dce 100644 --- a/gen/servicedirectory1_beta1-cli/Cargo.toml +++ b/gen/servicedirectory1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-servicedirectory1_beta1-cli" -version = "4.0.1+20220224" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Directory (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicedirectory1_beta1-cli" @@ -20,13 +20,13 @@ name = "servicedirectory1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-servicedirectory1_beta1] path = "../servicedirectory1_beta1" -version = "4.0.1+20220224" +version = "5.0.2+20230105" + diff --git a/gen/servicedirectory1_beta1-cli/README.md b/gen/servicedirectory1_beta1-cli/README.md index 
ac1bb37c2c..6e181aaa3b 100644 --- a/gen/servicedirectory1_beta1-cli/README.md +++ b/gen/servicedirectory1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Service Directory* API at revision *20220224*. The CLI is at version *4.0.1*. +This documentation was generated from the *Service Directory* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash servicedirectory1-beta1 [options] @@ -38,6 +38,9 @@ servicedirectory1-beta1 [options] locations-namespaces-get-iam-policy (-r )... [-p ]... [-o ] locations-namespaces-list [-p ]... [-o ] locations-namespaces-patch (-r )... [-p ]... [-o ] + locations-namespaces-service-workloads-get-iam-policy (-r )... [-p ]... [-o ] + locations-namespaces-service-workloads-set-iam-policy (-r )... [-p ]... [-o ] + locations-namespaces-service-workloads-test-iam-permissions (-r )... [-p ]... [-o ] locations-namespaces-services-create (-r )... [-p ]... [-o ] locations-namespaces-services-delete [-p ]... [-o ] locations-namespaces-services-endpoints-create (-r )... [-p ]... [-o ] @@ -54,6 +57,9 @@ servicedirectory1-beta1 [options] locations-namespaces-services-test-iam-permissions (-r )... [-p ]... [-o ] locations-namespaces-set-iam-policy (-r )... [-p ]... [-o ] locations-namespaces-test-iam-permissions (-r )... [-p ]... [-o ] + locations-registration-policies-get-iam-policy (-r )... [-p ]... [-o ] + locations-registration-policies-set-iam-policy (-r )... [-p ]... [-o ] + locations-registration-policies-test-iam-permissions (-r )... [-p ]... 
[-o ] servicedirectory1-beta1 --help Configuration: diff --git a/gen/servicedirectory1_beta1-cli/mkdocs.yml b/gen/servicedirectory1_beta1-cli/mkdocs.yml index 8227523e2b..dd5107ae86 100644 --- a/gen/servicedirectory1_beta1-cli/mkdocs.yml +++ b/gen/servicedirectory1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Service Directory v4.0.1+20220224 +site_name: Service Directory v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-servicedirectory1_beta1-cli site_description: A complete library to interact with Service Directory (protocol v1beta1) @@ -7,32 +7,39 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/servicedirectory docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-namespaces-create.md', 'Projects', 'Locations Namespaces Create'] -- ['projects_locations-namespaces-delete.md', 'Projects', 'Locations Namespaces Delete'] -- ['projects_locations-namespaces-get.md', 'Projects', 'Locations Namespaces Get'] -- ['projects_locations-namespaces-get-iam-policy.md', 'Projects', 'Locations Namespaces Get Iam Policy'] -- ['projects_locations-namespaces-list.md', 'Projects', 'Locations Namespaces List'] -- ['projects_locations-namespaces-patch.md', 'Projects', 'Locations Namespaces Patch'] -- ['projects_locations-namespaces-services-create.md', 'Projects', 'Locations Namespaces Services Create'] -- ['projects_locations-namespaces-services-delete.md', 'Projects', 'Locations Namespaces Services Delete'] -- ['projects_locations-namespaces-services-endpoints-create.md', 'Projects', 'Locations Namespaces Services Endpoints Create'] -- ['projects_locations-namespaces-services-endpoints-delete.md', 'Projects', 'Locations Namespaces Services Endpoints Delete'] -- ['projects_locations-namespaces-services-endpoints-get.md', 'Projects', 'Locations Namespaces Services Endpoints Get'] 
-- ['projects_locations-namespaces-services-endpoints-list.md', 'Projects', 'Locations Namespaces Services Endpoints List'] -- ['projects_locations-namespaces-services-endpoints-patch.md', 'Projects', 'Locations Namespaces Services Endpoints Patch'] -- ['projects_locations-namespaces-services-get.md', 'Projects', 'Locations Namespaces Services Get'] -- ['projects_locations-namespaces-services-get-iam-policy.md', 'Projects', 'Locations Namespaces Services Get Iam Policy'] -- ['projects_locations-namespaces-services-list.md', 'Projects', 'Locations Namespaces Services List'] -- ['projects_locations-namespaces-services-patch.md', 'Projects', 'Locations Namespaces Services Patch'] -- ['projects_locations-namespaces-services-resolve.md', 'Projects', 'Locations Namespaces Services Resolve'] -- ['projects_locations-namespaces-services-set-iam-policy.md', 'Projects', 'Locations Namespaces Services Set Iam Policy'] -- ['projects_locations-namespaces-services-test-iam-permissions.md', 'Projects', 'Locations Namespaces Services Test Iam Permissions'] -- ['projects_locations-namespaces-set-iam-policy.md', 'Projects', 'Locations Namespaces Set Iam Policy'] -- ['projects_locations-namespaces-test-iam-permissions.md', 'Projects', 'Locations Namespaces Test Iam Permissions'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Namespaces Create': 'projects_locations-namespaces-create.md' + - 'Locations Namespaces Delete': 'projects_locations-namespaces-delete.md' + - 'Locations Namespaces Get': 'projects_locations-namespaces-get.md' + - 'Locations Namespaces Get Iam Policy': 'projects_locations-namespaces-get-iam-policy.md' + - 'Locations Namespaces List': 'projects_locations-namespaces-list.md' + - 'Locations Namespaces Patch': 'projects_locations-namespaces-patch.md' + - 'Locations Namespaces Service Workloads Get Iam Policy': 
'projects_locations-namespaces-service-workloads-get-iam-policy.md' + - 'Locations Namespaces Service Workloads Set Iam Policy': 'projects_locations-namespaces-service-workloads-set-iam-policy.md' + - 'Locations Namespaces Service Workloads Test Iam Permissions': 'projects_locations-namespaces-service-workloads-test-iam-permissions.md' + - 'Locations Namespaces Services Create': 'projects_locations-namespaces-services-create.md' + - 'Locations Namespaces Services Delete': 'projects_locations-namespaces-services-delete.md' + - 'Locations Namespaces Services Endpoints Create': 'projects_locations-namespaces-services-endpoints-create.md' + - 'Locations Namespaces Services Endpoints Delete': 'projects_locations-namespaces-services-endpoints-delete.md' + - 'Locations Namespaces Services Endpoints Get': 'projects_locations-namespaces-services-endpoints-get.md' + - 'Locations Namespaces Services Endpoints List': 'projects_locations-namespaces-services-endpoints-list.md' + - 'Locations Namespaces Services Endpoints Patch': 'projects_locations-namespaces-services-endpoints-patch.md' + - 'Locations Namespaces Services Get': 'projects_locations-namespaces-services-get.md' + - 'Locations Namespaces Services Get Iam Policy': 'projects_locations-namespaces-services-get-iam-policy.md' + - 'Locations Namespaces Services List': 'projects_locations-namespaces-services-list.md' + - 'Locations Namespaces Services Patch': 'projects_locations-namespaces-services-patch.md' + - 'Locations Namespaces Services Resolve': 'projects_locations-namespaces-services-resolve.md' + - 'Locations Namespaces Services Set Iam Policy': 'projects_locations-namespaces-services-set-iam-policy.md' + - 'Locations Namespaces Services Test Iam Permissions': 'projects_locations-namespaces-services-test-iam-permissions.md' + - 'Locations Namespaces Set Iam Policy': 'projects_locations-namespaces-set-iam-policy.md' + - 'Locations Namespaces Test Iam Permissions': 
'projects_locations-namespaces-test-iam-permissions.md' + - 'Locations Registration Policies Get Iam Policy': 'projects_locations-registration-policies-get-iam-policy.md' + - 'Locations Registration Policies Set Iam Policy': 'projects_locations-registration-policies-set-iam-policy.md' + - 'Locations Registration Policies Test Iam Permissions': 'projects_locations-registration-policies-test-iam-permissions.md' theme: readthedocs diff --git a/gen/servicedirectory1_beta1-cli/src/client.rs b/gen/servicedirectory1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/servicedirectory1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/servicedirectory1_beta1-cli/src/main.rs b/gen/servicedirectory1_beta1-cli/src/main.rs index 0e36536912..648531b77c 100644 --- a/gen/servicedirectory1_beta1-cli/src/main.rs +++ b/gen/servicedirectory1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_servicedirectory1_beta1::{api, Error, oauth2}; +use google_servicedirectory1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -456,7 +455,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -554,7 +553,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -603,6 +602,262 @@ 
where } } + async fn _projects_locations_namespaces_service_workloads_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "options.requested-policy-version" => Some(("options.requestedPolicyVersion", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["options", "requested-policy-version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_namespaces_service_workloads_get_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_namespaces_service_workloads_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_namespaces_service_workloads_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut 
response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_namespaces_service_workloads_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_namespaces_service_workloads_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_namespaces_services_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -956,7 +1211,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1057,7 +1312,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1253,7 +1508,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1351,7 +1606,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1828,6 +2083,262 @@ where } } + async fn _projects_locations_registration_policies_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in 
opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "options.requested-policy-version" => Some(("options.requestedPolicyVersion", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["options", "requested-policy-version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_registration_policies_get_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_registration_policies_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "version"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_registration_policies_set_iam_policy(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut 
response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_registration_policies_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_registration_policies_test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + 
remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -1859,6 +2370,15 @@ where ("locations-namespaces-patch", Some(opt)) => { call_result = self._projects_locations_namespaces_patch(opt, dry_run, &mut err).await; }, + ("locations-namespaces-service-workloads-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_namespaces_service_workloads_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-namespaces-service-workloads-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_namespaces_service_workloads_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-namespaces-service-workloads-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_namespaces_service_workloads_test_iam_permissions(opt, dry_run, &mut err).await; + }, ("locations-namespaces-services-create", Some(opt)) => { call_result = self._projects_locations_namespaces_services_create(opt, dry_run, &mut err).await; }, @@ -1907,6 +2427,15 @@ where ("locations-namespaces-test-iam-permissions", Some(opt)) => { call_result = self._projects_locations_namespaces_test_iam_permissions(opt, dry_run, &mut err).await; }, + ("locations-registration-policies-get-iam-policy", Some(opt)) => { + call_result = self._projects_locations_registration_policies_get_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-registration-policies-set-iam-policy", Some(opt)) => { + call_result = self._projects_locations_registration_policies_set_iam_policy(opt, dry_run, &mut err).await; + }, + ("locations-registration-policies-test-iam-permissions", Some(opt)) => { + call_result = self._projects_locations_registration_policies_test_iam_permissions(opt, dry_run, &mut err).await; + }, _ => { 
err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -1986,7 +2515,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-list', 'locations-namespaces-create', 'locations-namespaces-delete', 'locations-namespaces-get', 'locations-namespaces-get-iam-policy', 'locations-namespaces-list', 'locations-namespaces-patch', 'locations-namespaces-services-create', 'locations-namespaces-services-delete', 'locations-namespaces-services-endpoints-create', 'locations-namespaces-services-endpoints-delete', 'locations-namespaces-services-endpoints-get', 'locations-namespaces-services-endpoints-list', 'locations-namespaces-services-endpoints-patch', 'locations-namespaces-services-get', 'locations-namespaces-services-get-iam-policy', 'locations-namespaces-services-list', 'locations-namespaces-services-patch', 'locations-namespaces-services-resolve', 'locations-namespaces-services-set-iam-policy', 'locations-namespaces-services-test-iam-permissions', 'locations-namespaces-set-iam-policy' and 'locations-namespaces-test-iam-permissions'", vec![ + ("projects", "methods: 'locations-get', 'locations-list', 'locations-namespaces-create', 'locations-namespaces-delete', 'locations-namespaces-get', 'locations-namespaces-get-iam-policy', 'locations-namespaces-list', 'locations-namespaces-patch', 'locations-namespaces-service-workloads-get-iam-policy', 'locations-namespaces-service-workloads-set-iam-policy', 'locations-namespaces-service-workloads-test-iam-permissions', 'locations-namespaces-services-create', 'locations-namespaces-services-delete', 'locations-namespaces-services-endpoints-create', 'locations-namespaces-services-endpoints-delete', 'locations-namespaces-services-endpoints-get', 'locations-namespaces-services-endpoints-list', 'locations-namespaces-services-endpoints-patch', 'locations-namespaces-services-get', 
'locations-namespaces-services-get-iam-policy', 'locations-namespaces-services-list', 'locations-namespaces-services-patch', 'locations-namespaces-services-resolve', 'locations-namespaces-services-set-iam-policy', 'locations-namespaces-services-test-iam-permissions', 'locations-namespaces-set-iam-policy', 'locations-namespaces-test-iam-permissions', 'locations-registration-policies-get-iam-policy', 'locations-registration-policies-set-iam-policy' and 'locations-registration-policies-test-iam-permissions'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-get", @@ -2104,12 +2633,12 @@ async fn main() { Some(false)), ]), ("locations-namespaces-get-iam-policy", - Some(r##"Gets the IAM Policy for a resource (namespace or service only)."##), + Some(r##"Gets the IAM Policy for a resource"##), "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-get-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2175,6 +2704,90 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-namespaces-service-workloads-get-iam-policy", + Some(r##"Gets the IAM Policy for a resource"##), + "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-service-workloads-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-namespaces-service-workloads-set-iam-policy", + Some(r##"Sets the IAM Policy for a resource"##), + "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-service-workloads-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-namespaces-service-workloads-test-iam-permissions", + Some(r##"Tests IAM permissions for a resource (namespace, service or service workload only)."##), + "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-service-workloads-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2376,12 +2989,12 @@ async fn main() { Some(false)), ]), ("locations-namespaces-services-get-iam-policy", - Some(r##"Gets the IAM Policy for a resource (namespace or service only)."##), + Some(r##"Gets the IAM Policy for a resource"##), "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-services-get-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2482,12 +3095,12 @@ async fn main() { Some(false)), ]), ("locations-namespaces-services-set-iam-policy", - Some(r##"Sets the IAM Policy for a resource (namespace or service only)."##), + Some(r##"Sets the IAM Policy for a resource"##), "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-services-set-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. 
See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2510,12 +3123,12 @@ async fn main() { Some(false)), ]), ("locations-namespaces-services-test-iam-permissions", - Some(r##"Tests IAM permissions for a resource (namespace or service only)."##), + Some(r##"Tests IAM permissions for a resource (namespace, service or service workload only)."##), "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-services-test-iam-permissions", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2538,12 +3151,12 @@ async fn main() { Some(false)), ]), ("locations-namespaces-set-iam-policy", - Some(r##"Sets the IAM Policy for a resource (namespace or service only)."##), + Some(r##"Sets the IAM Policy for a resource"##), "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-set-iam-policy", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2566,12 +3179,96 @@ async fn main() { Some(false)), ]), ("locations-namespaces-test-iam-permissions", - Some(r##"Tests IAM permissions for a resource (namespace or service only)."##), + Some(r##"Tests IAM permissions for a resource (namespace, service or service workload only)."##), "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-namespaces-test-iam-permissions", vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-registration-policies-get-iam-policy", + Some(r##"Gets the IAM Policy for a resource"##), + "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-registration-policies-get-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-registration-policies-set-iam-policy", + Some(r##"Sets the IAM Policy for a resource"##), + "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-registration-policies-set-iam-policy", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-registration-policies-test-iam-permissions", + Some(r##"Tests IAM permissions for a resource (namespace, service or service workload only)."##), + "Details at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli/projects_locations-registration-policies-test-iam-permissions", + vec![ + (Some(r##"resource"##), + None, + Some(r##"REQUIRED: The resource for which the policy detail is 
being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -2599,7 +3296,7 @@ async fn main() { let mut app = App::new("servicedirectory1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20220224") + .version("5.0.2+20230105") .about("Service Directory is a platform for discovering, publishing, and connecting services. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_servicedirectory1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/servicedirectory1_beta1/Cargo.toml b/gen/servicedirectory1_beta1/Cargo.toml index 3d35b6b0d7..acafff2232 100644 --- a/gen/servicedirectory1_beta1/Cargo.toml +++ b/gen/servicedirectory1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-servicedirectory1_beta1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Directory (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/servicedirectory1_beta1" homepage = "https://cloud.google.com/service-directory" -documentation = "https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105" license = "MIT" keywords = ["servicedirectory", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/servicedirectory1_beta1/README.md b/gen/servicedirectory1_beta1/README.md index 4eb219608e..45289c0e61 100644 --- a/gen/servicedirectory1_beta1/README.md +++ b/gen/servicedirectory1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-servicedirectory1_beta1` library allows access to all features of the *Google Service Directory* service. 
-This documentation was generated from *Service Directory* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *servicedirectory:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Service Directory* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *servicedirectory:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Service Directory* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/service-directory). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/ServiceDirectory) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/ServiceDirectory) ... 
* projects - * [*locations get*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationListCall), [*locations namespaces create*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceCreateCall), [*locations namespaces delete*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceDeleteCall), [*locations namespaces get*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceGetCall), [*locations namespaces get iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceGetIamPolicyCall), [*locations namespaces list*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceListCall), [*locations namespaces patch*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespacePatchCall), [*locations namespaces service workloads get iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceWorkloadGetIamPolicyCall), [*locations namespaces service workloads set iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceWorkloadSetIamPolicyCall), [*locations namespaces service workloads test iam 
permissions*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceWorkloadTestIamPermissionCall), [*locations namespaces services create*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceCreateCall), [*locations namespaces services delete*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceDeleteCall), [*locations namespaces services endpoints create*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointCreateCall), [*locations namespaces services endpoints delete*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointDeleteCall), [*locations namespaces services endpoints get*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointGetCall), [*locations namespaces services endpoints list*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointListCall), [*locations namespaces services endpoints patch*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointPatchCall), [*locations namespaces services get*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceGetCall), [*locations namespaces services get iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceGetIamPolicyCall), [*locations 
namespaces services list*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceListCall), [*locations namespaces services patch*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServicePatchCall), [*locations namespaces services resolve*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceResolveCall), [*locations namespaces services set iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceSetIamPolicyCall), [*locations namespaces services test iam permissions*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceTestIamPermissionCall), [*locations namespaces set iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceSetIamPolicyCall), [*locations namespaces test iam permissions*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceTestIamPermissionCall), [*locations registration policies get iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationRegistrationPolicyGetIamPolicyCall), [*locations registration policies set iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationRegistrationPolicySetIamPolicyCall) and [*locations registration policies test iam permissions*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/api::ProjectLocationRegistrationPolicyTestIamPermissionCall) + 
* [*locations get*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationListCall), [*locations namespaces create*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceCreateCall), [*locations namespaces delete*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceDeleteCall), [*locations namespaces get*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceGetCall), [*locations namespaces get iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceGetIamPolicyCall), [*locations namespaces list*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceListCall), [*locations namespaces patch*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespacePatchCall), [*locations namespaces service workloads get iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceWorkloadGetIamPolicyCall), [*locations namespaces service workloads set iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceWorkloadSetIamPolicyCall), [*locations namespaces service workloads test iam permissions*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceWorkloadTestIamPermissionCall), [*locations namespaces services 
create*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceCreateCall), [*locations namespaces services delete*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceDeleteCall), [*locations namespaces services endpoints create*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointCreateCall), [*locations namespaces services endpoints delete*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointDeleteCall), [*locations namespaces services endpoints get*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointGetCall), [*locations namespaces services endpoints list*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointListCall), [*locations namespaces services endpoints patch*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceEndpointPatchCall), [*locations namespaces services get*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceGetCall), [*locations namespaces services get iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceGetIamPolicyCall), [*locations namespaces services list*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceListCall), [*locations namespaces services 
patch*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServicePatchCall), [*locations namespaces services resolve*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceResolveCall), [*locations namespaces services set iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceSetIamPolicyCall), [*locations namespaces services test iam permissions*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceServiceTestIamPermissionCall), [*locations namespaces set iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceSetIamPolicyCall), [*locations namespaces test iam permissions*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationNamespaceTestIamPermissionCall), [*locations registration policies get iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationRegistrationPolicyGetIamPolicyCall), [*locations registration policies set iam policy*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationRegistrationPolicySetIamPolicyCall) and [*locations registration policies test iam permissions*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/api::ProjectLocationRegistrationPolicyTestIamPermissionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/ServiceDirectory)** +* **[Hub](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/ServiceDirectory)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are 
marked with applicable traits to further categorize them and ease browsing. @@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-servicedirectory1_beta1/5.0.2-beta-1+20230105/google_servicedirectory1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-servicedirectory1_beta1/5.0.2+20230105/google_servicedirectory1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/servicedirectory1_beta1/src/api.rs b/gen/servicedirectory1_beta1/src/api.rs index ee015e7d77..c430c2e000 100644 --- a/gen/servicedirectory1_beta1/src/api.rs +++ b/gen/servicedirectory1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> ServiceDirectory { ServiceDirectory { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://servicedirectory.googleapis.com/".to_string(), _root_url: "https://servicedirectory.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> ServiceDirectory { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/servicedirectory1_beta1/src/client.rs b/gen/servicedirectory1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/servicedirectory1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/servicedirectory1_beta1/src/lib.rs b/gen/servicedirectory1_beta1/src/lib.rs index b1283915c4..42c1580cbe 100644 --- a/gen/servicedirectory1_beta1/src/lib.rs +++ b/gen/servicedirectory1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Service Directory* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *servicedirectory:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Service Directory* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *servicedirectory:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Service Directory* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/service-directory). diff --git a/gen/serviceregistryalpha-cli/Cargo.toml b/gen/serviceregistryalpha-cli/Cargo.toml index 9a0798166c..a5f171fb78 100644 --- a/gen/serviceregistryalpha-cli/Cargo.toml +++ b/gen/serviceregistryalpha-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-serviceregistryalpha-cli" -version = "4.0.1+20160401" +version = "5.0.2+20160401" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Registry (protocol alpha)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/serviceregistryalpha-cli" @@ -20,13 +20,13 @@ name = "serviceregistryalpha" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-serviceregistryalpha] path = "../serviceregistryalpha" -version = "4.0.1+20160401" +version = "5.0.2+20160401" + diff --git a/gen/serviceregistryalpha-cli/README.md b/gen/serviceregistryalpha-cli/README.md index a96db7bc80..fcbf08de97 100644 --- 
a/gen/serviceregistryalpha-cli/README.md +++ b/gen/serviceregistryalpha-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Service Registry* API at revision *20160401*. The CLI is at version *4.0.1*. +This documentation was generated from the *Service Registry* API at revision *20160401*. The CLI is at version *5.0.2*. ```bash serviceregistryalpha [options] diff --git a/gen/serviceregistryalpha-cli/mkdocs.yml b/gen/serviceregistryalpha-cli/mkdocs.yml index 6a179947f5..b686590b7e 100644 --- a/gen/serviceregistryalpha-cli/mkdocs.yml +++ b/gen/serviceregistryalpha-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Service Registry v4.0.1+20160401 +site_name: Service Registry v5.0.2+20160401 site_url: http://byron.github.io/google-apis-rs/google-serviceregistryalpha-cli site_description: A complete library to interact with Service Registry (protocol alpha) @@ -7,16 +7,18 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/serviceregistrya docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['endpoints_delete.md', 'Endpoints', 'Delete'] -- ['endpoints_get.md', 'Endpoints', 'Get'] -- ['endpoints_insert.md', 'Endpoints', 'Insert'] -- ['endpoints_list.md', 'Endpoints', 'List'] -- ['endpoints_patch.md', 'Endpoints', 'Patch'] -- ['endpoints_update.md', 'Endpoints', 'Update'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] +nav: +- Home: 'index.md' +- 'Endpoints': + - 'Delete': 'endpoints_delete.md' + - 'Get': 'endpoints_get.md' + - 'Insert': 'endpoints_insert.md' + - 'List': 'endpoints_list.md' + - 'Patch': 'endpoints_patch.md' + - 'Update': 'endpoints_update.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' theme: readthedocs diff --git a/gen/serviceregistryalpha-cli/src/client.rs b/gen/serviceregistryalpha-cli/src/client.rs deleted file mode 100644 index 
0ece418e7d..0000000000 --- a/gen/serviceregistryalpha-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, 
- Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - 
} else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => 
Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match 
UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - 
-#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) 
=> { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/serviceregistryalpha-cli/src/main.rs b/gen/serviceregistryalpha-cli/src/main.rs index 458f3e4b18..0c2dae3f92 100644 --- a/gen/serviceregistryalpha-cli/src/main.rs +++ b/gen/serviceregistryalpha-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_serviceregistryalpha::{api, Error, oauth2}; +use google_serviceregistryalpha::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -262,7 +261,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -567,7 +566,7 @@ where call = call.order_by(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -968,7 +967,7 @@ async fn main() { let mut app = App::new("serviceregistryalpha") .author("Sebastian Thiel ") - .version("4.0.1+20160401") + .version("5.0.2+20160401") .about("Manages service endpoints in Service Registry and provides integration with DNS for service discovery and name resolution.") .after_help("All documentation details can be 
found at http://byron.github.io/google-apis-rs/google_serviceregistryalpha_cli") .arg(Arg::with_name("url") diff --git a/gen/serviceregistryalpha/Cargo.toml b/gen/serviceregistryalpha/Cargo.toml index 3df43b1b2c..b45106de9b 100644 --- a/gen/serviceregistryalpha/Cargo.toml +++ b/gen/serviceregistryalpha/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-serviceregistryalpha" -version = "5.0.2-beta-1+20160401" +version = "5.0.2+20160401" authors = ["Sebastian Thiel "] description = "A complete library to interact with Service Registry (protocol alpha)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/serviceregistryalpha" homepage = "https://developers.google.com/cloud-serviceregistry/" -documentation = "https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401" +documentation = "https://docs.rs/google-serviceregistryalpha/5.0.2+20160401" license = "MIT" keywords = ["serviceregistry", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/serviceregistryalpha/README.md b/gen/serviceregistryalpha/README.md index 375ba0ffa7..d2e41ea0be 100644 --- a/gen/serviceregistryalpha/README.md +++ b/gen/serviceregistryalpha/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-serviceregistryalpha` library allows access to all features of the *Google Service Registry* service. -This documentation was generated from *Service Registry* crate version *5.0.2-beta-1+20160401*, where *20160401* is the exact revision of the *serviceregistry:alpha* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Service Registry* crate version *5.0.2+20160401*, where *20160401* is the exact revision of the *serviceregistry:alpha* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Service Registry* *alpha* API can be found at the [official documentation site](https://developers.google.com/cloud-serviceregistry/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/ServiceRegistry) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/ServiceRegistry) ... -* [endpoints](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::Endpoint) - * [*delete*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::EndpointDeleteCall), [*get*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::EndpointGetCall), [*insert*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::EndpointInsertCall), [*list*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::EndpointListCall), [*patch*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::EndpointPatchCall) and [*update*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::EndpointUpdateCall) -* [operations](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::Operation) - * [*get*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::OperationGetCall) and [*list*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/api::OperationListCall) +* [endpoints](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::Endpoint) + * [*delete*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::EndpointDeleteCall), 
[*get*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::EndpointGetCall), [*insert*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::EndpointInsertCall), [*list*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::EndpointListCall), [*patch*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::EndpointPatchCall) and [*update*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::EndpointUpdateCall) +* [operations](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::Operation) + * [*get*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::OperationGetCall) and [*list*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/api::OperationListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/ServiceRegistry)** +* **[Hub](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/ServiceRegistry)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::CallBuilder) -* **[Resources](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::CallBuilder) +* **[Resources](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::Part)** + * **[Parts](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::CallBuilder)** +* **[Activities](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::Delegate) to the -[Method Builder](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::Delegate) to the +[Method Builder](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::RequestValue) and -[decodable](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::RequestValue) and +[decodable](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-serviceregistryalpha/5.0.2-beta-1+20160401/google_serviceregistryalpha/client::RequestValue) are moved +* [request values](https://docs.rs/google-serviceregistryalpha/5.0.2+20160401/google_serviceregistryalpha/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/serviceregistryalpha/src/api.rs b/gen/serviceregistryalpha/src/api.rs index 22ef9f46df..809ba5793e 100644 --- a/gen/serviceregistryalpha/src/api.rs +++ b/gen/serviceregistryalpha/src/api.rs @@ -135,7 +135,7 @@ impl<'a, S> ServiceRegistry { ServiceRegistry { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/serviceregistry/alpha/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> ServiceRegistry { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/serviceregistryalpha/src/client.rs b/gen/serviceregistryalpha/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/serviceregistryalpha/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies 
the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/serviceregistryalpha/src/lib.rs b/gen/serviceregistryalpha/src/lib.rs index 7d688e9c23..82a4a8407e 100644 --- a/gen/serviceregistryalpha/src/lib.rs +++ b/gen/serviceregistryalpha/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Service Registry* crate version *5.0.2-beta-1+20160401*, where *20160401* is the exact revision of the *serviceregistry:alpha* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Service Registry* crate version *5.0.2+20160401*, where *20160401* is the exact revision of the *serviceregistry:alpha* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Service Registry* *alpha* API can be found at the //! [official documentation site](https://developers.google.com/cloud-serviceregistry/). diff --git a/gen/sheets4-cli/Cargo.toml b/gen/sheets4-cli/Cargo.toml index fa7b31c7e0..0b53329f4a 100644 --- a/gen/sheets4-cli/Cargo.toml +++ b/gen/sheets4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-sheets4-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Sheets (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sheets4-cli" @@ -20,13 +20,13 @@ name = "sheets4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-sheets4] path = "../sheets4" -version = "4.0.1+20220221" +version = "5.0.2+20230119" + diff --git a/gen/sheets4-cli/README.md b/gen/sheets4-cli/README.md index dd10e4655f..c57564b449 100644 --- a/gen/sheets4-cli/README.md +++ b/gen/sheets4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # 
Usage -This documentation was generated from the *Sheets* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *Sheets* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash sheets4 [options] diff --git a/gen/sheets4-cli/mkdocs.yml b/gen/sheets4-cli/mkdocs.yml index 28c2de085b..cc012f933e 100644 --- a/gen/sheets4-cli/mkdocs.yml +++ b/gen/sheets4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Sheets v4.0.1+20220221 +site_name: Sheets v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-sheets4-cli site_description: A complete library to interact with Sheets (protocol v4) @@ -7,25 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/sheets4-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['spreadsheets_batch-update.md', 'Spreadsheets', 'Batch Update'] -- ['spreadsheets_create.md', 'Spreadsheets', 'Create'] -- ['spreadsheets_developer-metadata-get.md', 'Spreadsheets', 'Developer Metadata Get'] -- ['spreadsheets_developer-metadata-search.md', 'Spreadsheets', 'Developer Metadata Search'] -- ['spreadsheets_get.md', 'Spreadsheets', 'Get'] -- ['spreadsheets_get-by-data-filter.md', 'Spreadsheets', 'Get By Data Filter'] -- ['spreadsheets_sheets-copy-to.md', 'Spreadsheets', 'Sheets Copy To'] -- ['spreadsheets_values-append.md', 'Spreadsheets', 'Values Append'] -- ['spreadsheets_values-batch-clear.md', 'Spreadsheets', 'Values Batch Clear'] -- ['spreadsheets_values-batch-clear-by-data-filter.md', 'Spreadsheets', 'Values Batch Clear By Data Filter'] -- ['spreadsheets_values-batch-get.md', 'Spreadsheets', 'Values Batch Get'] -- ['spreadsheets_values-batch-get-by-data-filter.md', 'Spreadsheets', 'Values Batch Get By Data Filter'] -- ['spreadsheets_values-batch-update.md', 'Spreadsheets', 'Values Batch Update'] -- ['spreadsheets_values-batch-update-by-data-filter.md', 'Spreadsheets', 'Values Batch Update By Data Filter'] -- ['spreadsheets_values-clear.md', 
'Spreadsheets', 'Values Clear'] -- ['spreadsheets_values-get.md', 'Spreadsheets', 'Values Get'] -- ['spreadsheets_values-update.md', 'Spreadsheets', 'Values Update'] +nav: +- Home: 'index.md' +- 'Spreadsheets': + - 'Batch Update': 'spreadsheets_batch-update.md' + - 'Create': 'spreadsheets_create.md' + - 'Developer Metadata Get': 'spreadsheets_developer-metadata-get.md' + - 'Developer Metadata Search': 'spreadsheets_developer-metadata-search.md' + - 'Get': 'spreadsheets_get.md' + - 'Get By Data Filter': 'spreadsheets_get-by-data-filter.md' + - 'Sheets Copy To': 'spreadsheets_sheets-copy-to.md' + - 'Values Append': 'spreadsheets_values-append.md' + - 'Values Batch Clear': 'spreadsheets_values-batch-clear.md' + - 'Values Batch Clear By Data Filter': 'spreadsheets_values-batch-clear-by-data-filter.md' + - 'Values Batch Get': 'spreadsheets_values-batch-get.md' + - 'Values Batch Get By Data Filter': 'spreadsheets_values-batch-get-by-data-filter.md' + - 'Values Batch Update': 'spreadsheets_values-batch-update.md' + - 'Values Batch Update By Data Filter': 'spreadsheets_values-batch-update-by-data-filter.md' + - 'Values Clear': 'spreadsheets_values-clear.md' + - 'Values Get': 'spreadsheets_values-get.md' + - 'Values Update': 'spreadsheets_values-update.md' theme: readthedocs diff --git a/gen/sheets4-cli/src/client.rs b/gen/sheets4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/sheets4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum 
ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/sheets4-cli/src/main.rs b/gen/sheets4-cli/src/main.rs index e149f94ccb..3c5044df08 100644 --- a/gen/sheets4-cli/src/main.rs +++ b/gen/sheets4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_sheets4::{api, Error, oauth2}; +use google_sheets4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -460,7 +459,7 @@ where call = call.add_ranges(value.unwrap_or("")); }, "include-grid-data" => { - call = call.include_grid_data(arg_from_str(value.unwrap_or("false"), err, "include-grid-data", "boolean")); + call = call.include_grid_data( value.map(|v| arg_from_str(v, err, "include-grid-data", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -733,7 +732,7 @@ where call = call.insert_data_option(value.unwrap_or("")); }, "include-values-in-response" => { - call = call.include_values_in_response(arg_from_str(value.unwrap_or("false"), err, "include-values-in-response", "boolean")); + call = call.include_values_in_response( value.map(|v| arg_from_str(v, err, "include-values-in-response", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1475,7 +1474,7 @@ where call = call.response_date_time_render_option(value.unwrap_or("")); }, "include-values-in-response" => { - call = call.include_values_in_response(arg_from_str(value.unwrap_or("false"), err, "include-values-in-response", "boolean")); + call = call.include_values_in_response( value.map(|v| arg_from_str(v, err, 
"include-values-in-response", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1769,7 +1768,7 @@ async fn main() { Some(false)), ]), ("get", - Some(r##"Returns the spreadsheet at the given ID. The caller must specify the spreadsheet ID. By default, data within grids will not be returned. You can include grid data one of two ways: * Specify a field mask listing your desired fields using the `fields` URL parameter in HTTP * Set the includeGridData URL parameter to true. If a field mask is set, the `includeGridData` parameter is ignored For large spreadsheets, it is recommended to retrieve only the specific fields of the spreadsheet that you want. To retrieve only subsets of the spreadsheet, use the ranges URL parameter. Multiple ranges can be specified. Limiting the range will return only the portions of the spreadsheet that intersect the requested ranges. Ranges are specified using A1 notation."##), + Some(r##"Returns the spreadsheet at the given ID. The caller must specify the spreadsheet ID. By default, data within grids is not returned. You can include grid data in one of 2 ways: * Specify a [field mask](https://developers.google.com/sheets/api/guides/field-masks) listing your desired fields using the `fields` URL parameter in HTTP * Set the includeGridData URL parameter to true. If a field mask is set, the `includeGridData` parameter is ignored For large spreadsheets, as a best practice, retrieve only the specific spreadsheet fields that you want. To retrieve only subsets of spreadsheet data, use the ranges URL parameter. Ranges are specified using [A1 notation](/sheets/api/guides/concepts#cell). You can define a single cell (for example, `A1`) or multiple cells (for example, `A1:D5`). You can also get cells from other sheets within the same spreadsheet (for example, `Sheet2!A1:C4`) or retrieve multiple ranges at once (for example, `?ranges=A1:D5&ranges=Sheet2!A1:C4`). 
Limiting the range returns only the portions of the spreadsheet that intersect the requested ranges."##), "Details at http://byron.github.io/google-apis-rs/google_sheets4_cli/spreadsheets_get", vec![ (Some(r##"spreadsheet-id"##), @@ -1791,7 +1790,7 @@ async fn main() { Some(false)), ]), ("get-by-data-filter", - Some(r##"Returns the spreadsheet at the given ID. The caller must specify the spreadsheet ID. This method differs from GetSpreadsheet in that it allows selecting which subsets of spreadsheet data to return by specifying a dataFilters parameter. Multiple DataFilters can be specified. Specifying one or more data filters will return the portions of the spreadsheet that intersect ranges matched by any of the filters. By default, data within grids will not be returned. You can include grid data one of two ways: * Specify a field mask listing your desired fields using the `fields` URL parameter in HTTP * Set the includeGridData parameter to true. If a field mask is set, the `includeGridData` parameter is ignored For large spreadsheets, it is recommended to retrieve only the specific fields of the spreadsheet that you want."##), + Some(r##"Returns the spreadsheet at the given ID. The caller must specify the spreadsheet ID. This method differs from GetSpreadsheet in that it allows selecting which subsets of spreadsheet data to return by specifying a dataFilters parameter. Multiple DataFilters can be specified. Specifying one or more data filters returns the portions of the spreadsheet that intersect ranges matched by any of the filters. By default, data within grids is not returned. You can include grid data one of 2 ways: * Specify a [field mask](https://developers.google.com/sheets/api/guides/field-masks) listing your desired fields using the `fields` URL parameter in HTTP * Set the includeGridData parameter to true. 
If a field mask is set, the `includeGridData` parameter is ignored For large spreadsheets, as a best practice, retrieve only the specific spreadsheet fields that you want."##), "Details at http://byron.github.io/google-apis-rs/google_sheets4_cli/spreadsheets_get-by-data-filter", vec![ (Some(r##"spreadsheet-id"##), @@ -1864,7 +1863,7 @@ async fn main() { (Some(r##"range"##), None, - Some(r##"The A1 notation of a range to search for a logical table of data. Values are appended after the last row of the table."##), + Some(r##"The [A1 notation](/sheets/api/guides/concepts#cell) of a range to search for a logical table of data. Values are appended after the last row of the table."##), Some(true), Some(false)), @@ -1887,7 +1886,7 @@ async fn main() { Some(false)), ]), ("values-batch-clear", - Some(r##"Clears one or more ranges of values from a spreadsheet. The caller must specify the spreadsheet ID and one or more ranges. Only values are cleared -- all other properties of the cell (such as formatting, data validation, etc..) are kept."##), + Some(r##"Clears one or more ranges of values from a spreadsheet. The caller must specify the spreadsheet ID and one or more ranges. 
Only values are cleared -- all other properties of the cell (such as formatting and data validation) are kept."##), "Details at http://byron.github.io/google-apis-rs/google_sheets4_cli/spreadsheets_values-batch-clear", vec![ (Some(r##"spreadsheet-id"##), @@ -2060,7 +2059,7 @@ async fn main() { (Some(r##"range"##), None, - Some(r##"The A1 notation or R1C1 notation of the values to clear."##), + Some(r##"The [A1 notation or R1C1 notation](/sheets/api/guides/concepts#cell) of the values to clear."##), Some(true), Some(false)), @@ -2094,7 +2093,7 @@ async fn main() { (Some(r##"range"##), None, - Some(r##"The A1 notation or R1C1 notation of the range to retrieve values from."##), + Some(r##"The [A1 notation or R1C1 notation](/sheets/api/guides/concepts#cell) of the range to retrieve values from."##), Some(true), Some(false)), @@ -2122,7 +2121,7 @@ async fn main() { (Some(r##"range"##), None, - Some(r##"The A1 notation of the values to update."##), + Some(r##"The [A1 notation](/sheets/api/guides/concepts#cell) of the values to update."##), Some(true), Some(false)), @@ -2150,7 +2149,7 @@ async fn main() { let mut app = App::new("sheets4") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20230119") .about("Reads and writes Google Sheets.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_sheets4_cli") .arg(Arg::with_name("url") diff --git a/gen/sheets4/Cargo.toml b/gen/sheets4/Cargo.toml index 67b9f125ac..20fd5b9fd2 100644 --- a/gen/sheets4/Cargo.toml +++ b/gen/sheets4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-sheets4" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Sheets (protocol v4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sheets4" homepage = "https://developers.google.com/sheets/" -documentation = "https://docs.rs/google-sheets4/5.0.2-beta-1+20230119" 
+documentation = "https://docs.rs/google-sheets4/5.0.2+20230119" license = "MIT" keywords = ["sheets", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/sheets4/README.md b/gen/sheets4/README.md index 5f4102c8bd..0351b353b2 100644 --- a/gen/sheets4/README.md +++ b/gen/sheets4/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-sheets4` library allows access to all features of the *Google Sheets* service. -This documentation was generated from *Sheets* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *sheets:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Sheets* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *sheets:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Sheets* *v4* API can be found at the [official documentation site](https://developers.google.com/sheets/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/Sheets) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/Sheets) ... 
-* [spreadsheets](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::Spreadsheet) - * [*batch update*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetBatchUpdateCall), [*create*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetCreateCall), [*developer metadata get*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetDeveloperMetadataGetCall), [*developer metadata search*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetDeveloperMetadataSearchCall), [*get*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetGetCall), [*get by data filter*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetGetByDataFilterCall), [*sheets copy to*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetSheetCopyToCall), [*values append*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueAppendCall), [*values batch clear*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueBatchClearCall), [*values batch clear by data filter*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueBatchClearByDataFilterCall), [*values batch get*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueBatchGetCall), [*values batch get by data filter*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueBatchGetByDataFilterCall), [*values batch update*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueBatchUpdateCall), [*values batch update by data filter*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueBatchUpdateByDataFilterCall), [*values 
clear*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueClearCall), [*values get*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueGetCall) and [*values update*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/api::SpreadsheetValueUpdateCall) +* [spreadsheets](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::Spreadsheet) + * [*batch update*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetBatchUpdateCall), [*create*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetCreateCall), [*developer metadata get*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetDeveloperMetadataGetCall), [*developer metadata search*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetDeveloperMetadataSearchCall), [*get*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetGetCall), [*get by data filter*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetGetByDataFilterCall), [*sheets copy to*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetSheetCopyToCall), [*values append*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueAppendCall), [*values batch clear*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueBatchClearCall), [*values batch clear by data filter*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueBatchClearByDataFilterCall), [*values batch get*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueBatchGetCall), [*values batch get by data filter*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueBatchGetByDataFilterCall), [*values batch 
update*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueBatchUpdateCall), [*values batch update by data filter*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueBatchUpdateByDataFilterCall), [*values clear*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueClearCall), [*values get*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueGetCall) and [*values update*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/api::SpreadsheetValueUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/Sheets)** +* **[Hub](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/Sheets)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::CallBuilder) -* **[Resources](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::CallBuilder) +* **[Resources](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::Part)** + * 
**[Parts](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -145,17 +145,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -165,29 +165,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::Delegate) to the -[Method Builder](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::Delegate) to the +[Method Builder](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::RequestValue) and -[decodable](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::RequestValue) and +[decodable](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-sheets4/5.0.2-beta-1+20230119/google_sheets4/client::RequestValue) are moved +* [request values](https://docs.rs/google-sheets4/5.0.2+20230119/google_sheets4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/sheets4/src/api.rs b/gen/sheets4/src/api.rs index 1953ea267e..779b62d1ad 100644 --- a/gen/sheets4/src/api.rs +++ b/gen/sheets4/src/api.rs @@ -146,7 +146,7 @@ impl<'a, S> Sheets { Sheets { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://sheets.googleapis.com/".to_string(), _root_url: "https://sheets.googleapis.com/".to_string(), } @@ -157,7 +157,7 @@ impl<'a, S> Sheets { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/sheets4/src/client.rs b/gen/sheets4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/sheets4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/sheets4/src/lib.rs b/gen/sheets4/src/lib.rs index 6d9f1441d4..2a3e631a0c 100644 --- a/gen/sheets4/src/lib.rs +++ b/gen/sheets4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Sheets* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *sheets:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Sheets* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *sheets:v4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Sheets* *v4* API can be found at the //! [official documentation site](https://developers.google.com/sheets/). diff --git a/gen/siteverification1-cli/Cargo.toml b/gen/siteverification1-cli/Cargo.toml index 38770d6469..453d13ddc8 100644 --- a/gen/siteverification1-cli/Cargo.toml +++ b/gen/siteverification1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-siteverification1-cli" -version = "4.0.1+20191119" +version = "5.0.2+20191119" authors = ["Sebastian Thiel "] description = "A complete library to interact with siteVerification (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/siteverification1-cli" @@ -20,13 +20,13 @@ name = "siteverification1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-siteverification1] path = "../siteverification1" -version = "4.0.1+20191119" +version = "5.0.2+20191119" + diff --git a/gen/siteverification1-cli/README.md b/gen/siteverification1-cli/README.md index 10d5bbf75f..6aa712f4ec 100644 --- a/gen/siteverification1-cli/README.md +++ b/gen/siteverification1-cli/README.md @@ -25,7 +25,7 @@ Find the source 
code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *siteVerification* API at revision *20191119*. The CLI is at version *4.0.1*. +This documentation was generated from the *siteVerification* API at revision *20191119*. The CLI is at version *5.0.2*. ```bash siteverification1 [options] diff --git a/gen/siteverification1-cli/mkdocs.yml b/gen/siteverification1-cli/mkdocs.yml index f6c096341e..ff6da2af0e 100644 --- a/gen/siteverification1-cli/mkdocs.yml +++ b/gen/siteverification1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: siteVerification v4.0.1+20191119 +site_name: siteVerification v5.0.2+20191119 site_url: http://byron.github.io/google-apis-rs/google-siteverification1-cli site_description: A complete library to interact with siteVerification (protocol v1) @@ -7,15 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/siteverification docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['web-resource_delete.md', 'Web Resource', 'Delete'] -- ['web-resource_get.md', 'Web Resource', 'Get'] -- ['web-resource_get-token.md', 'Web Resource', 'Get Token'] -- ['web-resource_insert.md', 'Web Resource', 'Insert'] -- ['web-resource_list.md', 'Web Resource', 'List'] -- ['web-resource_patch.md', 'Web Resource', 'Patch'] -- ['web-resource_update.md', 'Web Resource', 'Update'] +nav: +- Home: 'index.md' +- 'Web Resource': + - 'Delete': 'web-resource_delete.md' + - 'Get': 'web-resource_get.md' + - 'Get Token': 'web-resource_get-token.md' + - 'Insert': 'web-resource_insert.md' + - 'List': 'web-resource_list.md' + - 'Patch': 'web-resource_patch.md' + - 'Update': 'web-resource_update.md' theme: readthedocs diff --git a/gen/siteverification1-cli/src/client.rs b/gen/siteverification1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/siteverification1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use 
clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/siteverification1-cli/src/main.rs b/gen/siteverification1-cli/src/main.rs index 1f2ee74eae..4315ec5f5b 100644 --- a/gen/siteverification1-cli/src/main.rs +++ b/gen/siteverification1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_siteverification1::{api, Error, oauth2}; +use google_siteverification1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -821,7 +820,7 @@ async fn main() { let mut app = App::new("siteverification1") .author("Sebastian Thiel ") - .version("4.0.1+20191119") + .version("5.0.2+20191119") .about("Verifies ownership of websites or domains with Google.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_siteverification1_cli") .arg(Arg::with_name("url") diff --git a/gen/siteverification1/Cargo.toml b/gen/siteverification1/Cargo.toml index d77d7bcbed..8ae083b04c 100644 --- a/gen/siteverification1/Cargo.toml +++ b/gen/siteverification1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-siteverification1" -version = "5.0.2-beta-1+20191119" +version = "5.0.2+20191119" authors = ["Sebastian Thiel "] description = "A complete library to interact with siteVerification (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/siteverification1" homepage = "https://developers.google.com/site-verification/" -documentation = "https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119" +documentation = 
"https://docs.rs/google-siteverification1/5.0.2+20191119" license = "MIT" keywords = ["siteVerification", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/siteverification1/README.md b/gen/siteverification1/README.md index d2c7d89ca0..b1378d145d 100644 --- a/gen/siteverification1/README.md +++ b/gen/siteverification1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-siteverification1` library allows access to all features of the *Google siteVerification* service. -This documentation was generated from *siteVerification* crate version *5.0.2-beta-1+20191119*, where *20191119* is the exact revision of the *siteVerification:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *siteVerification* crate version *5.0.2+20191119*, where *20191119* is the exact revision of the *siteVerification:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *siteVerification* *v1* API can be found at the [official documentation site](https://developers.google.com/site-verification/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/SiteVerification) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/SiteVerification) ... 
* web resource - * [*delete*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/api::WebResourceDeleteCall), [*get*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/api::WebResourceGetCall), [*get token*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/api::WebResourceGetTokenCall), [*insert*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/api::WebResourceInsertCall), [*list*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/api::WebResourceListCall), [*patch*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/api::WebResourcePatchCall) and [*update*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/api::WebResourceUpdateCall) + * [*delete*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/api::WebResourceDeleteCall), [*get*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/api::WebResourceGetCall), [*get token*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/api::WebResourceGetTokenCall), [*insert*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/api::WebResourceInsertCall), [*list*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/api::WebResourceListCall), [*patch*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/api::WebResourcePatchCall) and [*update*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/api::WebResourceUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/SiteVerification)** +* **[Hub](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/SiteVerification)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::CallBuilder) -* **[Resources](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::CallBuilder) +* **[Resources](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::Part)** + * **[Parts](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::Delegate) to the -[Method Builder](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::Delegate) to the +[Method Builder](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::RequestValue) and -[decodable](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::RequestValue) and +[decodable](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-siteverification1/5.0.2-beta-1+20191119/google_siteverification1/client::RequestValue) are moved +* [request values](https://docs.rs/google-siteverification1/5.0.2+20191119/google_siteverification1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/siteverification1/src/api.rs b/gen/siteverification1/src/api.rs index 7281916332..238a297cf0 100644 --- a/gen/siteverification1/src/api.rs +++ b/gen/siteverification1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> SiteVerification { SiteVerification { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/siteVerification/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> SiteVerification { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/siteverification1/src/client.rs b/gen/siteverification1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/siteverification1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/siteverification1/src/lib.rs b/gen/siteverification1/src/lib.rs index 976705b22b..fa3202788b 100644 --- a/gen/siteverification1/src/lib.rs +++ b/gen/siteverification1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *siteVerification* crate version *5.0.2-beta-1+20191119*, where *20191119* is the exact revision of the *siteVerification:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *siteVerification* crate version *5.0.2+20191119*, where *20191119* is the exact revision of the *siteVerification:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *siteVerification* *v1* API can be found at the //! [official documentation site](https://developers.google.com/site-verification/). diff --git a/gen/smartdevicemanagement1-cli/Cargo.toml b/gen/smartdevicemanagement1-cli/Cargo.toml index cb0074a31e..ba318e460f 100644 --- a/gen/smartdevicemanagement1-cli/Cargo.toml +++ b/gen/smartdevicemanagement1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-smartdevicemanagement1-cli" -version = "4.0.1+20220302" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Smart Device Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/smartdevicemanagement1-cli" @@ -20,13 +20,13 @@ name = "smartdevicemanagement1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-smartdevicemanagement1] path = "../smartdevicemanagement1" -version = "4.0.1+20220302" +version = "5.0.2+20230112" + diff --git a/gen/smartdevicemanagement1-cli/README.md b/gen/smartdevicemanagement1-cli/README.md index 32b8b855e7..238b55fd67 
100644 --- a/gen/smartdevicemanagement1-cli/README.md +++ b/gen/smartdevicemanagement1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Smart Device Management* API at revision *20220302*. The CLI is at version *4.0.1*. +This documentation was generated from the *Smart Device Management* API at revision *20230112*. The CLI is at version *5.0.2*. ```bash smartdevicemanagement1 [options] diff --git a/gen/smartdevicemanagement1-cli/mkdocs.yml b/gen/smartdevicemanagement1-cli/mkdocs.yml index 7b5adc8521..730d181b5b 100644 --- a/gen/smartdevicemanagement1-cli/mkdocs.yml +++ b/gen/smartdevicemanagement1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Smart Device Management v4.0.1+20220302 +site_name: Smart Device Management v5.0.2+20230112 site_url: http://byron.github.io/google-apis-rs/google-smartdevicemanagement1-cli site_description: A complete library to interact with Smart Device Management (protocol v1) @@ -7,15 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/smartdevicemanag docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['enterprises_devices-execute-command.md', 'Enterprises', 'Devices Execute Command'] -- ['enterprises_devices-get.md', 'Enterprises', 'Devices Get'] -- ['enterprises_devices-list.md', 'Enterprises', 'Devices List'] -- ['enterprises_structures-get.md', 'Enterprises', 'Structures Get'] -- ['enterprises_structures-list.md', 'Enterprises', 'Structures List'] -- ['enterprises_structures-rooms-get.md', 'Enterprises', 'Structures Rooms Get'] -- ['enterprises_structures-rooms-list.md', 'Enterprises', 'Structures Rooms List'] +nav: +- Home: 'index.md' +- 'Enterprises': + - 'Devices Execute Command': 'enterprises_devices-execute-command.md' + - 'Devices Get': 'enterprises_devices-get.md' + - 'Devices List': 'enterprises_devices-list.md' + - 'Structures Get': 'enterprises_structures-get.md' + - 
'Structures List': 'enterprises_structures-list.md' + - 'Structures Rooms Get': 'enterprises_structures-rooms-get.md' + - 'Structures Rooms List': 'enterprises_structures-rooms-list.md' theme: readthedocs diff --git a/gen/smartdevicemanagement1-cli/src/client.rs b/gen/smartdevicemanagement1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/smartdevicemanagement1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/smartdevicemanagement1-cli/src/main.rs b/gen/smartdevicemanagement1-cli/src/main.rs index 55e71047b7..211e3c2d7a 100644 --- a/gen/smartdevicemanagement1-cli/src/main.rs +++ b/gen/smartdevicemanagement1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_smartdevicemanagement1::{api, Error, oauth2}; +use google_smartdevicemanagement1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -198,7 +197,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -312,7 +311,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -426,7 +425,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -749,7 +748,7 @@ async fn main() { let 
mut app = App::new("smartdevicemanagement1") .author("Sebastian Thiel ") - .version("4.0.1+20220302") + .version("5.0.2+20230112") .about("Allow select enterprise partners to access, control, and manage Google and Nest devices programmatically.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_smartdevicemanagement1_cli") .arg(Arg::with_name("url") diff --git a/gen/smartdevicemanagement1/Cargo.toml b/gen/smartdevicemanagement1/Cargo.toml index ef37907c71..62f3d3062b 100644 --- a/gen/smartdevicemanagement1/Cargo.toml +++ b/gen/smartdevicemanagement1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-smartdevicemanagement1" -version = "5.0.2-beta-1+20230112" +version = "5.0.2+20230112" authors = ["Sebastian Thiel "] description = "A complete library to interact with Smart Device Management (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/smartdevicemanagement1" homepage = "https://developers.google.com/nest/device-access" -documentation = "https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112" +documentation = "https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112" license = "MIT" keywords = ["smartdevicemanagemen", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/smartdevicemanagement1/README.md b/gen/smartdevicemanagement1/README.md index 81c08432b7..0349a6270a 100644 --- a/gen/smartdevicemanagement1/README.md +++ b/gen/smartdevicemanagement1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-smartdevicemanagement1` library allows access to all features of the *Google Smart Device Management* service. -This documentation was generated from *Smart Device Management* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *smartdevicemanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Smart Device Management* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *smartdevicemanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Smart Device Management* *v1* API can be found at the [official documentation site](https://developers.google.com/nest/device-access). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/SmartDeviceManagement) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/SmartDeviceManagement) ... * enterprises - * [*devices execute command*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/api::EnterpriseDeviceExecuteCommandCall), [*devices get*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/api::EnterpriseDeviceGetCall), [*devices list*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/api::EnterpriseDeviceListCall), [*structures get*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/api::EnterpriseStructureGetCall), [*structures list*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/api::EnterpriseStructureListCall), [*structures rooms get*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/api::EnterpriseStructureRoomGetCall) and [*structures rooms list*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/api::EnterpriseStructureRoomListCall) + * [*devices execute 
command*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/api::EnterpriseDeviceExecuteCommandCall), [*devices get*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/api::EnterpriseDeviceGetCall), [*devices list*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/api::EnterpriseDeviceListCall), [*structures get*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/api::EnterpriseStructureGetCall), [*structures list*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/api::EnterpriseStructureListCall), [*structures rooms get*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/api::EnterpriseStructureRoomGetCall) and [*structures rooms list*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/api::EnterpriseStructureRoomListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/SmartDeviceManagement)** +* **[Hub](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/SmartDeviceManagement)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::CallBuilder) -* **[Resources](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::Resource)** + * creates 
[*Method Builders*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::CallBuilder) +* **[Resources](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::Part)** + * **[Parts](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -118,17 +118,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -138,29 +138,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::Delegate) to the -[Method Builder](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::Delegate) to the +[Method Builder](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::RequestValue) and -[decodable](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::RequestValue) and +[decodable](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-smartdevicemanagement1/5.0.2-beta-1+20230112/google_smartdevicemanagement1/client::RequestValue) are moved +* [request values](https://docs.rs/google-smartdevicemanagement1/5.0.2+20230112/google_smartdevicemanagement1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/smartdevicemanagement1/src/api.rs b/gen/smartdevicemanagement1/src/api.rs index e7d4fced98..440cbe2aa3 100644 --- a/gen/smartdevicemanagement1/src/api.rs +++ b/gen/smartdevicemanagement1/src/api.rs @@ -119,7 +119,7 @@ impl<'a, S> SmartDeviceManagement { SmartDeviceManagement { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://smartdevicemanagement.googleapis.com/".to_string(), _root_url: "https://smartdevicemanagement.googleapis.com/".to_string(), } @@ -130,7 +130,7 @@ impl<'a, S> SmartDeviceManagement { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/smartdevicemanagement1/src/client.rs b/gen/smartdevicemanagement1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/smartdevicemanagement1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/smartdevicemanagement1/src/lib.rs b/gen/smartdevicemanagement1/src/lib.rs index 094271aaa1..115db5ce85 100644 --- a/gen/smartdevicemanagement1/src/lib.rs +++ b/gen/smartdevicemanagement1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Smart Device Management* crate version *5.0.2-beta-1+20230112*, where *20230112* is the exact revision of the *smartdevicemanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Smart Device Management* crate version *5.0.2+20230112*, where *20230112* is the exact revision of the *smartdevicemanagement:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Smart Device Management* *v1* API can be found at the //! [official documentation site](https://developers.google.com/nest/device-access). diff --git a/gen/sourcerepo1-cli/Cargo.toml b/gen/sourcerepo1-cli/Cargo.toml index 14ffee1cd0..6a057ca0a2 100644 --- a/gen/sourcerepo1-cli/Cargo.toml +++ b/gen/sourcerepo1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-sourcerepo1-cli" -version = "4.0.1+20220217" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Source Repositories (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sourcerepo1-cli" @@ -20,13 +20,13 @@ name = "sourcerepo1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-sourcerepo1] path = "../sourcerepo1" -version = "4.0.1+20220217" +version = "5.0.2+20230123" + diff --git a/gen/sourcerepo1-cli/README.md b/gen/sourcerepo1-cli/README.md index e0e77a362e..bbfa142a45 100644 --- a/gen/sourcerepo1-cli/README.md +++ b/gen/sourcerepo1-cli/README.md @@ -25,7 +25,7 
@@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Source Repositories* API at revision *20220217*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Source Repositories* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash sourcerepo1 [options] diff --git a/gen/sourcerepo1-cli/mkdocs.yml b/gen/sourcerepo1-cli/mkdocs.yml index 7f3141d614..c33cfc5496 100644 --- a/gen/sourcerepo1-cli/mkdocs.yml +++ b/gen/sourcerepo1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Source Repositories v4.0.1+20220217 +site_name: Cloud Source Repositories v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-sourcerepo1-cli site_description: A complete library to interact with Cloud Source Repositories (protocol v1) @@ -7,19 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/sourcerepo1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_get-config.md', 'Projects', 'Get Config'] -- ['projects_repos-create.md', 'Projects', 'Repos Create'] -- ['projects_repos-delete.md', 'Projects', 'Repos Delete'] -- ['projects_repos-get.md', 'Projects', 'Repos Get'] -- ['projects_repos-get-iam-policy.md', 'Projects', 'Repos Get Iam Policy'] -- ['projects_repos-list.md', 'Projects', 'Repos List'] -- ['projects_repos-patch.md', 'Projects', 'Repos Patch'] -- ['projects_repos-set-iam-policy.md', 'Projects', 'Repos Set Iam Policy'] -- ['projects_repos-sync.md', 'Projects', 'Repos Sync'] -- ['projects_repos-test-iam-permissions.md', 'Projects', 'Repos Test Iam Permissions'] -- ['projects_update-config.md', 'Projects', 'Update Config'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Get Config': 'projects_get-config.md' + - 'Repos Create': 'projects_repos-create.md' + - 'Repos Delete': 'projects_repos-delete.md' + - 'Repos Get': 'projects_repos-get.md' + - 'Repos Get Iam Policy': 
'projects_repos-get-iam-policy.md' + - 'Repos List': 'projects_repos-list.md' + - 'Repos Patch': 'projects_repos-patch.md' + - 'Repos Set Iam Policy': 'projects_repos-set-iam-policy.md' + - 'Repos Sync': 'projects_repos-sync.md' + - 'Repos Test Iam Permissions': 'projects_repos-test-iam-permissions.md' + - 'Update Config': 'projects_update-config.md' theme: readthedocs diff --git a/gen/sourcerepo1-cli/src/client.rs b/gen/sourcerepo1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/sourcerepo1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/sourcerepo1-cli/src/main.rs b/gen/sourcerepo1-cli/src/main.rs index 710d505104..a2946e3ff6 100644 --- a/gen/sourcerepo1-cli/src/main.rs +++ b/gen/sourcerepo1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_sourcerepo1::{api, Error, oauth2}; +use google_sourcerepo1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -304,7 +303,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -363,7 +362,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1066,7 +1065,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being requested. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1138,7 +1137,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1194,7 +1193,7 @@ async fn main() { vec![ (Some(r##"resource"##), None, - Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), + Some(r##"REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field."##), Some(true), Some(false)), @@ -1250,7 +1249,7 @@ async fn main() { let mut app = App::new("sourcerepo1") .author("Sebastian Thiel ") - .version("4.0.1+20220217") + .version("5.0.2+20230123") .about("Accesses source code repositories hosted by Google.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_sourcerepo1_cli") .arg(Arg::with_name("url") diff --git a/gen/sourcerepo1/Cargo.toml b/gen/sourcerepo1/Cargo.toml index e7a5eecdbb..e197afa5b4 100644 --- a/gen/sourcerepo1/Cargo.toml +++ b/gen/sourcerepo1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-sourcerepo1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Source Repositories (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sourcerepo1" homepage = "https://cloud.google.com/source-repositories/docs/apis" 
-documentation = "https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-sourcerepo1/5.0.2+20230123" license = "MIT" keywords = ["sourcerepo", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/sourcerepo1/README.md b/gen/sourcerepo1/README.md index ee156c9ccf..77c28476e4 100644 --- a/gen/sourcerepo1/README.md +++ b/gen/sourcerepo1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-sourcerepo1` library allows access to all features of the *Google Cloud Source Repositories* service. -This documentation was generated from *Cloud Source Repositories* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *sourcerepo:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Source Repositories* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *sourcerepo:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Source Repositories* *v1* API can be found at the [official documentation site](https://cloud.google.com/source-repositories/docs/apis). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/CloudSourceRepositories) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/CloudSourceRepositories) ... 
* projects - * [*get config*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectGetConfigCall), [*repos create*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoCreateCall), [*repos delete*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoDeleteCall), [*repos get*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoGetCall), [*repos get iam policy*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoGetIamPolicyCall), [*repos list*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoListCall), [*repos patch*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoPatchCall), [*repos set iam policy*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoSetIamPolicyCall), [*repos sync*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoSyncCall), [*repos test iam permissions*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectRepoTestIamPermissionCall) and [*update config*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/api::ProjectUpdateConfigCall) + * [*get config*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectGetConfigCall), [*repos create*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoCreateCall), [*repos delete*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoDeleteCall), [*repos get*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoGetCall), [*repos get iam policy*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoGetIamPolicyCall), [*repos 
list*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoListCall), [*repos patch*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoPatchCall), [*repos set iam policy*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoSetIamPolicyCall), [*repos sync*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoSyncCall), [*repos test iam permissions*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectRepoTestIamPermissionCall) and [*update config*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/api::ProjectUpdateConfigCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/CloudSourceRepositories)** +* **[Hub](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/CloudSourceRepositories)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::CallBuilder) -* **[Resources](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::CallBuilder) +* **[Resources](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::Resource)** * primary types that you can apply 
*Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::Part)** + * **[Parts](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::Delegate) to the -[Method Builder](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::Delegate) to the +[Method Builder](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::RequestValue) and -[decodable](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::RequestValue) and +[decodable](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-sourcerepo1/5.0.2-beta-1+20230123/google_sourcerepo1/client::RequestValue) are moved +* [request values](https://docs.rs/google-sourcerepo1/5.0.2+20230123/google_sourcerepo1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/sourcerepo1/src/api.rs b/gen/sourcerepo1/src/api.rs index 6e2102b849..5992d1a46f 100644 --- a/gen/sourcerepo1/src/api.rs +++ b/gen/sourcerepo1/src/api.rs @@ -137,7 +137,7 @@ impl<'a, S> CloudSourceRepositories { CloudSourceRepositories { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://sourcerepo.googleapis.com/".to_string(), _root_url: "https://sourcerepo.googleapis.com/".to_string(), } @@ -148,7 +148,7 @@ impl<'a, S> CloudSourceRepositories { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/sourcerepo1/src/client.rs b/gen/sourcerepo1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/sourcerepo1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/sourcerepo1/src/lib.rs b/gen/sourcerepo1/src/lib.rs index 25add7356a..ec4170eb90 100644 --- a/gen/sourcerepo1/src/lib.rs +++ b/gen/sourcerepo1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Source Repositories* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *sourcerepo:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Source Repositories* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *sourcerepo:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Source Repositories* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/source-repositories/docs/apis). diff --git a/gen/spectrum1_explorer-cli/Cargo.toml b/gen/spectrum1_explorer-cli/Cargo.toml index 835144a8c1..5469005a77 100644 --- a/gen/spectrum1_explorer-cli/Cargo.toml +++ b/gen/spectrum1_explorer-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-spectrum1_explorer-cli" -version = "4.0.1+20170306" +version = "5.0.2+20170306" authors = ["Sebastian Thiel "] description = "A complete library to interact with spectrum (protocol v1explorer)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/spectrum1_explorer-cli" @@ -20,13 +20,13 @@ name = "spectrum1-explorer" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-spectrum1_explorer] path = "../spectrum1_explorer" -version = "4.0.1+20170306" +version = "5.0.2+20170306" + diff --git a/gen/spectrum1_explorer-cli/README.md b/gen/spectrum1_explorer-cli/README.md index 35caf65e85..61185120d1 100644 --- 
a/gen/spectrum1_explorer-cli/README.md +++ b/gen/spectrum1_explorer-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *spectrum* API at revision *20170306*. The CLI is at version *4.0.1*. +This documentation was generated from the *spectrum* API at revision *20170306*. The CLI is at version *5.0.2*. ```bash spectrum1-explorer [options] diff --git a/gen/spectrum1_explorer-cli/mkdocs.yml b/gen/spectrum1_explorer-cli/mkdocs.yml index 8576877d93..0240e54fe4 100644 --- a/gen/spectrum1_explorer-cli/mkdocs.yml +++ b/gen/spectrum1_explorer-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: spectrum v4.0.1+20170306 +site_name: spectrum v5.0.2+20170306 site_url: http://byron.github.io/google-apis-rs/google-spectrum1_explorer-cli site_description: A complete library to interact with spectrum (protocol v1explorer) @@ -7,14 +7,15 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/spectrum1_explor docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['paws_get-spectrum.md', 'Paws', 'Get Spectrum'] -- ['paws_get-spectrum-batch.md', 'Paws', 'Get Spectrum Batch'] -- ['paws_init.md', 'Paws', 'Init'] -- ['paws_notify-spectrum-use.md', 'Paws', 'Notify Spectrum Use'] -- ['paws_register.md', 'Paws', 'Register'] -- ['paws_verify-device.md', 'Paws', 'Verify Device'] +nav: +- Home: 'index.md' +- 'Paws': + - 'Get Spectrum': 'paws_get-spectrum.md' + - 'Get Spectrum Batch': 'paws_get-spectrum-batch.md' + - 'Init': 'paws_init.md' + - 'Notify Spectrum Use': 'paws_notify-spectrum-use.md' + - 'Register': 'paws_register.md' + - 'Verify Device': 'paws_verify-device.md' theme: readthedocs diff --git a/gen/spectrum1_explorer-cli/src/client.rs b/gen/spectrum1_explorer-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/spectrum1_explorer-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT 
EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/spectrum1_explorer-cli/src/main.rs b/gen/spectrum1_explorer-cli/src/main.rs index bbf1d7ac1e..b8a25a0e81 100644 --- a/gen/spectrum1_explorer-cli/src/main.rs +++ b/gen/spectrum1_explorer-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_spectrum1_explorer::{api, Error, oauth2}; +use google_spectrum1_explorer::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -954,7 +953,7 @@ async fn main() { let mut app = App::new("spectrum1-explorer") .author("Sebastian Thiel ") - .version("4.0.1+20170306") + .version("5.0.2+20170306") .about("API for spectrum-management functions.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_spectrum1_explorer_cli") .arg(Arg::with_name("folder") diff --git a/gen/spectrum1_explorer/Cargo.toml b/gen/spectrum1_explorer/Cargo.toml index 5401304237..bad8a11a5e 100644 --- a/gen/spectrum1_explorer/Cargo.toml +++ b/gen/spectrum1_explorer/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-spectrum1_explorer" -version = "5.0.2-beta-1+20170306" +version = "5.0.2+20170306" authors = ["Sebastian Thiel "] description = "A complete library to interact with spectrum (protocol v1explorer)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/spectrum1_explorer" homepage = "http://developers.google.com/spectrum" -documentation = "https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306" +documentation = 
"https://docs.rs/google-spectrum1_explorer/5.0.2+20170306" license = "MIT" keywords = ["spectrum", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/spectrum1_explorer/README.md b/gen/spectrum1_explorer/README.md index f59ad44642..6fd363d818 100644 --- a/gen/spectrum1_explorer/README.md +++ b/gen/spectrum1_explorer/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-spectrum1_explorer` library allows access to all features of the *Google spectrum* service. -This documentation was generated from *spectrum* crate version *5.0.2-beta-1+20170306*, where *20170306* is the exact revision of the *spectrum:v1explorer* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *spectrum* crate version *5.0.2+20170306*, where *20170306* is the exact revision of the *spectrum:v1explorer* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *spectrum* *v1_explorer* API can be found at the [official documentation site](http://developers.google.com/spectrum). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/Spectrum) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/Spectrum) ... 
* paws - * [*get spectrum*](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/api::PawGetSpectrumCall), [*get spectrum batch*](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/api::PawGetSpectrumBatchCall), [*init*](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/api::PawInitCall), [*notify spectrum use*](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/api::PawNotifySpectrumUseCall), [*register*](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/api::PawRegisterCall) and [*verify device*](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/api::PawVerifyDeviceCall) + * [*get spectrum*](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/api::PawGetSpectrumCall), [*get spectrum batch*](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/api::PawGetSpectrumBatchCall), [*init*](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/api::PawInitCall), [*notify spectrum use*](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/api::PawNotifySpectrumUseCall), [*register*](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/api::PawRegisterCall) and [*verify device*](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/api::PawVerifyDeviceCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/Spectrum)** +* **[Hub](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/Spectrum)** * a central object to maintain state and 
allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::CallBuilder) -* **[Resources](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::CallBuilder) +* **[Resources](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::Part)** + * **[Parts](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::CallBuilder)** +* **[Activities](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::Delegate) to the -[Method Builder](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::Delegate) to the +[Method Builder](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::RequestValue) and -[decodable](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::RequestValue) and +[decodable](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-spectrum1_explorer/5.0.2-beta-1+20170306/google_spectrum1_explorer/client::RequestValue) are moved +* [request values](https://docs.rs/google-spectrum1_explorer/5.0.2+20170306/google_spectrum1_explorer/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/spectrum1_explorer/src/api.rs b/gen/spectrum1_explorer/src/api.rs index 17d22b3802..89c7d405b8 100644 --- a/gen/spectrum1_explorer/src/api.rs +++ b/gen/spectrum1_explorer/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> Spectrum { Spectrum { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/spectrum/v1explorer/paws/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -114,7 +114,7 @@ impl<'a, S> Spectrum { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/spectrum1_explorer/src/client.rs b/gen/spectrum1_explorer/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/spectrum1_explorer/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/spectrum1_explorer/src/lib.rs b/gen/spectrum1_explorer/src/lib.rs index eebf83831d..aa6976eb43 100644 --- a/gen/spectrum1_explorer/src/lib.rs +++ b/gen/spectrum1_explorer/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *spectrum* crate version *5.0.2-beta-1+20170306*, where *20170306* is the exact revision of the *spectrum:v1explorer* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *spectrum* crate version *5.0.2+20170306*, where *20170306* is the exact revision of the *spectrum:v1explorer* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *spectrum* *v1_explorer* API can be found at the //! [official documentation site](http://developers.google.com/spectrum). diff --git a/gen/speech1-cli/Cargo.toml b/gen/speech1-cli/Cargo.toml index d2f6bc979c..fe7725b937 100644 --- a/gen/speech1-cli/Cargo.toml +++ b/gen/speech1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-speech1-cli" -version = "4.0.1+20220221" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Speech (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/speech1-cli" @@ -20,13 +20,13 @@ name = "speech1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-speech1] path = "../speech1" -version = "4.0.1+20220221" +version = "5.0.2+20230119" + diff --git a/gen/speech1-cli/README.md b/gen/speech1-cli/README.md index adb334e3e6..72101ddcec 100644 --- a/gen/speech1-cli/README.md +++ b/gen/speech1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated 
from the *Speech* API at revision *20220221*. The CLI is at version *4.0.1*. +This documentation was generated from the *Speech* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash speech1 [options] diff --git a/gen/speech1-cli/mkdocs.yml b/gen/speech1-cli/mkdocs.yml index 2813ccaceb..4ae5a985b7 100644 --- a/gen/speech1-cli/mkdocs.yml +++ b/gen/speech1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Speech v4.0.1+20220221 +site_name: Speech v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-speech1-cli site_description: A complete library to interact with Speech (protocol v1) @@ -7,22 +7,25 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/speech1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['projects_locations-custom-classes-create.md', 'Projects', 'Locations Custom Classes Create'] -- ['projects_locations-custom-classes-delete.md', 'Projects', 'Locations Custom Classes Delete'] -- ['projects_locations-custom-classes-get.md', 'Projects', 'Locations Custom Classes Get'] -- ['projects_locations-custom-classes-list.md', 'Projects', 'Locations Custom Classes List'] -- ['projects_locations-custom-classes-patch.md', 'Projects', 'Locations Custom Classes Patch'] -- ['projects_locations-phrase-sets-create.md', 'Projects', 'Locations Phrase Sets Create'] -- ['projects_locations-phrase-sets-delete.md', 'Projects', 'Locations Phrase Sets Delete'] -- ['projects_locations-phrase-sets-get.md', 'Projects', 'Locations Phrase Sets Get'] -- ['projects_locations-phrase-sets-list.md', 'Projects', 'Locations Phrase Sets List'] -- ['projects_locations-phrase-sets-patch.md', 'Projects', 'Locations Phrase Sets Patch'] -- ['speech_longrunningrecognize.md', 'Speech', 'Longrunningrecognize'] -- ['speech_recognize.md', 'Speech', 'Recognize'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 
'List': 'operations_list.md' +- 'Projects': + - 'Locations Custom Classes Create': 'projects_locations-custom-classes-create.md' + - 'Locations Custom Classes Delete': 'projects_locations-custom-classes-delete.md' + - 'Locations Custom Classes Get': 'projects_locations-custom-classes-get.md' + - 'Locations Custom Classes List': 'projects_locations-custom-classes-list.md' + - 'Locations Custom Classes Patch': 'projects_locations-custom-classes-patch.md' + - 'Locations Phrase Sets Create': 'projects_locations-phrase-sets-create.md' + - 'Locations Phrase Sets Delete': 'projects_locations-phrase-sets-delete.md' + - 'Locations Phrase Sets Get': 'projects_locations-phrase-sets-get.md' + - 'Locations Phrase Sets List': 'projects_locations-phrase-sets-list.md' + - 'Locations Phrase Sets Patch': 'projects_locations-phrase-sets-patch.md' +- 'Speech': + - 'Longrunningrecognize': 'speech_longrunningrecognize.md' + - 'Recognize': 'speech_recognize.md' theme: readthedocs diff --git a/gen/speech1-cli/src/client.rs b/gen/speech1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/speech1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user 
comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/speech1-cli/src/main.rs b/gen/speech1-cli/src/main.rs index ee8503d0ed..461ca498e7 100644 --- a/gen/speech1-cli/src/main.rs +++ b/gen/speech1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_speech1::{api, Error, oauth2}; +use google_speech1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -369,7 +368,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -459,7 +458,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -709,7 +708,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), 
err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -799,7 +798,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -873,6 +872,7 @@ where match &temp_cursor.to_string()[..] { "audio.content" => Some(("audio.content", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "audio.uri" => Some(("audio.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.adaptation.abnf-grammar.abnf-strings" => Some(("config.adaptation.abnfGrammar.abnfStrings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.adaptation.phrase-set-references" => Some(("config.adaptation.phraseSetReferences", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.alternative-language-codes" => Some(("config.alternativeLanguageCodes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.audio-channel-count" => Some(("config.audioChannelCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -903,7 +903,7 @@ where "config.use-enhanced" => Some(("config.useEnhanced", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "output-config.gcs-uri" => Some(("outputConfig.gcsUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["adaptation", "alternative-language-codes", "audio", "audio-channel-count", "audio-topic", "config", "content", "diarization-config", "enable-automatic-punctuation", "enable-separate-recognition-per-channel", "enable-speaker-diarization", "enable-spoken-emojis", 
"enable-spoken-punctuation", "enable-word-confidence", "enable-word-time-offsets", "encoding", "gcs-uri", "industry-naics-code-of-audio", "interaction-type", "language-code", "max-alternatives", "max-speaker-count", "metadata", "microphone-distance", "min-speaker-count", "model", "original-media-type", "original-mime-type", "output-config", "phrase-set-references", "profanity-filter", "recording-device-name", "recording-device-type", "sample-rate-hertz", "speaker-tag", "uri", "use-enhanced"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["abnf-grammar", "abnf-strings", "adaptation", "alternative-language-codes", "audio", "audio-channel-count", "audio-topic", "config", "content", "diarization-config", "enable-automatic-punctuation", "enable-separate-recognition-per-channel", "enable-speaker-diarization", "enable-spoken-emojis", "enable-spoken-punctuation", "enable-word-confidence", "enable-word-time-offsets", "encoding", "gcs-uri", "industry-naics-code-of-audio", "interaction-type", "language-code", "max-alternatives", "max-speaker-count", "metadata", "microphone-distance", "min-speaker-count", "model", "original-media-type", "original-mime-type", "output-config", "phrase-set-references", "profanity-filter", "recording-device-name", "recording-device-type", "sample-rate-hertz", "speaker-tag", "uri", "use-enhanced"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -988,6 +988,7 @@ where match &temp_cursor.to_string()[..] 
{ "audio.content" => Some(("audio.content", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "audio.uri" => Some(("audio.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "config.adaptation.abnf-grammar.abnf-strings" => Some(("config.adaptation.abnfGrammar.abnfStrings", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.adaptation.phrase-set-references" => Some(("config.adaptation.phraseSetReferences", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.alternative-language-codes" => Some(("config.alternativeLanguageCodes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config.audio-channel-count" => Some(("config.audioChannelCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -1017,7 +1018,7 @@ where "config.sample-rate-hertz" => Some(("config.sampleRateHertz", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "config.use-enhanced" => Some(("config.useEnhanced", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["adaptation", "alternative-language-codes", "audio", "audio-channel-count", "audio-topic", "config", "content", "diarization-config", "enable-automatic-punctuation", "enable-separate-recognition-per-channel", "enable-speaker-diarization", "enable-spoken-emojis", "enable-spoken-punctuation", "enable-word-confidence", "enable-word-time-offsets", "encoding", "industry-naics-code-of-audio", "interaction-type", "language-code", "max-alternatives", "max-speaker-count", "metadata", "microphone-distance", "min-speaker-count", "model", "original-media-type", "original-mime-type", "phrase-set-references", "profanity-filter", "recording-device-name", "recording-device-type", "sample-rate-hertz", "speaker-tag", "uri", "use-enhanced"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["abnf-grammar", "abnf-strings", 
"adaptation", "alternative-language-codes", "audio", "audio-channel-count", "audio-topic", "config", "content", "diarization-config", "enable-automatic-punctuation", "enable-separate-recognition-per-channel", "enable-speaker-diarization", "enable-spoken-emojis", "enable-spoken-punctuation", "enable-word-confidence", "enable-word-time-offsets", "encoding", "industry-naics-code-of-audio", "interaction-type", "language-code", "max-alternatives", "max-speaker-count", "metadata", "microphone-distance", "min-speaker-count", "model", "original-media-type", "original-mime-type", "phrase-set-references", "profanity-filter", "recording-device-name", "recording-device-type", "sample-rate-hertz", "speaker-tag", "uri", "use-enhanced"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1269,7 +1270,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The parent resource where this custom class will be created. Format: `projects/{project}/locations/{location}/customClasses` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), + Some(r##"Required. The parent resource where this custom class will be created. Format: `projects/{project}/locations/{location}/customClasses` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](https://cloud.google.com/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), Some(true), Some(false)), @@ -1297,7 +1298,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. 
The name of the custom class to delete. Format: `projects/{project}/locations/{location}/customClasses/{custom_class}` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), + Some(r##"Required. The name of the custom class to delete. Format: `projects/{project}/locations/{location}/customClasses/{custom_class}` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](https://cloud.google.com/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), Some(true), Some(false)), @@ -1341,7 +1342,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The parent, which owns this collection of custom classes. Format: `projects/{project}/locations/{location}/customClasses` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), + Some(r##"Required. The parent, which owns this collection of custom classes. Format: `projects/{project}/locations/{location}/customClasses` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. 
To specify a region, use a [regional endpoint](https://cloud.google.com/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), Some(true), Some(false)), @@ -1391,7 +1392,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The parent resource where this phrase set will be created. Format: `projects/{project}/locations/{location}/phraseSets` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), + Some(r##"Required. The parent resource where this phrase set will be created. Format: `projects/{project}/locations/{location}/phraseSets` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](https://cloud.google.com/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), Some(true), Some(false)), @@ -1441,7 +1442,7 @@ async fn main() { vec![ (Some(r##"name"##), None, - Some(r##"Required. The name of the phrase set to retrieve. Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), + Some(r##"Required. The name of the phrase set to retrieve. Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). 
If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](https://cloud.google.com/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), Some(true), Some(false)), @@ -1463,7 +1464,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"Required. The parent, which owns this collection of phrase set. Format: `projects/{project}/locations/{location}` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. To specify a region, use a [regional endpoint](/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), + Some(r##"Required. The parent, which owns this collection of phrase set. Format: `projects/{project}/locations/{location}` Speech-to-Text supports three locations: `global`, `us` (US North America), and `eu` (Europe). If you are calling the `speech.googleapis.com` endpoint, use the `global` location. 
To specify a region, use a [regional endpoint](https://cloud.google.com/speech-to-text/docs/endpoints) with matching `us` or `eu` location value."##), Some(true), Some(false)), @@ -1560,7 +1561,7 @@ async fn main() { let mut app = App::new("speech1") .author("Sebastian Thiel ") - .version("4.0.1+20220221") + .version("5.0.2+20230119") .about("Converts audio to text by applying powerful neural network models.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_speech1_cli") .arg(Arg::with_name("url") diff --git a/gen/speech1/Cargo.toml b/gen/speech1/Cargo.toml index 80ba194d1e..d234fbce83 100644 --- a/gen/speech1/Cargo.toml +++ b/gen/speech1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-speech1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Speech (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/speech1" homepage = "https://cloud.google.com/speech-to-text/docs/quickstart-protocol" -documentation = "https://docs.rs/google-speech1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-speech1/5.0.2+20230119" license = "MIT" keywords = ["speech", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/speech1/README.md b/gen/speech1/README.md index 6748b91295..1c87dbaada 100644 --- a/gen/speech1/README.md +++ b/gen/speech1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-speech1` library allows access to all features of the *Google Speech* service. -This documentation was generated from *Speech* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *speech:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Speech* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *speech:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Speech* *v1* API can be found at the [official documentation site](https://cloud.google.com/speech-to-text/docs/quickstart-protocol). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/Speech) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/Speech) ... -* [operations](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::Operation) - * [*get*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::OperationGetCall) and [*list*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::OperationListCall) +* [operations](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::Operation) + * [*get*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::OperationGetCall) and [*list*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::OperationListCall) * projects - * [*locations custom classes create*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationCustomClassCreateCall), [*locations custom classes delete*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationCustomClassDeleteCall), [*locations custom classes get*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationCustomClassGetCall), [*locations custom classes list*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationCustomClassListCall), [*locations custom classes 
patch*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationCustomClassPatchCall), [*locations phrase sets create*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationPhraseSetCreateCall), [*locations phrase sets delete*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationPhraseSetDeleteCall), [*locations phrase sets get*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationPhraseSetGetCall), [*locations phrase sets list*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationPhraseSetListCall) and [*locations phrase sets patch*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::ProjectLocationPhraseSetPatchCall) + * [*locations custom classes create*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationCustomClassCreateCall), [*locations custom classes delete*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationCustomClassDeleteCall), [*locations custom classes get*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationCustomClassGetCall), [*locations custom classes list*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationCustomClassListCall), [*locations custom classes patch*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationCustomClassPatchCall), [*locations phrase sets create*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationPhraseSetCreateCall), [*locations phrase sets delete*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationPhraseSetDeleteCall), [*locations phrase sets get*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationPhraseSetGetCall), [*locations phrase sets 
list*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationPhraseSetListCall) and [*locations phrase sets patch*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::ProjectLocationPhraseSetPatchCall) * speech - * [*longrunningrecognize*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::SpeechLongrunningrecognizeCall) and [*recognize*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/api::SpeechRecognizeCall) + * [*longrunningrecognize*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::SpeechLongrunningrecognizeCall) and [*recognize*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/api::SpeechRecognizeCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/Speech)** +* **[Hub](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/Speech)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::CallBuilder) -* **[Resources](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::CallBuilder) +* **[Resources](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::Part)** + * **[Parts](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::Delegate) to the -[Method Builder](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::Delegate) to the +[Method Builder](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::RequestValue) and -[decodable](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::RequestValue) and +[decodable](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-speech1/5.0.2-beta-1+20230119/google_speech1/client::RequestValue) are moved +* [request values](https://docs.rs/google-speech1/5.0.2+20230119/google_speech1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/speech1/src/api.rs b/gen/speech1/src/api.rs index 27eb322c87..e275b67313 100644 --- a/gen/speech1/src/api.rs +++ b/gen/speech1/src/api.rs @@ -123,7 +123,7 @@ impl<'a, S> Speech { Speech { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://speech.googleapis.com/".to_string(), _root_url: "https://speech.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Speech { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/speech1/src/client.rs b/gen/speech1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/speech1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/speech1/src/lib.rs b/gen/speech1/src/lib.rs index 76cb780a0d..d6c3214b5d 100644 --- a/gen/speech1/src/lib.rs +++ b/gen/speech1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Speech* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *speech:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Speech* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *speech:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Speech* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/speech-to-text/docs/quickstart-protocol). diff --git a/gen/speech1_beta1-cli/Cargo.toml b/gen/speech1_beta1-cli/Cargo.toml index a675167ad4..1facbf8d65 100644 --- a/gen/speech1_beta1-cli/Cargo.toml +++ b/gen/speech1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-speech1_beta1-cli" -version = "4.0.1+20181005" +version = "5.0.2+20181005" authors = ["Sebastian Thiel "] description = "A complete library to interact with Speech (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/speech1_beta1-cli" @@ -20,13 +20,13 @@ name = "speech1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-speech1_beta1] path = "../speech1_beta1" -version = "4.0.1+20181005" +version = "5.0.2+20181005" + diff --git a/gen/speech1_beta1-cli/README.md b/gen/speech1_beta1-cli/README.md index 37608b9184..3d7e601ac0 100644 --- a/gen/speech1_beta1-cli/README.md +++ b/gen/speech1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Speech* API at revision *20181005*. The CLI is at version *4.0.1*. +This documentation was generated from the *Speech* API at revision *20181005*. The CLI is at version *5.0.2*. ```bash speech1-beta1 [options] diff --git a/gen/speech1_beta1-cli/mkdocs.yml b/gen/speech1_beta1-cli/mkdocs.yml index 37c1f2b103..0bd1057c1e 100644 --- a/gen/speech1_beta1-cli/mkdocs.yml +++ b/gen/speech1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Speech v4.0.1+20181005 +site_name: Speech v5.0.2+20181005 site_url: http://byron.github.io/google-apis-rs/google-speech1_beta1-cli site_description: A complete library to interact with Speech (protocol v1beta1) @@ -7,12 +7,14 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/speech1_beta1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['speech_asyncrecognize.md', 'Speech', 'Asyncrecognize'] -- ['speech_syncrecognize.md', 'Speech', 'Syncrecognize'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Speech': + - 'Asyncrecognize': 'speech_asyncrecognize.md' + - 'Syncrecognize': 'speech_syncrecognize.md' theme: readthedocs diff --git a/gen/speech1_beta1-cli/src/client.rs b/gen/speech1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/speech1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use 
std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/speech1_beta1-cli/src/main.rs b/gen/speech1_beta1-cli/src/main.rs index 3951d196cd..365bdce86a 100644 --- a/gen/speech1_beta1-cli/src/main.rs +++ b/gen/speech1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_speech1_beta1::{api, Error, oauth2}; +use google_speech1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "name" => { call = call.name(value.unwrap_or("")); @@ -567,7 +566,7 @@ async fn main() { let mut app = App::new("speech1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20181005") + .version("5.0.2+20181005") .about("Converts audio to text by applying powerful neural network models.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_speech1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/speech1_beta1/Cargo.toml b/gen/speech1_beta1/Cargo.toml index c1f7f1d83c..926c017237 100644 --- a/gen/speech1_beta1/Cargo.toml +++ b/gen/speech1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-speech1_beta1" -version = "5.0.2-beta-1+20181005" +version = "5.0.2+20181005" authors = ["Sebastian Thiel "] description = "A complete library to 
interact with Speech (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/speech1_beta1" homepage = "https://cloud.google.com/speech-to-text/docs/quickstart-protocol" -documentation = "https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005" +documentation = "https://docs.rs/google-speech1_beta1/5.0.2+20181005" license = "MIT" keywords = ["speech", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/speech1_beta1/README.md b/gen/speech1_beta1/README.md index a113cebdca..e80092bf4d 100644 --- a/gen/speech1_beta1/README.md +++ b/gen/speech1_beta1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-speech1_beta1` library allows access to all features of the *Google Speech* service. -This documentation was generated from *Speech* crate version *5.0.2-beta-1+20181005*, where *20181005* is the exact revision of the *speech:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Speech* crate version *5.0.2+20181005*, where *20181005* is the exact revision of the *speech:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Speech* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/speech-to-text/docs/quickstart-protocol). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/Speech) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/Speech) ... 
-* [operations](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/api::Operation) - * [*get*](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/api::OperationGetCall) and [*list*](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/api::OperationListCall) +* [operations](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/api::Operation) + * [*get*](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/api::OperationGetCall) and [*list*](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/api::OperationListCall) * speech - * [*asyncrecognize*](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/api::SpeechAsyncrecognizeCall) and [*syncrecognize*](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/api::SpeechSyncrecognizeCall) + * [*asyncrecognize*](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/api::SpeechAsyncrecognizeCall) and [*syncrecognize*](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/api::SpeechSyncrecognizeCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/Speech)** +* **[Hub](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/Speech)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -126,17 +126,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -146,29 +146,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-speech1_beta1/5.0.2-beta-1+20181005/google_speech1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-speech1_beta1/5.0.2+20181005/google_speech1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/speech1_beta1/src/api.rs b/gen/speech1_beta1/src/api.rs index b2d2fde4ba..4f0d75bbf0 100644 --- a/gen/speech1_beta1/src/api.rs +++ b/gen/speech1_beta1/src/api.rs @@ -123,7 +123,7 @@ impl<'a, S> Speech { Speech { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://speech.googleapis.com/".to_string(), _root_url: "https://speech.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Speech { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/speech1_beta1/src/client.rs b/gen/speech1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/speech1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/speech1_beta1/src/lib.rs b/gen/speech1_beta1/src/lib.rs index 6271cdf873..c9ba9a2233 100644 --- a/gen/speech1_beta1/src/lib.rs +++ b/gen/speech1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Speech* crate version *5.0.2-beta-1+20181005*, where *20181005* is the exact revision of the *speech:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Speech* crate version *5.0.2+20181005*, where *20181005* is the exact revision of the *speech:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Speech* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/speech-to-text/docs/quickstart-protocol). diff --git a/gen/sql1_beta4-cli/Cargo.toml b/gen/sql1_beta4-cli/Cargo.toml index 419d3237a1..be2d3ef394 100644 --- a/gen/sql1_beta4-cli/Cargo.toml +++ b/gen/sql1_beta4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-sql1_beta4-cli" -version = "4.0.1+20200331" +version = "5.0.2+20200331" authors = ["Sebastian Thiel "] description = "A complete library to interact with SQL Admin (protocol v1beta4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sql1_beta4-cli" @@ -20,13 +20,13 @@ name = "sql1-beta4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-sql1_beta4] path = "../sql1_beta4" -version = "4.0.1+20200331" +version = "5.0.2+20200331" + diff --git a/gen/sql1_beta4-cli/README.md b/gen/sql1_beta4-cli/README.md index 34f7f5ae94..e78bb08aa2 100644 --- a/gen/sql1_beta4-cli/README.md +++ b/gen/sql1_beta4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *SQL Admin* API at revision *20200331*. The CLI is at version *4.0.1*. +This documentation was generated from the *SQL Admin* API at revision *20200331*. The CLI is at version *5.0.2*. ```bash sql1-beta4 [options] diff --git a/gen/sql1_beta4-cli/mkdocs.yml b/gen/sql1_beta4-cli/mkdocs.yml index 759854cc20..6ac887f6b8 100644 --- a/gen/sql1_beta4-cli/mkdocs.yml +++ b/gen/sql1_beta4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: SQL Admin v4.0.1+20200331 +site_name: SQL Admin v5.0.2+20200331 site_url: http://byron.github.io/google-apis-rs/google-sql1_beta4-cli site_description: A complete library to interact with SQL Admin (protocol v1beta4) @@ -7,55 +7,64 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/sql1_beta4-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['backup-runs_delete.md', 'Backup Runs', 'Delete'] -- ['backup-runs_get.md', 'Backup Runs', 'Get'] -- ['backup-runs_insert.md', 'Backup Runs', 'Insert'] -- ['backup-runs_list.md', 'Backup Runs', 'List'] -- ['databases_delete.md', 'Databases', 'Delete'] -- ['databases_get.md', 'Databases', 'Get'] -- ['databases_insert.md', 'Databases', 'Insert'] -- ['databases_list.md', 'Databases', 'List'] -- ['databases_patch.md', 'Databases', 'Patch'] -- ['databases_update.md', 'Databases', 'Update'] -- ['flags_list.md', 'Flags', 'List'] -- ['instances_add-server-ca.md', 'Instances', 'Add Server Ca'] -- ['instances_clone.md', 'Instances', 'Clone'] -- ['instances_delete.md', 'Instances', 'Delete'] -- ['instances_demote-master.md', 'Instances', 'Demote Master'] -- ['instances_export.md', 'Instances', 'Export'] -- ['instances_failover.md', 'Instances', 'Failover'] -- ['instances_get.md', 'Instances', 'Get'] -- ['instances_import.md', 'Instances', 'Import'] -- ['instances_insert.md', 'Instances', 'Insert'] -- ['instances_list.md', 'Instances', 'List'] -- 
['instances_list-server-cas.md', 'Instances', 'List Server Cas'] -- ['instances_patch.md', 'Instances', 'Patch'] -- ['instances_promote-replica.md', 'Instances', 'Promote Replica'] -- ['instances_reset-ssl-config.md', 'Instances', 'Reset Ssl Config'] -- ['instances_restart.md', 'Instances', 'Restart'] -- ['instances_restore-backup.md', 'Instances', 'Restore Backup'] -- ['instances_rotate-server-ca.md', 'Instances', 'Rotate Server Ca'] -- ['instances_start-replica.md', 'Instances', 'Start Replica'] -- ['instances_stop-replica.md', 'Instances', 'Stop Replica'] -- ['instances_truncate-log.md', 'Instances', 'Truncate Log'] -- ['instances_update.md', 'Instances', 'Update'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['projects_instances-reschedule-maintenance.md', 'Projects', 'Instances Reschedule Maintenance'] -- ['projects_instances-start-external-sync.md', 'Projects', 'Instances Start External Sync'] -- ['projects_instances-verify-external-sync-settings.md', 'Projects', 'Instances Verify External Sync Settings'] -- ['ssl-certs_create-ephemeral.md', 'Ssl Certs', 'Create Ephemeral'] -- ['ssl-certs_delete.md', 'Ssl Certs', 'Delete'] -- ['ssl-certs_get.md', 'Ssl Certs', 'Get'] -- ['ssl-certs_insert.md', 'Ssl Certs', 'Insert'] -- ['ssl-certs_list.md', 'Ssl Certs', 'List'] -- ['tiers_list.md', 'Tiers', 'List'] -- ['users_delete.md', 'Users', 'Delete'] -- ['users_insert.md', 'Users', 'Insert'] -- ['users_list.md', 'Users', 'List'] -- ['users_update.md', 'Users', 'Update'] +nav: +- Home: 'index.md' +- 'Backup Runs': + - 'Delete': 'backup-runs_delete.md' + - 'Get': 'backup-runs_get.md' + - 'Insert': 'backup-runs_insert.md' + - 'List': 'backup-runs_list.md' +- 'Databases': + - 'Delete': 'databases_delete.md' + - 'Get': 'databases_get.md' + - 'Insert': 'databases_insert.md' + - 'List': 'databases_list.md' + - 'Patch': 'databases_patch.md' + - 'Update': 'databases_update.md' +- 'Flags': + - 'List': 'flags_list.md' +- 
'Instances': + - 'Add Server Ca': 'instances_add-server-ca.md' + - 'Clone': 'instances_clone.md' + - 'Delete': 'instances_delete.md' + - 'Demote Master': 'instances_demote-master.md' + - 'Export': 'instances_export.md' + - 'Failover': 'instances_failover.md' + - 'Get': 'instances_get.md' + - 'Import': 'instances_import.md' + - 'Insert': 'instances_insert.md' + - 'List': 'instances_list.md' + - 'List Server Cas': 'instances_list-server-cas.md' + - 'Patch': 'instances_patch.md' + - 'Promote Replica': 'instances_promote-replica.md' + - 'Reset Ssl Config': 'instances_reset-ssl-config.md' + - 'Restart': 'instances_restart.md' + - 'Restore Backup': 'instances_restore-backup.md' + - 'Rotate Server Ca': 'instances_rotate-server-ca.md' + - 'Start Replica': 'instances_start-replica.md' + - 'Stop Replica': 'instances_stop-replica.md' + - 'Truncate Log': 'instances_truncate-log.md' + - 'Update': 'instances_update.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Projects': + - 'Instances Reschedule Maintenance': 'projects_instances-reschedule-maintenance.md' + - 'Instances Start External Sync': 'projects_instances-start-external-sync.md' + - 'Instances Verify External Sync Settings': 'projects_instances-verify-external-sync-settings.md' +- 'Ssl Certs': + - 'Create Ephemeral': 'ssl-certs_create-ephemeral.md' + - 'Delete': 'ssl-certs_delete.md' + - 'Get': 'ssl-certs_get.md' + - 'Insert': 'ssl-certs_insert.md' + - 'List': 'ssl-certs_list.md' +- 'Tiers': + - 'List': 'tiers_list.md' +- 'Users': + - 'Delete': 'users_delete.md' + - 'Insert': 'users_insert.md' + - 'List': 'users_list.md' + - 'Update': 'users_update.md' theme: readthedocs diff --git a/gen/sql1_beta4-cli/src/client.rs b/gen/sql1_beta4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/sql1_beta4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use 
crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/sql1_beta4-cli/src/main.rs b/gen/sql1_beta4-cli/src/main.rs index 16c0d57b43..69f8cfe86e 100644 --- a/gen/sql1_beta4-cli/src/main.rs +++ b/gen/sql1_beta4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_sql1_beta4::{api, Error, oauth2}; +use google_sql1_beta4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -268,7 +267,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1609,7 +1608,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2645,7 +2644,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "instance" => { call = call.instance(value.unwrap_or("")); @@ -2846,7 +2845,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, 
false); match key { "verify-connection-only" => { - call = call.verify_connection_only(arg_from_str(value.unwrap_or("false"), err, "verify-connection-only", "boolean")); + call = call.verify_connection_only( value.map(|v| arg_from_str(v, err, "verify-connection-only", "boolean")).unwrap_or(false)); }, "sync-mode" => { call = call.sync_mode(value.unwrap_or("")); @@ -5390,7 +5389,7 @@ async fn main() { let mut app = App::new("sql1-beta4") .author("Sebastian Thiel ") - .version("4.0.1+20200331") + .version("5.0.2+20200331") .about("API for Cloud SQL database instance management") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli") .arg(Arg::with_name("url") diff --git a/gen/sql1_beta4/Cargo.toml b/gen/sql1_beta4/Cargo.toml index 42bf177f58..c8e9668f2c 100644 --- a/gen/sql1_beta4/Cargo.toml +++ b/gen/sql1_beta4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-sql1_beta4" -version = "5.0.2-beta-1+20200331" +version = "5.0.2+20200331" authors = ["Sebastian Thiel "] description = "A complete library to interact with SQL Admin (protocol v1beta4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sql1_beta4" homepage = "https://developers.google.com/cloud-sql/" -documentation = "https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331" +documentation = "https://docs.rs/google-sql1_beta4/5.0.2+20200331" license = "MIT" keywords = ["sql", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/sql1_beta4/README.md b/gen/sql1_beta4/README.md index d94f8a7a74..339351a79d 100644 --- a/gen/sql1_beta4/README.md +++ b/gen/sql1_beta4/README.md @@ -5,32 +5,32 @@ DO NOT EDIT ! --> The `google-sql1_beta4` library allows access to all features of the *Google SQL Admin* service. 
-This documentation was generated from *SQL Admin* crate version *5.0.2-beta-1+20200331*, where *20200331* is the exact revision of the *sql:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *SQL Admin* crate version *5.0.2+20200331*, where *20200331* is the exact revision of the *sql:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *SQL Admin* *v1_beta4* API can be found at the [official documentation site](https://developers.google.com/cloud-sql/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/SQLAdmin) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/SQLAdmin) ... -* [backup runs](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::BackupRun) - * [*delete*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::BackupRunDeleteCall), [*get*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::BackupRunGetCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::BackupRunInsertCall) and [*list*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::BackupRunListCall) -* [databases](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::Database) - * [*delete*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::DatabaseDeleteCall), [*get*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::DatabaseGetCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::DatabaseInsertCall), 
[*list*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::DatabaseListCall), [*patch*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::DatabasePatchCall) and [*update*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::DatabaseUpdateCall) -* [flags](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::Flag) - * [*list*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::FlagListCall) +* [backup runs](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::BackupRun) + * [*delete*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::BackupRunDeleteCall), [*get*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::BackupRunGetCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::BackupRunInsertCall) and [*list*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::BackupRunListCall) +* [databases](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::Database) + * [*delete*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::DatabaseDeleteCall), [*get*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::DatabaseGetCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::DatabaseInsertCall), [*list*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::DatabaseListCall), [*patch*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::DatabasePatchCall) and [*update*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::DatabaseUpdateCall) +* [flags](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::Flag) + * [*list*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::FlagListCall) * instances - * [*add server 
ca*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceAddServerCaCall), [*clone*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceCloneCall), [*delete*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceDeleteCall), [*demote master*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceDemoteMasterCall), [*export*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceExportCall), [*failover*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceFailoverCall), [*get*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceGetCall), [*import*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceImportCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceInsertCall), [*list*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceListCall), [*list server cas*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceListServerCaCall), [*patch*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstancePatchCall), [*promote replica*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstancePromoteReplicaCall), [*reset ssl config*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceResetSslConfigCall), [*restart*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceRestartCall), [*restore backup*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceRestoreBackupCall), [*rotate server ca*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceRotateServerCaCall), [*start 
replica*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceStartReplicaCall), [*stop replica*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceStopReplicaCall), [*truncate log*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceTruncateLogCall) and [*update*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::InstanceUpdateCall) -* [operations](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::Operation) - * [*get*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::OperationGetCall) and [*list*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::OperationListCall) + * [*add server ca*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceAddServerCaCall), [*clone*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceCloneCall), [*delete*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceDeleteCall), [*demote master*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceDemoteMasterCall), [*export*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceExportCall), [*failover*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceFailoverCall), [*get*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceGetCall), [*import*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceImportCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceInsertCall), [*list*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceListCall), [*list server cas*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceListServerCaCall), 
[*patch*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstancePatchCall), [*promote replica*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstancePromoteReplicaCall), [*reset ssl config*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceResetSslConfigCall), [*restart*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceRestartCall), [*restore backup*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceRestoreBackupCall), [*rotate server ca*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceRotateServerCaCall), [*start replica*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceStartReplicaCall), [*stop replica*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceStopReplicaCall), [*truncate log*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceTruncateLogCall) and [*update*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::InstanceUpdateCall) +* [operations](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::Operation) + * [*get*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::OperationGetCall) and [*list*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::OperationListCall) * projects - * [*instances reschedule maintenance*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::ProjectInstanceRescheduleMaintenanceCall), [*instances start external sync*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::ProjectInstanceStartExternalSyncCall) and [*instances verify external sync settings*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::ProjectInstanceVerifyExternalSyncSettingCall) -* [ssl 
certs](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::SslCert) - * [*create ephemeral*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::SslCertCreateEphemeralCall), [*delete*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::SslCertDeleteCall), [*get*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::SslCertGetCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::SslCertInsertCall) and [*list*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::SslCertListCall) -* [tiers](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::Tier) - * [*list*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::TierListCall) -* [users](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::User) - * [*delete*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::UserDeleteCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::UserInsertCall), [*list*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::UserListCall) and [*update*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/api::UserUpdateCall) + * [*instances reschedule maintenance*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::ProjectInstanceRescheduleMaintenanceCall), [*instances start external sync*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::ProjectInstanceStartExternalSyncCall) and [*instances verify external sync settings*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::ProjectInstanceVerifyExternalSyncSettingCall) +* [ssl certs](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::SslCert) + * [*create 
ephemeral*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::SslCertCreateEphemeralCall), [*delete*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::SslCertDeleteCall), [*get*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::SslCertGetCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::SslCertInsertCall) and [*list*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::SslCertListCall) +* [tiers](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::Tier) + * [*list*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::TierListCall) +* [users](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::User) + * [*delete*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::UserDeleteCall), [*insert*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::UserInsertCall), [*list*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::UserListCall) and [*update*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/api::UserUpdateCall) @@ -39,17 +39,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/SQLAdmin)** +* **[Hub](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/SQLAdmin)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::CallBuilder) +* **[Resources](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::Part)** + * **[Parts](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -173,17 +173,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -193,29 +193,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::Delegate) to the -[Method Builder](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::Delegate) to the +[Method Builder](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::RequestValue) and -[decodable](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::RequestValue) and +[decodable](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-sql1_beta4/5.0.2-beta-1+20200331/google_sql1_beta4/client::RequestValue) are moved +* [request values](https://docs.rs/google-sql1_beta4/5.0.2+20200331/google_sql1_beta4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/sql1_beta4/src/api.rs b/gen/sql1_beta4/src/api.rs index f2ae5646d6..f883280655 100644 --- a/gen/sql1_beta4/src/api.rs +++ b/gen/sql1_beta4/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> SQLAdmin { SQLAdmin { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://sqladmin.googleapis.com/".to_string(), _root_url: "https://sqladmin.googleapis.com/".to_string(), } @@ -166,7 +166,7 @@ impl<'a, S> SQLAdmin { } /// Set the user-agent header field to use in all requests to the server. 
- /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/sql1_beta4/src/client.rs b/gen/sql1_beta4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/sql1_beta4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/sql1_beta4/src/lib.rs b/gen/sql1_beta4/src/lib.rs index 071f004c7f..9ddab584cf 100644 --- a/gen/sql1_beta4/src/lib.rs +++ b/gen/sql1_beta4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *SQL Admin* crate version *5.0.2-beta-1+20200331*, where *20200331* is the exact revision of the *sql:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *SQL Admin* crate version *5.0.2+20200331*, where *20200331* is the exact revision of the *sql:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *SQL Admin* *v1_beta4* API can be found at the //! [official documentation site](https://developers.google.com/cloud-sql/). diff --git a/gen/sqladmin1-cli/Cargo.toml b/gen/sqladmin1-cli/Cargo.toml index bae58acc8e..f9e31c2d95 100644 --- a/gen/sqladmin1-cli/Cargo.toml +++ b/gen/sqladmin1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-sqladmin1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20221209" authors = ["Sebastian Thiel "] description = "A complete library to interact with SQL Admin (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sqladmin1-cli" @@ -20,13 +20,13 @@ name = "sqladmin1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-sqladmin1] path = "../sqladmin1" -version = "4.0.1+20220226" +version = "5.0.2+20221209" + diff --git a/gen/sqladmin1-cli/README.md b/gen/sqladmin1-cli/README.md index b62caa5290..14b45ece5d 100644 --- a/gen/sqladmin1-cli/README.md +++ b/gen/sqladmin1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation 
was generated from the *SQL Admin* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *SQL Admin* API at revision *20221209*. The CLI is at version *5.0.2*. ```bash sqladmin1 [options] @@ -85,6 +85,7 @@ sqladmin1 [options] list [-p ]... [-o ] users delete [-p ]... [-o ] + get [-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] update (-r )... [-p ]... [-o ] diff --git a/gen/sqladmin1-cli/mkdocs.yml b/gen/sqladmin1-cli/mkdocs.yml index d2419bc430..0704f58089 100644 --- a/gen/sqladmin1-cli/mkdocs.yml +++ b/gen/sqladmin1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: SQL Admin v4.0.1+20220226 +site_name: SQL Admin v5.0.2+20221209 site_url: http://byron.github.io/google-apis-rs/google-sqladmin1-cli site_description: A complete library to interact with SQL Admin (protocol v1) @@ -7,57 +7,68 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/sqladmin1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['backup-runs_delete.md', 'Backup Runs', 'Delete'] -- ['backup-runs_get.md', 'Backup Runs', 'Get'] -- ['backup-runs_insert.md', 'Backup Runs', 'Insert'] -- ['backup-runs_list.md', 'Backup Runs', 'List'] -- ['connect_generate-ephemeral.md', 'Connect', 'Generate Ephemeral'] -- ['connect_get.md', 'Connect', 'Get'] -- ['databases_delete.md', 'Databases', 'Delete'] -- ['databases_get.md', 'Databases', 'Get'] -- ['databases_insert.md', 'Databases', 'Insert'] -- ['databases_list.md', 'Databases', 'List'] -- ['databases_patch.md', 'Databases', 'Patch'] -- ['databases_update.md', 'Databases', 'Update'] -- ['flags_list.md', 'Flags', 'List'] -- ['instances_add-server-ca.md', 'Instances', 'Add Server Ca'] -- ['instances_clone.md', 'Instances', 'Clone'] -- ['instances_delete.md', 'Instances', 'Delete'] -- ['instances_demote-master.md', 'Instances', 'Demote Master'] -- ['instances_export.md', 'Instances', 'Export'] -- ['instances_failover.md', 'Instances', 'Failover'] -- 
['instances_get.md', 'Instances', 'Get'] -- ['instances_import.md', 'Instances', 'Import'] -- ['instances_insert.md', 'Instances', 'Insert'] -- ['instances_list.md', 'Instances', 'List'] -- ['instances_list-server-cas.md', 'Instances', 'List Server Cas'] -- ['instances_patch.md', 'Instances', 'Patch'] -- ['instances_promote-replica.md', 'Instances', 'Promote Replica'] -- ['instances_reset-ssl-config.md', 'Instances', 'Reset Ssl Config'] -- ['instances_restart.md', 'Instances', 'Restart'] -- ['instances_restore-backup.md', 'Instances', 'Restore Backup'] -- ['instances_rotate-server-ca.md', 'Instances', 'Rotate Server Ca'] -- ['instances_start-replica.md', 'Instances', 'Start Replica'] -- ['instances_stop-replica.md', 'Instances', 'Stop Replica'] -- ['instances_truncate-log.md', 'Instances', 'Truncate Log'] -- ['instances_update.md', 'Instances', 'Update'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['projects_instances-reschedule-maintenance.md', 'Projects', 'Instances Reschedule Maintenance'] -- ['projects_instances-start-external-sync.md', 'Projects', 'Instances Start External Sync'] -- ['projects_instances-verify-external-sync-settings.md', 'Projects', 'Instances Verify External Sync Settings'] -- ['ssl-certs_create-ephemeral.md', 'Ssl Certs', 'Create Ephemeral'] -- ['ssl-certs_delete.md', 'Ssl Certs', 'Delete'] -- ['ssl-certs_get.md', 'Ssl Certs', 'Get'] -- ['ssl-certs_insert.md', 'Ssl Certs', 'Insert'] -- ['ssl-certs_list.md', 'Ssl Certs', 'List'] -- ['tiers_list.md', 'Tiers', 'List'] -- ['users_delete.md', 'Users', 'Delete'] -- ['users_insert.md', 'Users', 'Insert'] -- ['users_list.md', 'Users', 'List'] -- ['users_update.md', 'Users', 'Update'] +nav: +- Home: 'index.md' +- 'Backup Runs': + - 'Delete': 'backup-runs_delete.md' + - 'Get': 'backup-runs_get.md' + - 'Insert': 'backup-runs_insert.md' + - 'List': 'backup-runs_list.md' +- 'Connect': + - 'Generate Ephemeral': 'connect_generate-ephemeral.md' + - 'Get': 
'connect_get.md' +- 'Databases': + - 'Delete': 'databases_delete.md' + - 'Get': 'databases_get.md' + - 'Insert': 'databases_insert.md' + - 'List': 'databases_list.md' + - 'Patch': 'databases_patch.md' + - 'Update': 'databases_update.md' +- 'Flags': + - 'List': 'flags_list.md' +- 'Instances': + - 'Add Server Ca': 'instances_add-server-ca.md' + - 'Clone': 'instances_clone.md' + - 'Delete': 'instances_delete.md' + - 'Demote Master': 'instances_demote-master.md' + - 'Export': 'instances_export.md' + - 'Failover': 'instances_failover.md' + - 'Get': 'instances_get.md' + - 'Import': 'instances_import.md' + - 'Insert': 'instances_insert.md' + - 'List': 'instances_list.md' + - 'List Server Cas': 'instances_list-server-cas.md' + - 'Patch': 'instances_patch.md' + - 'Promote Replica': 'instances_promote-replica.md' + - 'Reset Ssl Config': 'instances_reset-ssl-config.md' + - 'Restart': 'instances_restart.md' + - 'Restore Backup': 'instances_restore-backup.md' + - 'Rotate Server Ca': 'instances_rotate-server-ca.md' + - 'Start Replica': 'instances_start-replica.md' + - 'Stop Replica': 'instances_stop-replica.md' + - 'Truncate Log': 'instances_truncate-log.md' + - 'Update': 'instances_update.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Projects': + - 'Instances Reschedule Maintenance': 'projects_instances-reschedule-maintenance.md' + - 'Instances Start External Sync': 'projects_instances-start-external-sync.md' + - 'Instances Verify External Sync Settings': 'projects_instances-verify-external-sync-settings.md' +- 'Ssl Certs': + - 'Create Ephemeral': 'ssl-certs_create-ephemeral.md' + - 'Delete': 'ssl-certs_delete.md' + - 'Get': 'ssl-certs_get.md' + - 'Insert': 'ssl-certs_insert.md' + - 'List': 'ssl-certs_list.md' +- 'Tiers': + - 'List': 'tiers_list.md' +- 'Users': + - 'Delete': 'users_delete.md' + - 'Get': 'users_get.md' + - 'Insert': 'users_insert.md' + - 'List': 'users_list.md' + - 'Update': 'users_update.md' theme: readthedocs diff 
--git a/gen/sqladmin1-cli/src/client.rs b/gen/sqladmin1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/sqladmin1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/sqladmin1-cli/src/main.rs b/gen/sqladmin1-cli/src/main.rs index e91df62af4..12c6fea56e 100644 --- a/gen/sqladmin1-cli/src/main.rs +++ b/gen/sqladmin1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_sqladmin1::{api, Error, oauth2}; +use google_sqladmin1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -196,10 +195,11 @@ where "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "start-time" => Some(("startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "window-start-time" => Some(("windowStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-kind", "code", "description", "disk-encryption-configuration", "disk-encryption-status", "end-time", "enqueued-time", "error", "id", "instance", "kind", "kms-key-name", "kms-key-version-name", "location", "message", "self-link", "start-time", "status", "type", "window-start-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-kind", "code", "description", 
"disk-encryption-configuration", "disk-encryption-status", "end-time", "enqueued-time", "error", "id", "instance", "kind", "kms-key-name", "kms-key-version-name", "location", "message", "self-link", "start-time", "status", "time-zone", "type", "window-start-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -269,7 +269,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -413,7 +413,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -1035,12 +1035,13 @@ where "clone-context.bin-log-coordinates.bin-log-file-name" => Some(("cloneContext.binLogCoordinates.binLogFileName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clone-context.bin-log-coordinates.bin-log-position" => Some(("cloneContext.binLogCoordinates.binLogPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clone-context.bin-log-coordinates.kind" => Some(("cloneContext.binLogCoordinates.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-context.database-names" => Some(("cloneContext.databaseNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "clone-context.destination-instance-name" => Some(("cloneContext.destinationInstanceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clone-context.kind" => Some(("cloneContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "clone-context.pitr-timestamp-ms" => Some(("cloneContext.pitrTimestampMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clone-context.point-in-time" => Some(("cloneContext.pointInTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allocated-ip-range", "bin-log-coordinates", "bin-log-file-name", "bin-log-position", "clone-context", "destination-instance-name", "kind", "pitr-timestamp-ms", "point-in-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allocated-ip-range", "bin-log-coordinates", "bin-log-file-name", "bin-log-position", "clone-context", "database-names", "destination-instance-name", "kind", "pitr-timestamp-ms", "point-in-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1270,6 +1271,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "export-context.bak-export-options.stripe-count" => Some(("exportContext.bakExportOptions.stripeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "export-context.bak-export-options.striped" => Some(("exportContext.bakExportOptions.striped", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "export-context.csv-export-options.escape-character" => Some(("exportContext.csvExportOptions.escapeCharacter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "export-context.csv-export-options.fields-terminated-by" => Some(("exportContext.csvExportOptions.fieldsTerminatedBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "export-context.csv-export-options.lines-terminated-by" => Some(("exportContext.csvExportOptions.linesTerminatedBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1284,7 +1287,7 @@ where "export-context.sql-export-options.tables" => Some(("exportContext.sqlExportOptions.tables", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "export-context.uri" => Some(("exportContext.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["csv-export-options", "databases", "escape-character", "export-context", "fields-terminated-by", "file-type", "kind", "lines-terminated-by", "master-data", "mysql-export-options", "offload", "quote-character", "schema-only", "select-query", "sql-export-options", "tables", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bak-export-options", "csv-export-options", "databases", "escape-character", "export-context", "fields-terminated-by", "file-type", "kind", "lines-terminated-by", "master-data", "mysql-export-options", "offload", "quote-character", "schema-only", "select-query", "sql-export-options", "stripe-count", "striped", "tables", "uri"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1508,6 +1511,7 @@ where "import-context.bak-import-options.encryption-options.cert-path" => Some(("importContext.bakImportOptions.encryptionOptions.certPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "import-context.bak-import-options.encryption-options.pvk-password" => Some(("importContext.bakImportOptions.encryptionOptions.pvkPassword", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "import-context.bak-import-options.encryption-options.pvk-path" => Some(("importContext.bakImportOptions.encryptionOptions.pvkPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "import-context.bak-import-options.striped" => Some(("importContext.bakImportOptions.striped", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "import-context.csv-import-options.columns" => Some(("importContext.csvImportOptions.columns", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "import-context.csv-import-options.escape-character" => Some(("importContext.csvImportOptions.escapeCharacter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "import-context.csv-import-options.fields-terminated-by" => Some(("importContext.csvImportOptions.fieldsTerminatedBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1520,7 +1524,7 @@ where "import-context.kind" => Some(("importContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "import-context.uri" => Some(("importContext.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bak-import-options", "cert-path", "columns", "csv-import-options", "database", "encryption-options", "escape-character", "fields-terminated-by", "file-type", "import-context", "import-user", 
"kind", "lines-terminated-by", "pvk-password", "pvk-path", "quote-character", "table", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bak-import-options", "cert-path", "columns", "csv-import-options", "database", "encryption-options", "escape-character", "fields-terminated-by", "file-type", "import-context", "import-user", "kind", "lines-terminated-by", "pvk-password", "pvk-path", "quote-character", "striped", "table", "uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1688,16 +1692,19 @@ where "settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.backup-configuration.transaction-log-retention-days" => Some(("settings.backupConfiguration.transactionLogRetentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.collation" => Some(("settings.collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.connector-enforcement" => Some(("settings.connectorEnforcement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.deletion-protection-enabled" => Some(("settings.deletionProtectionEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"settings.insights-config.query-insights-enabled" => Some(("settings.insightsConfig.queryInsightsEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-plans-per-minute" => Some(("settings.insightsConfig.queryPlansPerMinute", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.query-string-length" => Some(("settings.insightsConfig.queryStringLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.record-application-tags" => Some(("settings.insightsConfig.recordApplicationTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.record-client-address" => Some(("settings.insightsConfig.recordClientAddress", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.allocated-ip-range" => Some(("settings.ipConfiguration.allocatedIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.ip-configuration.enable-private-path-for-google-cloud-services" => Some(("settings.ipConfiguration.enablePrivatePathForGoogleCloudServices", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1712,6 +1719,7 @@ where "settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.complexity" => 
Some(("settings.passwordValidationPolicy.complexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.disallow-username-substring" => Some(("settings.passwordValidationPolicy.disallowUsernameSubstring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.password-validation-policy.enable-password-policy" => Some(("settings.passwordValidationPolicy.enablePasswordPolicy", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.password-validation-policy.min-length" => Some(("settings.passwordValidationPolicy.minLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.password-validation-policy.password-change-interval" => Some(("settings.passwordValidationPolicy.passwordChangeInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.reuse-interval" => Some(("settings.passwordValidationPolicy.reuseInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -1720,14 +1728,17 @@ where "settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.bucket" => Some(("settings.sqlServerAuditConfig.bucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.kind" => Some(("settings.sqlServerAuditConfig.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.retention-interval" => Some(("settings.sqlServerAuditConfig.retentionInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.upload-interval" => Some(("settings.sqlServerAuditConfig.uploadInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.storage-auto-resize" => Some(("settings.storageAutoResize", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.time-zone" => Some(("settings.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "suspension-reason" => Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", 
"maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "transaction-log-retention-days", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "connector-enforcement", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", 
"deletion-protection-enabled", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enable-password-policy", "enable-private-path-for-google-cloud-services", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-interval", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "time-zone", "transaction-log-retention-days", "update-track", "upload-interval", "user-labels", "username", "verify-server-certificate", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1797,7 
+1808,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2009,16 +2020,19 @@ where "settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.backup-configuration.transaction-log-retention-days" => Some(("settings.backupConfiguration.transactionLogRetentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.collation" => Some(("settings.collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.connector-enforcement" => Some(("settings.connectorEnforcement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.deletion-protection-enabled" => Some(("settings.deletionProtectionEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-insights-enabled" => Some(("settings.insightsConfig.queryInsightsEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-plans-per-minute" => 
Some(("settings.insightsConfig.queryPlansPerMinute", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.query-string-length" => Some(("settings.insightsConfig.queryStringLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.record-application-tags" => Some(("settings.insightsConfig.recordApplicationTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.record-client-address" => Some(("settings.insightsConfig.recordClientAddress", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.allocated-ip-range" => Some(("settings.ipConfiguration.allocatedIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.ip-configuration.enable-private-path-for-google-cloud-services" => Some(("settings.ipConfiguration.enablePrivatePathForGoogleCloudServices", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2033,6 +2047,7 @@ where "settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.complexity" => Some(("settings.passwordValidationPolicy.complexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.disallow-username-substring" => 
Some(("settings.passwordValidationPolicy.disallowUsernameSubstring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.password-validation-policy.enable-password-policy" => Some(("settings.passwordValidationPolicy.enablePasswordPolicy", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.password-validation-policy.min-length" => Some(("settings.passwordValidationPolicy.minLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.password-validation-policy.password-change-interval" => Some(("settings.passwordValidationPolicy.passwordChangeInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.reuse-interval" => Some(("settings.passwordValidationPolicy.reuseInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -2041,14 +2056,17 @@ where "settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.bucket" => Some(("settings.sqlServerAuditConfig.bucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.kind" => Some(("settings.sqlServerAuditConfig.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.retention-interval" => Some(("settings.sqlServerAuditConfig.retentionInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.upload-interval" => Some(("settings.sqlServerAuditConfig.uploadInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.storage-auto-resize" => Some(("settings.storageAutoResize", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.time-zone" => Some(("settings.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "suspension-reason" => Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", 
"password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "transaction-log-retention-days", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "connector-enforcement", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "deletion-protection-enabled", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enable-password-policy", 
"enable-private-path-for-google-cloud-services", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-interval", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "time-zone", "transaction-log-retention-days", "update-track", "upload-interval", "user-labels", "username", "verify-server-certificate", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2736,16 +2754,19 @@ where "settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "settings.backup-configuration.transaction-log-retention-days" => Some(("settings.backupConfiguration.transactionLogRetentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.collation" => Some(("settings.collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.connector-enforcement" => Some(("settings.connectorEnforcement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.deletion-protection-enabled" => Some(("settings.deletionProtectionEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-insights-enabled" => Some(("settings.insightsConfig.queryInsightsEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-plans-per-minute" => Some(("settings.insightsConfig.queryPlansPerMinute", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.query-string-length" => Some(("settings.insightsConfig.queryStringLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.record-application-tags" => Some(("settings.insightsConfig.recordApplicationTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.record-client-address" => 
Some(("settings.insightsConfig.recordClientAddress", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.allocated-ip-range" => Some(("settings.ipConfiguration.allocatedIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.ip-configuration.enable-private-path-for-google-cloud-services" => Some(("settings.ipConfiguration.enablePrivatePathForGoogleCloudServices", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2760,6 +2781,7 @@ where "settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.complexity" => Some(("settings.passwordValidationPolicy.complexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.disallow-username-substring" => Some(("settings.passwordValidationPolicy.disallowUsernameSubstring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.password-validation-policy.enable-password-policy" => Some(("settings.passwordValidationPolicy.enablePasswordPolicy", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.password-validation-policy.min-length" => Some(("settings.passwordValidationPolicy.minLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"settings.password-validation-policy.password-change-interval" => Some(("settings.passwordValidationPolicy.passwordChangeInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.reuse-interval" => Some(("settings.passwordValidationPolicy.reuseInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -2768,14 +2790,17 @@ where "settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.bucket" => Some(("settings.sqlServerAuditConfig.bucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.kind" => Some(("settings.sqlServerAuditConfig.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.retention-interval" => Some(("settings.sqlServerAuditConfig.retentionInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.upload-interval" => Some(("settings.sqlServerAuditConfig.uploadInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.storage-auto-resize" => Some(("settings.storageAutoResize", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.time-zone" => Some(("settings.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "suspension-reason" => 
Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", 
"scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "transaction-log-retention-days", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "connector-enforcement", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "deletion-protection-enabled", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enable-password-policy", "enable-private-path-for-google-cloud-services", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", 
"on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-interval", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "time-zone", "transaction-log-retention-days", "update-track", "upload-interval", "user-labels", "username", "verify-server-certificate", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2897,7 +2922,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "instance" => { call = call.instance(value.unwrap_or("")); @@ -3646,6 +3671,62 @@ where } } + async fn _users_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.users().get(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("name").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "host" => { + call = call.host(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["host"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _users_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3669,6 +3750,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "dual-password-type" => Some(("dualPasswordType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3677,6 +3759,7 @@ where "password" => Some(("password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "password-policy.allowed-failed-attempts" => Some(("passwordPolicy.allowedFailedAttempts", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "password-policy.enable-failed-attempts-check" => Some(("passwordPolicy.enableFailedAttemptsCheck", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "password-policy.enable-password-verification" => Some(("passwordPolicy.enablePasswordVerification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "password-policy.password-expiration-duration" => Some(("passwordPolicy.passwordExpirationDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "password-policy.status.locked" => Some(("passwordPolicy.status.locked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "password-policy.status.password-expiration-time" => Some(("passwordPolicy.status.passwordExpirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3685,7 +3768,7 @@ where "sqlserver-user-details.server-roles" => Some(("sqlserverUserDetails.serverRoles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-failed-attempts", "disabled", "enable-failed-attempts-check", "etag", "host", "instance", "kind", "locked", "name", 
"password", "password-expiration-duration", "password-expiration-time", "password-policy", "project", "server-roles", "sqlserver-user-details", "status", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-failed-attempts", "disabled", "dual-password-type", "enable-failed-attempts-check", "enable-password-verification", "etag", "host", "instance", "kind", "locked", "name", "password", "password-expiration-duration", "password-expiration-time", "password-policy", "project", "server-roles", "sqlserver-user-details", "status", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3820,6 +3903,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "dual-password-type" => Some(("dualPasswordType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3828,6 +3912,7 @@ where "password" => Some(("password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "password-policy.allowed-failed-attempts" => Some(("passwordPolicy.allowedFailedAttempts", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "password-policy.enable-failed-attempts-check" => Some(("passwordPolicy.enableFailedAttemptsCheck", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "password-policy.enable-password-verification" => Some(("passwordPolicy.enablePasswordVerification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "password-policy.password-expiration-duration" => Some(("passwordPolicy.passwordExpirationDuration", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "password-policy.status.locked" => Some(("passwordPolicy.status.locked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "password-policy.status.password-expiration-time" => Some(("passwordPolicy.status.passwordExpirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3836,7 +3921,7 @@ where "sqlserver-user-details.server-roles" => Some(("sqlserverUserDetails.serverRoles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-failed-attempts", "disabled", "enable-failed-attempts-check", "etag", "host", "instance", "kind", "locked", "name", "password", "password-expiration-duration", "password-expiration-time", "password-policy", "project", "server-roles", "sqlserver-user-details", "status", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-failed-attempts", "disabled", "dual-password-type", "enable-failed-attempts-check", "enable-password-verification", "etag", "host", "instance", "kind", "locked", "name", "password", "password-expiration-duration", "password-expiration-time", "password-policy", "project", "server-roles", "sqlserver-user-details", "status", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4120,6 +4205,9 @@ where ("delete", Some(opt)) => { call_result = self._users_delete(opt, dry_run, &mut err).await; }, + ("get", Some(opt)) => { + call_result = self._users_get(opt, dry_run, &mut err).await; + }, ("insert", Some(opt)) => { call_result = self._users_insert(opt, dry_run, &mut err).await; }, @@ -4972,7 +5060,7 @@ async fn main() { Some(false)), ]), ("patch", - Some(r##"Updates settings of a Cloud SQL instance. 
This method supports patch semantics."##), + Some(r##"Partially updates settings of a Cloud SQL instance by merging the request with the current configuration. This method supports patch semantics."##), "Details at http://byron.github.io/google-apis-rs/google_sqladmin1_cli/instances_patch", vec![ (Some(r##"project"##), @@ -5633,7 +5721,7 @@ async fn main() { ]), ]), - ("users", "methods: 'delete', 'insert', 'list' and 'update'", vec![ + ("users", "methods: 'delete', 'get', 'insert', 'list' and 'update'", vec![ ("delete", Some(r##"Deletes a user from a Cloud SQL instance."##), "Details at http://byron.github.io/google-apis-rs/google_sqladmin1_cli/users_delete", @@ -5656,6 +5744,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"Retrieves a resource containing information about a user."##), + "Details at http://byron.github.io/google-apis-rs/google_sqladmin1_cli/users_get", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID of the project that contains the instance."##), + Some(true), + Some(false)), + + (Some(r##"instance"##), + None, + Some(r##"Database instance ID. 
This does not include the project ID."##), + Some(true), + Some(false)), + + (Some(r##"name"##), + None, + Some(r##"User of the instance."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5764,7 +5886,7 @@ async fn main() { let mut app = App::new("sqladmin1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20221209") .about("API for Cloud SQL database instance management") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_sqladmin1_cli") .arg(Arg::with_name("url") diff --git a/gen/sqladmin1/Cargo.toml b/gen/sqladmin1/Cargo.toml index 58e554ec78..f1af56a2d2 100644 --- a/gen/sqladmin1/Cargo.toml +++ b/gen/sqladmin1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-sqladmin1" -version = "5.0.2-beta-1+20221209" +version = "5.0.2+20221209" authors = ["Sebastian Thiel "] description = "A complete library to interact with SQL Admin (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sqladmin1" homepage = "https://developers.google.com/cloud-sql/" -documentation = "https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209" +documentation = "https://docs.rs/google-sqladmin1/5.0.2+20221209" license = "MIT" keywords = ["sqladmin", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/sqladmin1/README.md b/gen/sqladmin1/README.md index 5ded20034c..5347987ee8 100644 --- a/gen/sqladmin1/README.md +++ b/gen/sqladmin1/README.md @@ -5,34 +5,34 @@ DO NOT EDIT ! --> The `google-sqladmin1` library allows access to all features of the *Google SQL Admin* service. 
-This documentation was generated from *SQL Admin* crate version *5.0.2-beta-1+20221209*, where *20221209* is the exact revision of the *sqladmin:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *SQL Admin* crate version *5.0.2+20221209*, where *20221209* is the exact revision of the *sqladmin:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *SQL Admin* *v1* API can be found at the [official documentation site](https://developers.google.com/cloud-sql/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/SQLAdmin) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/SQLAdmin) ... -* [backup runs](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::BackupRun) - * [*delete*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::BackupRunDeleteCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::BackupRunGetCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::BackupRunInsertCall) and [*list*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::BackupRunListCall) +* [backup runs](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::BackupRun) + * [*delete*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::BackupRunDeleteCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::BackupRunGetCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::BackupRunInsertCall) and [*list*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::BackupRunListCall) * connect - * [*generate 
ephemeral*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::ConnectGenerateEphemeralCall) and [*get*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::ConnectGetCall) -* [databases](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::Database) - * [*delete*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::DatabaseDeleteCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::DatabaseGetCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::DatabaseInsertCall), [*list*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::DatabaseListCall), [*patch*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::DatabasePatchCall) and [*update*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::DatabaseUpdateCall) -* [flags](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::Flag) - * [*list*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::FlagListCall) + * [*generate ephemeral*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::ConnectGenerateEphemeralCall) and [*get*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::ConnectGetCall) +* [databases](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::Database) + * [*delete*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::DatabaseDeleteCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::DatabaseGetCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::DatabaseInsertCall), [*list*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::DatabaseListCall), [*patch*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::DatabasePatchCall) and 
[*update*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::DatabaseUpdateCall) +* [flags](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::Flag) + * [*list*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::FlagListCall) * instances - * [*add server ca*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceAddServerCaCall), [*clone*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceCloneCall), [*delete*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceDeleteCall), [*demote master*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceDemoteMasterCall), [*export*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceExportCall), [*failover*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceFailoverCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceGetCall), [*import*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceImportCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceInsertCall), [*list*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceListCall), [*list server cas*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceListServerCaCall), [*patch*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstancePatchCall), [*promote replica*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstancePromoteReplicaCall), [*reset ssl config*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceResetSslConfigCall), 
[*restart*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceRestartCall), [*restore backup*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceRestoreBackupCall), [*rotate server ca*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceRotateServerCaCall), [*start replica*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceStartReplicaCall), [*stop replica*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceStopReplicaCall), [*truncate log*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceTruncateLogCall) and [*update*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::InstanceUpdateCall) -* [operations](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::Operation) - * [*get*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::OperationGetCall) and [*list*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::OperationListCall) + * [*add server ca*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceAddServerCaCall), [*clone*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceCloneCall), [*delete*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceDeleteCall), [*demote master*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceDemoteMasterCall), [*export*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceExportCall), [*failover*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceFailoverCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceGetCall), [*import*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceImportCall), 
[*insert*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceInsertCall), [*list*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceListCall), [*list server cas*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceListServerCaCall), [*patch*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstancePatchCall), [*promote replica*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstancePromoteReplicaCall), [*reset ssl config*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceResetSslConfigCall), [*restart*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceRestartCall), [*restore backup*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceRestoreBackupCall), [*rotate server ca*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceRotateServerCaCall), [*start replica*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceStartReplicaCall), [*stop replica*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceStopReplicaCall), [*truncate log*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceTruncateLogCall) and [*update*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::InstanceUpdateCall) +* [operations](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::Operation) + * [*get*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::OperationGetCall) and [*list*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::OperationListCall) * projects - * [*instances reschedule maintenance*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::ProjectInstanceRescheduleMaintenanceCall), [*instances start external 
sync*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::ProjectInstanceStartExternalSyncCall) and [*instances verify external sync settings*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::ProjectInstanceVerifyExternalSyncSettingCall) -* [ssl certs](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::SslCert) - * [*create ephemeral*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::SslCertCreateEphemeralCall), [*delete*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::SslCertDeleteCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::SslCertGetCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::SslCertInsertCall) and [*list*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::SslCertListCall) -* [tiers](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::Tier) - * [*list*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::TierListCall) -* [users](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::User) - * [*delete*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::UserDeleteCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::UserGetCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::UserInsertCall), [*list*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::UserListCall) and [*update*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/api::UserUpdateCall) + * [*instances reschedule maintenance*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::ProjectInstanceRescheduleMaintenanceCall), [*instances start external 
sync*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::ProjectInstanceStartExternalSyncCall) and [*instances verify external sync settings*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::ProjectInstanceVerifyExternalSyncSettingCall) +* [ssl certs](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::SslCert) + * [*create ephemeral*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::SslCertCreateEphemeralCall), [*delete*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::SslCertDeleteCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::SslCertGetCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::SslCertInsertCall) and [*list*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::SslCertListCall) +* [tiers](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::Tier) + * [*list*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::TierListCall) +* [users](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::User) + * [*delete*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::UserDeleteCall), [*get*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::UserGetCall), [*insert*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::UserInsertCall), [*list*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::UserListCall) and [*update*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/api::UserUpdateCall) @@ -41,17 +41,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/SQLAdmin)** +* **[Hub](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/SQLAdmin)** * a 
central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::CallBuilder) -* **[Resources](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::CallBuilder) +* **[Resources](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::Part)** + * **[Parts](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -175,17 +175,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -195,29 +195,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::Delegate) to the -[Method Builder](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::Delegate) to the +[Method Builder](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::RequestValue) and -[decodable](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::RequestValue) and +[decodable](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-sqladmin1/5.0.2-beta-1+20221209/google_sqladmin1/client::RequestValue) are moved +* [request values](https://docs.rs/google-sqladmin1/5.0.2+20221209/google_sqladmin1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/sqladmin1/src/api.rs b/gen/sqladmin1/src/api.rs index 122e8e1998..53848519dd 100644 --- a/gen/sqladmin1/src/api.rs +++ b/gen/sqladmin1/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> SQLAdmin { SQLAdmin { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://sqladmin.googleapis.com/".to_string(), _root_url: "https://sqladmin.googleapis.com/".to_string(), } @@ -169,7 +169,7 @@ impl<'a, S> SQLAdmin { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/sqladmin1/src/client.rs b/gen/sqladmin1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/sqladmin1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/sqladmin1/src/lib.rs b/gen/sqladmin1/src/lib.rs index 0bb0439d72..943eb8bb6d 100644 --- a/gen/sqladmin1/src/lib.rs +++ b/gen/sqladmin1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *SQL Admin* crate version *5.0.2-beta-1+20221209*, where *20221209* is the exact revision of the *sqladmin:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *SQL Admin* crate version *5.0.2+20221209*, where *20221209* is the exact revision of the *sqladmin:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *SQL Admin* *v1* API can be found at the //! [official documentation site](https://developers.google.com/cloud-sql/). diff --git a/gen/sqladmin1_beta4-cli/Cargo.toml b/gen/sqladmin1_beta4-cli/Cargo.toml index 0262e3bc96..1ff076fd82 100644 --- a/gen/sqladmin1_beta4-cli/Cargo.toml +++ b/gen/sqladmin1_beta4-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-sqladmin1_beta4-cli" -version = "4.0.1+20220226" +version = "5.0.2+20221209" authors = ["Sebastian Thiel "] description = "A complete library to interact with SQL Admin (protocol v1beta4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sqladmin1_beta4-cli" @@ -20,13 +20,13 @@ name = "sqladmin1-beta4" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-sqladmin1_beta4] path = "../sqladmin1_beta4" -version = "4.0.1+20220226" +version = "5.0.2+20221209" + diff --git a/gen/sqladmin1_beta4-cli/README.md b/gen/sqladmin1_beta4-cli/README.md index 3fcf764a37..fa9eeff8f4 100644 --- a/gen/sqladmin1_beta4-cli/README.md +++ b/gen/sqladmin1_beta4-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *SQL Admin* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *SQL Admin* API at revision *20221209*. The CLI is at version *5.0.2*. ```bash sqladmin1-beta4 [options] @@ -85,6 +85,7 @@ sqladmin1-beta4 [options] list [-p ]... [-o ] users delete [-p ]... [-o ] + get [-p ]... [-o ] insert (-r )... [-p ]... [-o ] list [-p ]... [-o ] update (-r )... [-p ]... [-o ] diff --git a/gen/sqladmin1_beta4-cli/mkdocs.yml b/gen/sqladmin1_beta4-cli/mkdocs.yml index 98b0964961..e04c2c6851 100644 --- a/gen/sqladmin1_beta4-cli/mkdocs.yml +++ b/gen/sqladmin1_beta4-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: SQL Admin v4.0.1+20220226 +site_name: SQL Admin v5.0.2+20221209 site_url: http://byron.github.io/google-apis-rs/google-sqladmin1_beta4-cli site_description: A complete library to interact with SQL Admin (protocol v1beta4) @@ -7,57 +7,68 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/sqladmin1_beta4- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['backup-runs_delete.md', 'Backup Runs', 'Delete'] -- ['backup-runs_get.md', 'Backup Runs', 'Get'] -- ['backup-runs_insert.md', 'Backup Runs', 'Insert'] -- ['backup-runs_list.md', 'Backup Runs', 'List'] -- ['connect_generate-ephemeral.md', 'Connect', 'Generate Ephemeral'] -- ['connect_get.md', 'Connect', 'Get'] -- ['databases_delete.md', 'Databases', 'Delete'] -- ['databases_get.md', 'Databases', 'Get'] -- ['databases_insert.md', 'Databases', 'Insert'] -- ['databases_list.md', 'Databases', 'List'] -- ['databases_patch.md', 'Databases', 'Patch'] -- ['databases_update.md', 'Databases', 'Update'] -- ['flags_list.md', 'Flags', 'List'] -- ['instances_add-server-ca.md', 'Instances', 'Add Server Ca'] -- ['instances_clone.md', 'Instances', 'Clone'] -- ['instances_delete.md', 'Instances', 'Delete'] -- ['instances_demote-master.md', 'Instances', 'Demote 
Master'] -- ['instances_export.md', 'Instances', 'Export'] -- ['instances_failover.md', 'Instances', 'Failover'] -- ['instances_get.md', 'Instances', 'Get'] -- ['instances_import.md', 'Instances', 'Import'] -- ['instances_insert.md', 'Instances', 'Insert'] -- ['instances_list.md', 'Instances', 'List'] -- ['instances_list-server-cas.md', 'Instances', 'List Server Cas'] -- ['instances_patch.md', 'Instances', 'Patch'] -- ['instances_promote-replica.md', 'Instances', 'Promote Replica'] -- ['instances_reset-ssl-config.md', 'Instances', 'Reset Ssl Config'] -- ['instances_restart.md', 'Instances', 'Restart'] -- ['instances_restore-backup.md', 'Instances', 'Restore Backup'] -- ['instances_rotate-server-ca.md', 'Instances', 'Rotate Server Ca'] -- ['instances_start-replica.md', 'Instances', 'Start Replica'] -- ['instances_stop-replica.md', 'Instances', 'Stop Replica'] -- ['instances_truncate-log.md', 'Instances', 'Truncate Log'] -- ['instances_update.md', 'Instances', 'Update'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['projects_instances-reschedule-maintenance.md', 'Projects', 'Instances Reschedule Maintenance'] -- ['projects_instances-start-external-sync.md', 'Projects', 'Instances Start External Sync'] -- ['projects_instances-verify-external-sync-settings.md', 'Projects', 'Instances Verify External Sync Settings'] -- ['ssl-certs_create-ephemeral.md', 'Ssl Certs', 'Create Ephemeral'] -- ['ssl-certs_delete.md', 'Ssl Certs', 'Delete'] -- ['ssl-certs_get.md', 'Ssl Certs', 'Get'] -- ['ssl-certs_insert.md', 'Ssl Certs', 'Insert'] -- ['ssl-certs_list.md', 'Ssl Certs', 'List'] -- ['tiers_list.md', 'Tiers', 'List'] -- ['users_delete.md', 'Users', 'Delete'] -- ['users_insert.md', 'Users', 'Insert'] -- ['users_list.md', 'Users', 'List'] -- ['users_update.md', 'Users', 'Update'] +nav: +- Home: 'index.md' +- 'Backup Runs': + - 'Delete': 'backup-runs_delete.md' + - 'Get': 'backup-runs_get.md' + - 'Insert': 'backup-runs_insert.md' 
+ - 'List': 'backup-runs_list.md' +- 'Connect': + - 'Generate Ephemeral': 'connect_generate-ephemeral.md' + - 'Get': 'connect_get.md' +- 'Databases': + - 'Delete': 'databases_delete.md' + - 'Get': 'databases_get.md' + - 'Insert': 'databases_insert.md' + - 'List': 'databases_list.md' + - 'Patch': 'databases_patch.md' + - 'Update': 'databases_update.md' +- 'Flags': + - 'List': 'flags_list.md' +- 'Instances': + - 'Add Server Ca': 'instances_add-server-ca.md' + - 'Clone': 'instances_clone.md' + - 'Delete': 'instances_delete.md' + - 'Demote Master': 'instances_demote-master.md' + - 'Export': 'instances_export.md' + - 'Failover': 'instances_failover.md' + - 'Get': 'instances_get.md' + - 'Import': 'instances_import.md' + - 'Insert': 'instances_insert.md' + - 'List': 'instances_list.md' + - 'List Server Cas': 'instances_list-server-cas.md' + - 'Patch': 'instances_patch.md' + - 'Promote Replica': 'instances_promote-replica.md' + - 'Reset Ssl Config': 'instances_reset-ssl-config.md' + - 'Restart': 'instances_restart.md' + - 'Restore Backup': 'instances_restore-backup.md' + - 'Rotate Server Ca': 'instances_rotate-server-ca.md' + - 'Start Replica': 'instances_start-replica.md' + - 'Stop Replica': 'instances_stop-replica.md' + - 'Truncate Log': 'instances_truncate-log.md' + - 'Update': 'instances_update.md' +- 'Operations': + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Projects': + - 'Instances Reschedule Maintenance': 'projects_instances-reschedule-maintenance.md' + - 'Instances Start External Sync': 'projects_instances-start-external-sync.md' + - 'Instances Verify External Sync Settings': 'projects_instances-verify-external-sync-settings.md' +- 'Ssl Certs': + - 'Create Ephemeral': 'ssl-certs_create-ephemeral.md' + - 'Delete': 'ssl-certs_delete.md' + - 'Get': 'ssl-certs_get.md' + - 'Insert': 'ssl-certs_insert.md' + - 'List': 'ssl-certs_list.md' +- 'Tiers': + - 'List': 'tiers_list.md' +- 'Users': + - 'Delete': 'users_delete.md' + - 'Get': 'users_get.md' + 
- 'Insert': 'users_insert.md' + - 'List': 'users_list.md' + - 'Update': 'users_update.md' theme: readthedocs diff --git a/gen/sqladmin1_beta4-cli/src/client.rs b/gen/sqladmin1_beta4-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/sqladmin1_beta4-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/sqladmin1_beta4-cli/src/main.rs b/gen/sqladmin1_beta4-cli/src/main.rs index d909667458..efeb983833 100644 --- a/gen/sqladmin1_beta4-cli/src/main.rs +++ b/gen/sqladmin1_beta4-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_sqladmin1_beta4::{api, Error, oauth2}; +use google_sqladmin1_beta4::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -196,10 +195,11 @@ where "self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "start-time" => Some(("startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "time-zone" => Some(("timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "window-start-time" => Some(("windowStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-kind", "code", "description", "disk-encryption-configuration", "disk-encryption-status", "end-time", "enqueued-time", "error", "id", "instance", "kind", "kms-key-name", "kms-key-version-name", "location", "message", "self-link", "start-time", "status", "type", "window-start-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-kind", "code", 
"description", "disk-encryption-configuration", "disk-encryption-status", "end-time", "enqueued-time", "error", "id", "instance", "kind", "kms-key-name", "kms-key-version-name", "location", "message", "self-link", "start-time", "status", "time-zone", "type", "window-start-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -269,7 +269,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -413,7 +413,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "read-time" => { - call = call.read_time(value.unwrap_or("")); + call = call.read_time( value.map(|v| arg_from_str(v, err, "read-time", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -1035,12 +1035,13 @@ where "clone-context.bin-log-coordinates.bin-log-file-name" => Some(("cloneContext.binLogCoordinates.binLogFileName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clone-context.bin-log-coordinates.bin-log-position" => Some(("cloneContext.binLogCoordinates.binLogPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clone-context.bin-log-coordinates.kind" => Some(("cloneContext.binLogCoordinates.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "clone-context.database-names" => Some(("cloneContext.databaseNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "clone-context.destination-instance-name" => Some(("cloneContext.destinationInstanceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clone-context.kind" => Some(("cloneContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "clone-context.pitr-timestamp-ms" => Some(("cloneContext.pitrTimestampMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "clone-context.point-in-time" => Some(("cloneContext.pointInTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allocated-ip-range", "bin-log-coordinates", "bin-log-file-name", "bin-log-position", "clone-context", "destination-instance-name", "kind", "pitr-timestamp-ms", "point-in-time"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allocated-ip-range", "bin-log-coordinates", "bin-log-file-name", "bin-log-position", "clone-context", "database-names", "destination-instance-name", "kind", "pitr-timestamp-ms", "point-in-time"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1270,6 +1271,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "export-context.bak-export-options.stripe-count" => Some(("exportContext.bakExportOptions.stripeCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "export-context.bak-export-options.striped" => Some(("exportContext.bakExportOptions.striped", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "export-context.csv-export-options.escape-character" => Some(("exportContext.csvExportOptions.escapeCharacter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "export-context.csv-export-options.fields-terminated-by" => Some(("exportContext.csvExportOptions.fieldsTerminatedBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "export-context.csv-export-options.lines-terminated-by" => Some(("exportContext.csvExportOptions.linesTerminatedBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1284,7 +1287,7 @@ where "export-context.sql-export-options.tables" => Some(("exportContext.sqlExportOptions.tables", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "export-context.uri" => Some(("exportContext.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["csv-export-options", "databases", "escape-character", "export-context", "fields-terminated-by", "file-type", "kind", "lines-terminated-by", "master-data", "mysql-export-options", "offload", "quote-character", "schema-only", "select-query", "sql-export-options", "tables", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bak-export-options", "csv-export-options", "databases", "escape-character", "export-context", "fields-terminated-by", "file-type", "kind", "lines-terminated-by", "master-data", "mysql-export-options", "offload", "quote-character", "schema-only", "select-query", "sql-export-options", "stripe-count", "striped", "tables", "uri"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1508,6 +1511,7 @@ where "import-context.bak-import-options.encryption-options.cert-path" => Some(("importContext.bakImportOptions.encryptionOptions.certPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "import-context.bak-import-options.encryption-options.pvk-password" => Some(("importContext.bakImportOptions.encryptionOptions.pvkPassword", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "import-context.bak-import-options.encryption-options.pvk-path" => Some(("importContext.bakImportOptions.encryptionOptions.pvkPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "import-context.bak-import-options.striped" => Some(("importContext.bakImportOptions.striped", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "import-context.csv-import-options.columns" => Some(("importContext.csvImportOptions.columns", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "import-context.csv-import-options.escape-character" => Some(("importContext.csvImportOptions.escapeCharacter", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "import-context.csv-import-options.fields-terminated-by" => Some(("importContext.csvImportOptions.fieldsTerminatedBy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1520,7 +1524,7 @@ where "import-context.kind" => Some(("importContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "import-context.uri" => Some(("importContext.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["bak-import-options", "cert-path", "columns", "csv-import-options", "database", "encryption-options", "escape-character", "fields-terminated-by", "file-type", "import-context", "import-user", 
"kind", "lines-terminated-by", "pvk-password", "pvk-path", "quote-character", "table", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["bak-import-options", "cert-path", "columns", "csv-import-options", "database", "encryption-options", "escape-character", "fields-terminated-by", "file-type", "import-context", "import-user", "kind", "lines-terminated-by", "pvk-password", "pvk-path", "quote-character", "striped", "table", "uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1688,16 +1692,19 @@ where "settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.backup-configuration.transaction-log-retention-days" => Some(("settings.backupConfiguration.transactionLogRetentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.collation" => Some(("settings.collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.connector-enforcement" => Some(("settings.connectorEnforcement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.deletion-protection-enabled" => Some(("settings.deletionProtectionEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"settings.insights-config.query-insights-enabled" => Some(("settings.insightsConfig.queryInsightsEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-plans-per-minute" => Some(("settings.insightsConfig.queryPlansPerMinute", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.query-string-length" => Some(("settings.insightsConfig.queryStringLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.record-application-tags" => Some(("settings.insightsConfig.recordApplicationTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.record-client-address" => Some(("settings.insightsConfig.recordClientAddress", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.allocated-ip-range" => Some(("settings.ipConfiguration.allocatedIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.ip-configuration.enable-private-path-for-google-cloud-services" => Some(("settings.ipConfiguration.enablePrivatePathForGoogleCloudServices", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -1712,6 +1719,7 @@ where "settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.complexity" => 
Some(("settings.passwordValidationPolicy.complexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.disallow-username-substring" => Some(("settings.passwordValidationPolicy.disallowUsernameSubstring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.password-validation-policy.enable-password-policy" => Some(("settings.passwordValidationPolicy.enablePasswordPolicy", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.password-validation-policy.min-length" => Some(("settings.passwordValidationPolicy.minLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.password-validation-policy.password-change-interval" => Some(("settings.passwordValidationPolicy.passwordChangeInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.reuse-interval" => Some(("settings.passwordValidationPolicy.reuseInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -1720,14 +1728,17 @@ where "settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.bucket" => Some(("settings.sqlServerAuditConfig.bucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.kind" => Some(("settings.sqlServerAuditConfig.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.retention-interval" => Some(("settings.sqlServerAuditConfig.retentionInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.upload-interval" => Some(("settings.sqlServerAuditConfig.uploadInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.storage-auto-resize" => Some(("settings.storageAutoResize", 
JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.time-zone" => Some(("settings.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "suspension-reason" => Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", 
"maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "transaction-log-retention-days", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "connector-enforcement", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", 
"deletion-protection-enabled", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enable-password-policy", "enable-private-path-for-google-cloud-services", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-interval", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "time-zone", "transaction-log-retention-days", "update-track", "upload-interval", "user-labels", "username", "verify-server-certificate", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1797,7 
+1808,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -2009,16 +2020,19 @@ where "settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.backup-configuration.transaction-log-retention-days" => Some(("settings.backupConfiguration.transactionLogRetentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.collation" => Some(("settings.collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.connector-enforcement" => Some(("settings.connectorEnforcement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.deletion-protection-enabled" => Some(("settings.deletionProtectionEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-insights-enabled" => Some(("settings.insightsConfig.queryInsightsEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-plans-per-minute" => 
Some(("settings.insightsConfig.queryPlansPerMinute", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.query-string-length" => Some(("settings.insightsConfig.queryStringLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.record-application-tags" => Some(("settings.insightsConfig.recordApplicationTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.record-client-address" => Some(("settings.insightsConfig.recordClientAddress", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.allocated-ip-range" => Some(("settings.ipConfiguration.allocatedIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.ip-configuration.enable-private-path-for-google-cloud-services" => Some(("settings.ipConfiguration.enablePrivatePathForGoogleCloudServices", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2033,6 +2047,7 @@ where "settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.complexity" => Some(("settings.passwordValidationPolicy.complexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.disallow-username-substring" => 
Some(("settings.passwordValidationPolicy.disallowUsernameSubstring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.password-validation-policy.enable-password-policy" => Some(("settings.passwordValidationPolicy.enablePasswordPolicy", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.password-validation-policy.min-length" => Some(("settings.passwordValidationPolicy.minLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.password-validation-policy.password-change-interval" => Some(("settings.passwordValidationPolicy.passwordChangeInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.reuse-interval" => Some(("settings.passwordValidationPolicy.reuseInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -2041,14 +2056,17 @@ where "settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.bucket" => Some(("settings.sqlServerAuditConfig.bucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.kind" => Some(("settings.sqlServerAuditConfig.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.retention-interval" => Some(("settings.sqlServerAuditConfig.retentionInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.upload-interval" => Some(("settings.sqlServerAuditConfig.uploadInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.storage-auto-resize" => Some(("settings.storageAutoResize", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.time-zone" => Some(("settings.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "suspension-reason" => Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", 
"password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "transaction-log-retention-days", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "connector-enforcement", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "deletion-protection-enabled", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enable-password-policy", 
"enable-private-path-for-google-cloud-services", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-interval", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "time-zone", "transaction-log-retention-days", "update-track", "upload-interval", "user-labels", "username", "verify-server-certificate", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2736,16 +2754,19 @@ where "settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "settings.backup-configuration.transaction-log-retention-days" => Some(("settings.backupConfiguration.transactionLogRetentionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.collation" => Some(("settings.collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.connector-enforcement" => Some(("settings.connectorEnforcement", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.deletion-protection-enabled" => Some(("settings.deletionProtectionEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-insights-enabled" => Some(("settings.insightsConfig.queryInsightsEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.query-plans-per-minute" => Some(("settings.insightsConfig.queryPlansPerMinute", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.query-string-length" => Some(("settings.insightsConfig.queryStringLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "settings.insights-config.record-application-tags" => Some(("settings.insightsConfig.recordApplicationTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.insights-config.record-client-address" => 
Some(("settings.insightsConfig.recordClientAddress", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.allocated-ip-range" => Some(("settings.ipConfiguration.allocatedIpRange", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.ip-configuration.enable-private-path-for-google-cloud-services" => Some(("settings.ipConfiguration.enablePrivatePathForGoogleCloudServices", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2760,6 +2781,7 @@ where "settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.complexity" => Some(("settings.passwordValidationPolicy.complexity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.disallow-username-substring" => Some(("settings.passwordValidationPolicy.disallowUsernameSubstring", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "settings.password-validation-policy.enable-password-policy" => Some(("settings.passwordValidationPolicy.enablePasswordPolicy", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.password-validation-policy.min-length" => Some(("settings.passwordValidationPolicy.minLength", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), 
"settings.password-validation-policy.password-change-interval" => Some(("settings.passwordValidationPolicy.passwordChangeInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.password-validation-policy.reuse-interval" => Some(("settings.passwordValidationPolicy.reuseInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -2768,14 +2790,17 @@ where "settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.bucket" => Some(("settings.sqlServerAuditConfig.bucket", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.sql-server-audit-config.kind" => Some(("settings.sqlServerAuditConfig.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.retention-interval" => Some(("settings.sqlServerAuditConfig.retentionInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.sql-server-audit-config.upload-interval" => Some(("settings.sqlServerAuditConfig.uploadInterval", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.storage-auto-resize" => Some(("settings.storageAutoResize", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "settings.time-zone" => Some(("settings.timeZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "suspension-reason" => 
Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", "on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", 
"scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "transaction-log-retention-days", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "active-directory-config", "allocated-ip-range", "authorized-gae-applications", "availability-type", "available", "available-maintenance-versions", "backend-type", "backup-configuration", "backup-retention-settings", "binary-log-enabled", "bucket", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "collation", "common-name", "complexity", "connect-retry-interval", "connection-name", "connector-enforcement", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-installed-version", "database-replication-enabled", "database-version", "day", "deletion-protection-enabled", "disallow-username-substring", "disk-encryption-configuration", "disk-encryption-status", "domain", "dump-file-path", "enable-password-policy", "enable-private-path-for-google-cloud-services", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "insights-config", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-version", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "min-length", "mysql-replica-configuration", "name", 
"on-premises-configuration", "out-of-disk-report", "password", "password-change-interval", "password-validation-policy", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "query-insights-enabled", "query-plans-per-minute", "query-string-length", "record-application-tags", "record-client-address", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "retained-backups", "retention-interval", "retention-unit", "reuse-interval", "root-password", "satisfies-pzs", "schedule-deadline-time", "scheduled-maintenance", "secondary-gce-zone", "secondary-zone", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "source-instance", "sql-min-recommended-increase-size-gb", "sql-out-of-disk-state", "sql-server-audit-config", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "time-zone", "transaction-log-retention-days", "update-track", "upload-interval", "user-labels", "username", "verify-server-certificate", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2897,7 +2922,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "instance" => { call = call.instance(value.unwrap_or("")); @@ -3646,6 +3671,62 @@ where } } + async fn _users_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.users().get(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("name").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "host" => { + call = call.host(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["host"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _users_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3669,6 +3750,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "dual-password-type" => Some(("dualPasswordType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3677,6 +3759,7 @@ where "password" => Some(("password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "password-policy.allowed-failed-attempts" => Some(("passwordPolicy.allowedFailedAttempts", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "password-policy.enable-failed-attempts-check" => Some(("passwordPolicy.enableFailedAttemptsCheck", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "password-policy.enable-password-verification" => Some(("passwordPolicy.enablePasswordVerification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "password-policy.password-expiration-duration" => Some(("passwordPolicy.passwordExpirationDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "password-policy.status.locked" => Some(("passwordPolicy.status.locked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "password-policy.status.password-expiration-time" => Some(("passwordPolicy.status.passwordExpirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3685,7 +3768,7 @@ where "sqlserver-user-details.server-roles" => Some(("sqlserverUserDetails.serverRoles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-failed-attempts", "disabled", "enable-failed-attempts-check", "etag", "host", "instance", "kind", "locked", "name", 
"password", "password-expiration-duration", "password-expiration-time", "password-policy", "project", "server-roles", "sqlserver-user-details", "status", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-failed-attempts", "disabled", "dual-password-type", "enable-failed-attempts-check", "enable-password-verification", "etag", "host", "instance", "kind", "locked", "name", "password", "password-expiration-duration", "password-expiration-time", "password-policy", "project", "server-roles", "sqlserver-user-details", "status", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3820,6 +3903,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "dual-password-type" => Some(("dualPasswordType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3828,6 +3912,7 @@ where "password" => Some(("password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "password-policy.allowed-failed-attempts" => Some(("passwordPolicy.allowedFailedAttempts", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "password-policy.enable-failed-attempts-check" => Some(("passwordPolicy.enableFailedAttemptsCheck", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "password-policy.enable-password-verification" => Some(("passwordPolicy.enablePasswordVerification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "password-policy.password-expiration-duration" => Some(("passwordPolicy.passwordExpirationDuration", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "password-policy.status.locked" => Some(("passwordPolicy.status.locked", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "password-policy.status.password-expiration-time" => Some(("passwordPolicy.status.passwordExpirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -3836,7 +3921,7 @@ where "sqlserver-user-details.server-roles" => Some(("sqlserverUserDetails.serverRoles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-failed-attempts", "disabled", "enable-failed-attempts-check", "etag", "host", "instance", "kind", "locked", "name", "password", "password-expiration-duration", "password-expiration-time", "password-policy", "project", "server-roles", "sqlserver-user-details", "status", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["allowed-failed-attempts", "disabled", "dual-password-type", "enable-failed-attempts-check", "enable-password-verification", "etag", "host", "instance", "kind", "locked", "name", "password", "password-expiration-duration", "password-expiration-time", "password-policy", "project", "server-roles", "sqlserver-user-details", "status", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -4120,6 +4205,9 @@ where ("delete", Some(opt)) => { call_result = self._users_delete(opt, dry_run, &mut err).await; }, + ("get", Some(opt)) => { + call_result = self._users_get(opt, dry_run, &mut err).await; + }, ("insert", Some(opt)) => { call_result = self._users_insert(opt, dry_run, &mut err).await; }, @@ -4972,7 +5060,7 @@ async fn main() { Some(false)), ]), ("patch", - Some(r##"Updates settings of a Cloud SQL instance. 
This method supports patch semantics."##), + Some(r##"Partially updates settings of a Cloud SQL instance by merging the request with the current configuration. This method supports patch semantics."##), "Details at http://byron.github.io/google-apis-rs/google_sqladmin1_beta4_cli/instances_patch", vec![ (Some(r##"project"##), @@ -5633,7 +5721,7 @@ async fn main() { ]), ]), - ("users", "methods: 'delete', 'insert', 'list' and 'update'", vec![ + ("users", "methods: 'delete', 'get', 'insert', 'list' and 'update'", vec![ ("delete", Some(r##"Deletes a user from a Cloud SQL instance."##), "Details at http://byron.github.io/google-apis-rs/google_sqladmin1_beta4_cli/users_delete", @@ -5656,6 +5744,40 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("get", + Some(r##"Retrieves a resource containing information about a user."##), + "Details at http://byron.github.io/google-apis-rs/google_sqladmin1_beta4_cli/users_get", + vec![ + (Some(r##"project"##), + None, + Some(r##"Project ID of the project that contains the instance."##), + Some(true), + Some(false)), + + (Some(r##"instance"##), + None, + Some(r##"Database instance ID. 
This does not include the project ID."##), + Some(true), + Some(false)), + + (Some(r##"name"##), + None, + Some(r##"User of the instance."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5764,7 +5886,7 @@ async fn main() { let mut app = App::new("sqladmin1-beta4") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20221209") .about("API for Cloud SQL database instance management") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_sqladmin1_beta4_cli") .arg(Arg::with_name("url") diff --git a/gen/sqladmin1_beta4/Cargo.toml b/gen/sqladmin1_beta4/Cargo.toml index 71ce812392..e570936d79 100644 --- a/gen/sqladmin1_beta4/Cargo.toml +++ b/gen/sqladmin1_beta4/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-sqladmin1_beta4" -version = "5.0.2-beta-1+20221209" +version = "5.0.2+20221209" authors = ["Sebastian Thiel "] description = "A complete library to interact with SQL Admin (protocol v1beta4)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sqladmin1_beta4" homepage = "https://developers.google.com/cloud-sql/" -documentation = "https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209" +documentation = "https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209" license = "MIT" keywords = ["sqladmin", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/sqladmin1_beta4/README.md b/gen/sqladmin1_beta4/README.md index 08c8811440..b4a992817d 100644 --- a/gen/sqladmin1_beta4/README.md +++ b/gen/sqladmin1_beta4/README.md @@ -5,34 +5,34 @@ DO NOT EDIT ! --> The `google-sqladmin1_beta4` library allows access to all features of the *Google SQL Admin* service. 
-This documentation was generated from *SQL Admin* crate version *5.0.2-beta-1+20221209*, where *20221209* is the exact revision of the *sqladmin:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *SQL Admin* crate version *5.0.2+20221209*, where *20221209* is the exact revision of the *sqladmin:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *SQL Admin* *v1_beta4* API can be found at the [official documentation site](https://developers.google.com/cloud-sql/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/SQLAdmin) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/SQLAdmin) ... -* [backup runs](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::BackupRun) - * [*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::BackupRunDeleteCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::BackupRunGetCall), [*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::BackupRunInsertCall) and [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::BackupRunListCall) +* [backup runs](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::BackupRun) + * [*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::BackupRunDeleteCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::BackupRunGetCall), 
[*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::BackupRunInsertCall) and [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::BackupRunListCall) * connect - * [*generate ephemeral*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::ConnectGenerateEphemeralCall) and [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::ConnectGetCall) -* [databases](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::Database) - * [*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::DatabaseDeleteCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::DatabaseGetCall), [*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::DatabaseInsertCall), [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::DatabaseListCall), [*patch*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::DatabasePatchCall) and [*update*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::DatabaseUpdateCall) -* [flags](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::Flag) - * [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::FlagListCall) + * [*generate ephemeral*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::ConnectGenerateEphemeralCall) and [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::ConnectGetCall) +* [databases](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::Database) + * 
[*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::DatabaseDeleteCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::DatabaseGetCall), [*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::DatabaseInsertCall), [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::DatabaseListCall), [*patch*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::DatabasePatchCall) and [*update*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::DatabaseUpdateCall) +* [flags](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::Flag) + * [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::FlagListCall) * instances - * [*add server ca*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceAddServerCaCall), [*clone*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceCloneCall), [*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceDeleteCall), [*demote master*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceDemoteMasterCall), [*export*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceExportCall), [*failover*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceFailoverCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceGetCall), [*import*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceImportCall), 
[*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceInsertCall), [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceListCall), [*list server cas*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceListServerCaCall), [*patch*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstancePatchCall), [*promote replica*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstancePromoteReplicaCall), [*reset ssl config*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceResetSslConfigCall), [*restart*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceRestartCall), [*restore backup*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceRestoreBackupCall), [*rotate server ca*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceRotateServerCaCall), [*start replica*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceStartReplicaCall), [*stop replica*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceStopReplicaCall), [*truncate log*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceTruncateLogCall) and [*update*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::InstanceUpdateCall) -* [operations](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::Operation) - * [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::OperationGetCall) and 
[*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::OperationListCall) + * [*add server ca*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceAddServerCaCall), [*clone*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceCloneCall), [*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceDeleteCall), [*demote master*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceDemoteMasterCall), [*export*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceExportCall), [*failover*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceFailoverCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceGetCall), [*import*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceImportCall), [*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceInsertCall), [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceListCall), [*list server cas*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceListServerCaCall), [*patch*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstancePatchCall), [*promote replica*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstancePromoteReplicaCall), [*reset ssl config*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceResetSslConfigCall), [*restart*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceRestartCall), [*restore 
backup*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceRestoreBackupCall), [*rotate server ca*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceRotateServerCaCall), [*start replica*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceStartReplicaCall), [*stop replica*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceStopReplicaCall), [*truncate log*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceTruncateLogCall) and [*update*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::InstanceUpdateCall) +* [operations](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::Operation) + * [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::OperationGetCall) and [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::OperationListCall) * projects - * [*instances reschedule maintenance*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::ProjectInstanceRescheduleMaintenanceCall), [*instances start external sync*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::ProjectInstanceStartExternalSyncCall) and [*instances verify external sync settings*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::ProjectInstanceVerifyExternalSyncSettingCall) -* [ssl certs](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::SslCert) - * [*create ephemeral*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::SslCertCreateEphemeralCall), [*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::SslCertDeleteCall), 
[*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::SslCertGetCall), [*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::SslCertInsertCall) and [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::SslCertListCall) -* [tiers](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::Tier) - * [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::TierListCall) -* [users](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::User) - * [*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::UserDeleteCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::UserGetCall), [*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::UserInsertCall), [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::UserListCall) and [*update*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/api::UserUpdateCall) + * [*instances reschedule maintenance*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::ProjectInstanceRescheduleMaintenanceCall), [*instances start external sync*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::ProjectInstanceStartExternalSyncCall) and [*instances verify external sync settings*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::ProjectInstanceVerifyExternalSyncSettingCall) +* [ssl certs](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::SslCert) + * [*create ephemeral*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::SslCertCreateEphemeralCall), 
[*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::SslCertDeleteCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::SslCertGetCall), [*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::SslCertInsertCall) and [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::SslCertListCall) +* [tiers](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::Tier) + * [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::TierListCall) +* [users](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::User) + * [*delete*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::UserDeleteCall), [*get*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::UserGetCall), [*insert*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::UserInsertCall), [*list*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::UserListCall) and [*update*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/api::UserUpdateCall) @@ -41,17 +41,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/SQLAdmin)** +* **[Hub](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/SQLAdmin)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::MethodsBuilder) which in turn - allow access to individual [*Call 
Builders*](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::CallBuilder) -* **[Resources](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::CallBuilder) +* **[Resources](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::Part)** + * **[Parts](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::CallBuilder)** +* **[Activities](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -175,17 +175,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -195,29 +195,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::Delegate) to the -[Method Builder](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::Delegate) to the +[Method Builder](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::RequestValue) and -[decodable](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::RequestValue) and +[decodable](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-sqladmin1_beta4/5.0.2-beta-1+20221209/google_sqladmin1_beta4/client::RequestValue) are moved +* [request values](https://docs.rs/google-sqladmin1_beta4/5.0.2+20221209/google_sqladmin1_beta4/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/sqladmin1_beta4/src/api.rs b/gen/sqladmin1_beta4/src/api.rs index 1b1a850ebb..185af820a8 100644 --- a/gen/sqladmin1_beta4/src/api.rs +++ b/gen/sqladmin1_beta4/src/api.rs @@ -131,7 +131,7 @@ impl<'a, S> SQLAdmin { SQLAdmin { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://sqladmin.googleapis.com/".to_string(), _root_url: "https://sqladmin.googleapis.com/".to_string(), } @@ -169,7 +169,7 @@ impl<'a, S> SQLAdmin { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/sqladmin1_beta4/src/client.rs b/gen/sqladmin1_beta4/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/sqladmin1_beta4/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/sqladmin1_beta4/src/lib.rs b/gen/sqladmin1_beta4/src/lib.rs index d52ea52b6c..19af402f22 100644 --- a/gen/sqladmin1_beta4/src/lib.rs +++ b/gen/sqladmin1_beta4/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *SQL Admin* crate version *5.0.2-beta-1+20221209*, where *20221209* is the exact revision of the *sqladmin:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *SQL Admin* crate version *5.0.2+20221209*, where *20221209* is the exact revision of the *sqladmin:v1beta4* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *SQL Admin* *v1_beta4* API can be found at the //! [official documentation site](https://developers.google.com/cloud-sql/). diff --git a/gen/storage1-cli/Cargo.toml b/gen/storage1-cli/Cargo.toml index 4d774c19ad..0dc5f99264 100644 --- a/gen/storage1-cli/Cargo.toml +++ b/gen/storage1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-storage1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with storage (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/storage1-cli" @@ -20,13 +20,13 @@ name = "storage1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-storage1] path = "../storage1" -version = "4.0.1+20220228" +version = "5.0.2+20230119" + diff --git a/gen/storage1-cli/README.md b/gen/storage1-cli/README.md index 87e0ef82e2..2fe9081ca0 100644 --- a/gen/storage1-cli/README.md +++ b/gen/storage1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *storage* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *storage* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash storage1 [options] diff --git a/gen/storage1-cli/mkdocs.yml b/gen/storage1-cli/mkdocs.yml index df7b9ea875..6a0322c55a 100644 --- a/gen/storage1-cli/mkdocs.yml +++ b/gen/storage1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: storage v4.0.1+20220228 +site_name: storage v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-storage1-cli site_description: A complete library to interact with storage (protocol v1) @@ -7,60 +7,68 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/storage1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['bucket-access-controls_delete.md', 'Bucket Access Controls', 'Delete'] -- ['bucket-access-controls_get.md', 'Bucket Access Controls', 'Get'] -- ['bucket-access-controls_insert.md', 'Bucket Access Controls', 'Insert'] -- ['bucket-access-controls_list.md', 'Bucket Access Controls', 'List'] -- ['bucket-access-controls_patch.md', 'Bucket Access Controls', 'Patch'] -- ['bucket-access-controls_update.md', 'Bucket Access Controls', 'Update'] -- ['buckets_delete.md', 'Buckets', 'Delete'] -- ['buckets_get.md', 'Buckets', 'Get'] -- ['buckets_get-iam-policy.md', 'Buckets', 'Get Iam Policy'] -- ['buckets_insert.md', 'Buckets', 'Insert'] -- ['buckets_list.md', 'Buckets', 'List'] -- ['buckets_lock-retention-policy.md', 'Buckets', 'Lock Retention Policy'] -- ['buckets_patch.md', 'Buckets', 'Patch'] -- ['buckets_set-iam-policy.md', 'Buckets', 'Set Iam Policy'] -- ['buckets_test-iam-permissions.md', 'Buckets', 'Test Iam Permissions'] -- ['buckets_update.md', 'Buckets', 'Update'] -- ['channels_stop.md', 'Channels', 'Stop'] -- ['default-object-access-controls_delete.md', 'Default Object Access Controls', 'Delete'] -- ['default-object-access-controls_get.md', 'Default Object Access Controls', 
'Get'] -- ['default-object-access-controls_insert.md', 'Default Object Access Controls', 'Insert'] -- ['default-object-access-controls_list.md', 'Default Object Access Controls', 'List'] -- ['default-object-access-controls_patch.md', 'Default Object Access Controls', 'Patch'] -- ['default-object-access-controls_update.md', 'Default Object Access Controls', 'Update'] -- ['notifications_delete.md', 'Notifications', 'Delete'] -- ['notifications_get.md', 'Notifications', 'Get'] -- ['notifications_insert.md', 'Notifications', 'Insert'] -- ['notifications_list.md', 'Notifications', 'List'] -- ['object-access-controls_delete.md', 'Object Access Controls', 'Delete'] -- ['object-access-controls_get.md', 'Object Access Controls', 'Get'] -- ['object-access-controls_insert.md', 'Object Access Controls', 'Insert'] -- ['object-access-controls_list.md', 'Object Access Controls', 'List'] -- ['object-access-controls_patch.md', 'Object Access Controls', 'Patch'] -- ['object-access-controls_update.md', 'Object Access Controls', 'Update'] -- ['objects_compose.md', 'Objects', 'Compose'] -- ['objects_copy.md', 'Objects', 'Copy'] -- ['objects_delete.md', 'Objects', 'Delete'] -- ['objects_get.md', 'Objects', 'Get'] -- ['objects_get-iam-policy.md', 'Objects', 'Get Iam Policy'] -- ['objects_insert.md', 'Objects', 'Insert'] -- ['objects_list.md', 'Objects', 'List'] -- ['objects_patch.md', 'Objects', 'Patch'] -- ['objects_rewrite.md', 'Objects', 'Rewrite'] -- ['objects_set-iam-policy.md', 'Objects', 'Set Iam Policy'] -- ['objects_test-iam-permissions.md', 'Objects', 'Test Iam Permissions'] -- ['objects_update.md', 'Objects', 'Update'] -- ['objects_watch-all.md', 'Objects', 'Watch All'] -- ['projects_hmac-keys-create.md', 'Projects', 'Hmac Keys Create'] -- ['projects_hmac-keys-delete.md', 'Projects', 'Hmac Keys Delete'] -- ['projects_hmac-keys-get.md', 'Projects', 'Hmac Keys Get'] -- ['projects_hmac-keys-list.md', 'Projects', 'Hmac Keys List'] -- ['projects_hmac-keys-update.md', 'Projects', 
'Hmac Keys Update'] -- ['projects_service-account-get.md', 'Projects', 'Service Account Get'] +nav: +- Home: 'index.md' +- 'Bucket Access Controls': + - 'Delete': 'bucket-access-controls_delete.md' + - 'Get': 'bucket-access-controls_get.md' + - 'Insert': 'bucket-access-controls_insert.md' + - 'List': 'bucket-access-controls_list.md' + - 'Patch': 'bucket-access-controls_patch.md' + - 'Update': 'bucket-access-controls_update.md' +- 'Buckets': + - 'Delete': 'buckets_delete.md' + - 'Get': 'buckets_get.md' + - 'Get Iam Policy': 'buckets_get-iam-policy.md' + - 'Insert': 'buckets_insert.md' + - 'List': 'buckets_list.md' + - 'Lock Retention Policy': 'buckets_lock-retention-policy.md' + - 'Patch': 'buckets_patch.md' + - 'Set Iam Policy': 'buckets_set-iam-policy.md' + - 'Test Iam Permissions': 'buckets_test-iam-permissions.md' + - 'Update': 'buckets_update.md' +- 'Channels': + - 'Stop': 'channels_stop.md' +- 'Default Object Access Controls': + - 'Delete': 'default-object-access-controls_delete.md' + - 'Get': 'default-object-access-controls_get.md' + - 'Insert': 'default-object-access-controls_insert.md' + - 'List': 'default-object-access-controls_list.md' + - 'Patch': 'default-object-access-controls_patch.md' + - 'Update': 'default-object-access-controls_update.md' +- 'Notifications': + - 'Delete': 'notifications_delete.md' + - 'Get': 'notifications_get.md' + - 'Insert': 'notifications_insert.md' + - 'List': 'notifications_list.md' +- 'Object Access Controls': + - 'Delete': 'object-access-controls_delete.md' + - 'Get': 'object-access-controls_get.md' + - 'Insert': 'object-access-controls_insert.md' + - 'List': 'object-access-controls_list.md' + - 'Patch': 'object-access-controls_patch.md' + - 'Update': 'object-access-controls_update.md' +- 'Objects': + - 'Compose': 'objects_compose.md' + - 'Copy': 'objects_copy.md' + - 'Delete': 'objects_delete.md' + - 'Get': 'objects_get.md' + - 'Get Iam Policy': 'objects_get-iam-policy.md' + - 'Insert': 'objects_insert.md' + - 'List': 
'objects_list.md' + - 'Patch': 'objects_patch.md' + - 'Rewrite': 'objects_rewrite.md' + - 'Set Iam Policy': 'objects_set-iam-policy.md' + - 'Test Iam Permissions': 'objects_test-iam-permissions.md' + - 'Update': 'objects_update.md' + - 'Watch All': 'objects_watch-all.md' +- 'Projects': + - 'Hmac Keys Create': 'projects_hmac-keys-create.md' + - 'Hmac Keys Delete': 'projects_hmac-keys-delete.md' + - 'Hmac Keys Get': 'projects_hmac-keys-get.md' + - 'Hmac Keys List': 'projects_hmac-keys-list.md' + - 'Hmac Keys Update': 'projects_hmac-keys-update.md' + - 'Service Account Get': 'projects_service-account-get.md' theme: readthedocs diff --git a/gen/storage1-cli/src/client.rs b/gen/storage1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/storage1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/storage1-cli/src/main.rs b/gen/storage1-cli/src/main.rs index 8023b85e07..a422ac6f19 100644 --- a/gen/storage1-cli/src/main.rs +++ b/gen/storage1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_storage1::{api, Error, oauth2}; +use google_storage1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -60,9 +59,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -76,7 +72,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -111,9 +107,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -127,7 +120,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -214,9 +207,6 @@ where "user-project" => { call = 
call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -230,7 +220,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -273,9 +263,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -289,7 +276,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -376,9 +363,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -392,7 +376,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -479,9 +463,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -495,7 +476,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", 
"user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -538,14 +519,11 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -560,7 +538,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["if-metageneration-match", "if-metageneration-not-match", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["if-metageneration-match", "if-metageneration-not-match", "user-project"].iter().map(|v|*v)); v } )); } } @@ -595,17 +573,14 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -620,7 +595,7 @@ where 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["if-metageneration-match", "if-metageneration-not-match", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["if-metageneration-match", "if-metageneration-not-match", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -663,11 +638,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "options-requested-policy-version" => { - call = call.options_requested_policy_version(arg_from_str(value.unwrap_or("-0"), err, "options-requested-policy-version", "integer")); + call = call.options_requested_policy_version( value.map(|v| arg_from_str(v, err, "options-requested-policy-version", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -682,7 +654,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["options-requested-policy-version", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["options-requested-policy-version", "user-project"].iter().map(|v|*v)); v } )); } } @@ -793,9 +765,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -818,7 +787,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["predefined-acl", "predefined-default-object-acl", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["predefined-acl", "predefined-default-object-acl", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -861,9 
+830,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -874,7 +840,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -889,7 +855,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["max-results", "page-token", "prefix", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["max-results", "page-token", "prefix", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -932,9 +898,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -948,7 +911,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -1059,9 +1022,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -1072,10 +1032,10 @@ where call = call.predefined_acl(value.unwrap_or("")); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( 
value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1090,7 +1050,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["if-metageneration-match", "if-metageneration-not-match", "predefined-acl", "predefined-default-object-acl", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["if-metageneration-match", "if-metageneration-not-match", "predefined-acl", "predefined-default-object-acl", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -1169,9 +1129,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -1185,7 +1142,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -1228,9 +1185,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -1244,7 +1198,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -1355,9 +1309,6 @@ where "user-project" => 
{ call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -1368,10 +1319,10 @@ where call = call.predefined_acl(value.unwrap_or("")); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1386,7 +1337,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["if-metageneration-match", "if-metageneration-not-match", "predefined-acl", "predefined-default-object-acl", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["if-metageneration-match", "if-metageneration-not-match", "predefined-acl", "predefined-default-object-acl", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -1515,9 +1466,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -1531,7 +1479,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -1566,9 +1514,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = 
call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -1582,7 +1527,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -1671,9 +1616,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -1687,7 +1629,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -1730,14 +1672,11 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1752,7 +1691,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["if-metageneration-match", "if-metageneration-not-match", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["if-metageneration-match", "if-metageneration-not-match", 
"user-project"].iter().map(|v|*v)); v } )); } } @@ -1841,9 +1780,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -1857,7 +1793,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -1946,9 +1882,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -1962,7 +1895,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -2005,9 +1938,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -2021,7 +1951,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -2056,9 +1986,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -2072,7 +1999,7 @@ where 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -2156,9 +2083,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -2172,7 +2096,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -2215,9 +2139,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -2231,7 +2152,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -2274,11 +2195,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2293,7 +2211,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", 
"user-project"].iter().map(|v|*v)); v } )); } } @@ -2328,11 +2246,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2347,7 +2262,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -2436,11 +2351,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2455,7 +2367,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -2498,11 +2410,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2517,7 +2426,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); 
v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -2606,11 +2515,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2625,7 +2531,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -2714,11 +2620,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2733,7 +2636,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -2842,17 +2745,14 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "kms-key-name" => { call = call.kms_key_name(value.unwrap_or("")); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = 
call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, "if-generation-match" => { - call = call.if_generation_match(value.unwrap_or("")); + call = call.if_generation_match( value.map(|v| arg_from_str(v, err, "if-generation-match", "int64")).unwrap_or(-0)); }, "destination-predefined-acl" => { call = call.destination_predefined_acl(value.unwrap_or("")); @@ -2870,7 +2770,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["destination-predefined-acl", "if-generation-match", "if-metageneration-match", "kms-key-name", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["destination-predefined-acl", "if-generation-match", "if-metageneration-match", "kms-key-name", "user-project"].iter().map(|v|*v)); v } )); } } @@ -2979,37 +2879,34 @@ where call = call.user_project(value.unwrap_or("")); }, "source-generation" => { - call = call.source_generation(value.unwrap_or("")); - }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); + call = call.source_generation( value.map(|v| arg_from_str(v, err, "source-generation", "int64")).unwrap_or(-0)); }, "projection" => { call = call.projection(value.unwrap_or("")); }, "if-source-metageneration-not-match" => { - call = call.if_source_metageneration_not_match(value.unwrap_or("")); + call = call.if_source_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-source-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-source-metageneration-match" => { - call = call.if_source_metageneration_match(value.unwrap_or("")); + call = call.if_source_metageneration_match( value.map(|v| arg_from_str(v, err, "if-source-metageneration-match", "int64")).unwrap_or(-0)); }, "if-source-generation-not-match" => { - call = call.if_source_generation_not_match(value.unwrap_or("")); + call = 
call.if_source_generation_not_match( value.map(|v| arg_from_str(v, err, "if-source-generation-not-match", "int64")).unwrap_or(-0)); }, "if-source-generation-match" => { - call = call.if_source_generation_match(value.unwrap_or("")); + call = call.if_source_generation_match( value.map(|v| arg_from_str(v, err, "if-source-generation-match", "int64")).unwrap_or(-0)); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, "if-generation-not-match" => { - call = call.if_generation_not_match(value.unwrap_or("")); + call = call.if_generation_not_match( value.map(|v| arg_from_str(v, err, "if-generation-not-match", "int64")).unwrap_or(-0)); }, "if-generation-match" => { - call = call.if_generation_match(value.unwrap_or("")); + call = call.if_generation_match( value.map(|v| arg_from_str(v, err, "if-generation-match", "int64")).unwrap_or(-0)); }, "destination-predefined-acl" => { call = call.destination_predefined_acl(value.unwrap_or("")); @@ -3030,7 +2927,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["destination-kms-key-name", "destination-predefined-acl", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "if-source-generation-match", "if-source-generation-not-match", "if-source-metageneration-match", "if-source-metageneration-not-match", "projection", "provisional-user-project", "source-generation", "user-project"].iter().map(|v|*v)); + v.extend(["destination-kms-key-name", "destination-predefined-acl", 
"if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "if-source-generation-match", "if-source-generation-not-match", "if-source-metageneration-match", "if-source-metageneration-not-match", "projection", "source-generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -3073,23 +2970,20 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, "if-generation-not-match" => { - call = call.if_generation_not_match(value.unwrap_or("")); + call = call.if_generation_not_match( value.map(|v| arg_from_str(v, err, "if-generation-not-match", "int64")).unwrap_or(-0)); }, "if-generation-match" => { - call = call.if_generation_match(value.unwrap_or("")); + call = call.if_generation_match( value.map(|v| arg_from_str(v, err, "if-generation-match", "int64")).unwrap_or(-0)); }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3104,7 +2998,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", 
"if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "user-project"].iter().map(|v|*v)); v } )); } } @@ -3140,26 +3034,23 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, "if-generation-not-match" => { - call = call.if_generation_not_match(value.unwrap_or("")); + call = call.if_generation_not_match( value.map(|v| arg_from_str(v, err, "if-generation-not-match", "int64")).unwrap_or(-0)); }, "if-generation-match" => { - call = call.if_generation_match(value.unwrap_or("")); + call = call.if_generation_match( value.map(|v| arg_from_str(v, err, "if-generation-match", "int64")).unwrap_or(-0)); }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3177,7 +3068,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "if-generation-match", "if-generation-not-match", "if-metageneration-match", 
"if-metageneration-not-match", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -3226,11 +3117,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3245,7 +3133,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -3353,9 +3241,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -3369,16 +3254,16 @@ where call = call.kms_key_name(value.unwrap_or("")); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, "if-generation-not-match" => { - call = call.if_generation_not_match(value.unwrap_or("")); + call = call.if_generation_not_match( value.map(|v| arg_from_str(v, err, "if-generation-not-match", "int64")).unwrap_or(-0)); }, "if-generation-match" => { - call = call.if_generation_match(value.unwrap_or("")); + call = call.if_generation_match( value.map(|v| arg_from_str(v, err, 
"if-generation-match", "int64")).unwrap_or(-0)); }, "content-encoding" => { call = call.content_encoding(value.unwrap_or("")); @@ -3396,7 +3281,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["content-encoding", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "kms-key-name", "name", "predefined-acl", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["content-encoding", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "kms-key-name", "name", "predefined-acl", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -3440,7 +3325,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "versions" => { - call = call.versions(arg_from_str(value.unwrap_or("false"), err, "versions", "boolean")); + call = call.versions( value.map(|v| arg_from_str(v, err, "versions", "boolean")).unwrap_or(false)); }, "user-project" => { call = call.user_project(value.unwrap_or("")); @@ -3448,9 +3333,6 @@ where "start-offset" => { call = call.start_offset(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -3461,10 +3343,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-trailing-delimiter" => { - call = call.include_trailing_delimiter(arg_from_str(value.unwrap_or("false"), err, "include-trailing-delimiter", "boolean")); + call = call.include_trailing_delimiter( value.map(|v| arg_from_str(v, err, "include-trailing-delimiter", 
"boolean")).unwrap_or(false)); }, "end-offset" => { call = call.end_offset(value.unwrap_or("")); @@ -3485,7 +3367,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["delimiter", "end-offset", "include-trailing-delimiter", "max-results", "page-token", "prefix", "projection", "provisional-user-project", "start-offset", "user-project", "versions"].iter().map(|v|*v)); + v.extend(["delimiter", "end-offset", "include-trailing-delimiter", "max-results", "page-token", "prefix", "projection", "start-offset", "user-project", "versions"].iter().map(|v|*v)); v } )); } } @@ -3593,9 +3475,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -3603,19 +3482,19 @@ where call = call.predefined_acl(value.unwrap_or("")); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, "if-generation-not-match" => { - call = call.if_generation_not_match(value.unwrap_or("")); + call = call.if_generation_not_match( value.map(|v| arg_from_str(v, err, "if-generation-not-match", "int64")).unwrap_or(-0)); }, "if-generation-match" => { - call = call.if_generation_match(value.unwrap_or("")); + call = call.if_generation_match( value.map(|v| arg_from_str(v, err, "if-generation-match", "int64")).unwrap_or(-0)); }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| 
arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3630,7 +3509,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "predefined-acl", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "predefined-acl", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -3739,43 +3618,40 @@ where call = call.user_project(value.unwrap_or("")); }, "source-generation" => { - call = call.source_generation(value.unwrap_or("")); + call = call.source_generation( value.map(|v| arg_from_str(v, err, "source-generation", "int64")).unwrap_or(-0)); }, "rewrite-token" => { call = call.rewrite_token(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, "max-bytes-rewritten-per-call" => { - call = call.max_bytes_rewritten_per_call(value.unwrap_or("")); + call = call.max_bytes_rewritten_per_call( value.map(|v| arg_from_str(v, err, "max-bytes-rewritten-per-call", "int64")).unwrap_or(-0)); }, "if-source-metageneration-not-match" => { - call = call.if_source_metageneration_not_match(value.unwrap_or("")); + call = call.if_source_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-source-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-source-metageneration-match" => { - call = call.if_source_metageneration_match(value.unwrap_or("")); + call = call.if_source_metageneration_match( value.map(|v| arg_from_str(v, err, "if-source-metageneration-match", "int64")).unwrap_or(-0)); }, "if-source-generation-not-match" => { - 
call = call.if_source_generation_not_match(value.unwrap_or("")); + call = call.if_source_generation_not_match( value.map(|v| arg_from_str(v, err, "if-source-generation-not-match", "int64")).unwrap_or(-0)); }, "if-source-generation-match" => { - call = call.if_source_generation_match(value.unwrap_or("")); + call = call.if_source_generation_match( value.map(|v| arg_from_str(v, err, "if-source-generation-match", "int64")).unwrap_or(-0)); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, "if-generation-not-match" => { - call = call.if_generation_not_match(value.unwrap_or("")); + call = call.if_generation_not_match( value.map(|v| arg_from_str(v, err, "if-generation-not-match", "int64")).unwrap_or(-0)); }, "if-generation-match" => { - call = call.if_generation_match(value.unwrap_or("")); + call = call.if_generation_match( value.map(|v| arg_from_str(v, err, "if-generation-match", "int64")).unwrap_or(-0)); }, "destination-predefined-acl" => { call = call.destination_predefined_acl(value.unwrap_or("")); @@ -3796,7 +3672,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["destination-kms-key-name", "destination-predefined-acl", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "if-source-generation-match", "if-source-generation-not-match", "if-source-metageneration-match", "if-source-metageneration-not-match", "max-bytes-rewritten-per-call", "projection", "provisional-user-project", "rewrite-token", "source-generation", 
"user-project"].iter().map(|v|*v)); + v.extend(["destination-kms-key-name", "destination-predefined-acl", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "if-source-generation-match", "if-source-generation-not-match", "if-source-metageneration-match", "if-source-metageneration-not-match", "max-bytes-rewritten-per-call", "projection", "rewrite-token", "source-generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -3875,11 +3751,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3894,7 +3767,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -3937,11 +3810,8 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3956,7 +3826,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "user-project"].iter().map(|v|*v)); v } )); } } @@ -4064,9 +3934,6 @@ where "user-project" => { call = 
call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -4074,19 +3941,19 @@ where call = call.predefined_acl(value.unwrap_or("")); }, "if-metageneration-not-match" => { - call = call.if_metageneration_not_match(value.unwrap_or("")); + call = call.if_metageneration_not_match( value.map(|v| arg_from_str(v, err, "if-metageneration-not-match", "int64")).unwrap_or(-0)); }, "if-metageneration-match" => { - call = call.if_metageneration_match(value.unwrap_or("")); + call = call.if_metageneration_match( value.map(|v| arg_from_str(v, err, "if-metageneration-match", "int64")).unwrap_or(-0)); }, "if-generation-not-match" => { - call = call.if_generation_not_match(value.unwrap_or("")); + call = call.if_generation_not_match( value.map(|v| arg_from_str(v, err, "if-generation-not-match", "int64")).unwrap_or(-0)); }, "if-generation-match" => { - call = call.if_generation_match(value.unwrap_or("")); + call = call.if_generation_match( value.map(|v| arg_from_str(v, err, "if-generation-match", "int64")).unwrap_or(-0)); }, "generation" => { - call = call.generation(value.unwrap_or("")); + call = call.generation( value.map(|v| arg_from_str(v, err, "generation", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4101,7 +3968,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["generation", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "predefined-acl", "projection", "provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["generation", "if-generation-match", "if-generation-not-match", "if-metageneration-match", "if-metageneration-not-match", "predefined-acl", "projection", "user-project"].iter().map(|v|*v)); v } )); } } @@ -4184,7 +4051,7 @@ where let 
(key, value) = parse_kv_arg(&*parg, err, false); match key { "versions" => { - call = call.versions(arg_from_str(value.unwrap_or("false"), err, "versions", "boolean")); + call = call.versions( value.map(|v| arg_from_str(v, err, "versions", "boolean")).unwrap_or(false)); }, "user-project" => { call = call.user_project(value.unwrap_or("")); @@ -4192,9 +4059,6 @@ where "start-offset" => { call = call.start_offset(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, "projection" => { call = call.projection(value.unwrap_or("")); }, @@ -4205,10 +4069,10 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "include-trailing-delimiter" => { - call = call.include_trailing_delimiter(arg_from_str(value.unwrap_or("false"), err, "include-trailing-delimiter", "boolean")); + call = call.include_trailing_delimiter( value.map(|v| arg_from_str(v, err, "include-trailing-delimiter", "boolean")).unwrap_or(false)); }, "end-offset" => { call = call.end_offset(value.unwrap_or("")); @@ -4229,7 +4093,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["delimiter", "end-offset", "include-trailing-delimiter", "max-results", "page-token", "prefix", "projection", "provisional-user-project", "start-offset", "user-project", "versions"].iter().map(|v|*v)); + v.extend(["delimiter", "end-offset", "include-trailing-delimiter", "max-results", "page-token", "prefix", "projection", "start-offset", "user-project", "versions"].iter().map(|v|*v)); v } )); } } @@ -4433,7 +4297,7 @@ where call = call.user_project(value.unwrap_or("")); }, "show-deleted-keys" => { - call = 
call.show_deleted_keys(arg_from_str(value.unwrap_or("false"), err, "show-deleted-keys", "boolean")); + call = call.show_deleted_keys( value.map(|v| arg_from_str(v, err, "show-deleted-keys", "boolean")).unwrap_or(false)); }, "service-account-email" => { call = call.service_account_email(value.unwrap_or("")); @@ -4442,7 +4306,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -4598,9 +4462,6 @@ where "user-project" => { call = call.user_project(value.unwrap_or("")); }, - "provisional-user-project" => { - call = call.provisional_user_project(value.unwrap_or("")); - }, _ => { let mut found = false; for param in &self.gp { @@ -4614,7 +4475,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["provisional-user-project", "user-project"].iter().map(|v|*v)); + v.extend(["user-project"].iter().map(|v|*v)); v } )); } } @@ -6459,7 +6320,7 @@ async fn main() { let mut app = App::new("storage1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230119") .about("Stores and retrieves potentially large, immutable data objects.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_storage1_cli") .arg(Arg::with_name("url") diff --git a/gen/storage1/Cargo.toml b/gen/storage1/Cargo.toml index fd36e13784..a2d66f79df 100644 --- a/gen/storage1/Cargo.toml +++ b/gen/storage1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-storage1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with storage (protocol v1)" repository = 
"https://github.com/Byron/google-apis-rs/tree/main/gen/storage1" homepage = "https://developers.google.com/storage/docs/json_api/" -documentation = "https://docs.rs/google-storage1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-storage1/5.0.2+20230119" license = "MIT" keywords = ["storage", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/storage1/README.md b/gen/storage1/README.md index 22894e7fc9..3809fdaf22 100644 --- a/gen/storage1/README.md +++ b/gen/storage1/README.md @@ -5,44 +5,44 @@ DO NOT EDIT ! --> The `google-storage1` library allows access to all features of the *Google storage* service. -This documentation was generated from *storage* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *storage:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *storage* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *storage:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *storage* *v1* API can be found at the [official documentation site](https://developers.google.com/storage/docs/json_api/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/Storage) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/Storage) ... 
-* [bucket access controls](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketAccessControl) - * [*delete*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketAccessControlDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketAccessControlGetCall), [*insert*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketAccessControlInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketAccessControlListCall), [*patch*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketAccessControlPatchCall) and [*update*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketAccessControlUpdateCall) -* [buckets](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::Bucket) - * [*delete*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketGetCall), [*get iam policy*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketGetIamPolicyCall), [*insert*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketListCall), [*lock retention policy*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketLockRetentionPolicyCall), [*patch*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketPatchCall), [*set iam policy*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketTestIamPermissionCall) and 
[*update*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::BucketUpdateCall) -* [channels](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::Channel) - * [*stop*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ChannelStopCall) +* [bucket access controls](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketAccessControl) + * [*delete*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketAccessControlDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketAccessControlGetCall), [*insert*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketAccessControlInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketAccessControlListCall), [*patch*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketAccessControlPatchCall) and [*update*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketAccessControlUpdateCall) +* [buckets](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::Bucket) + * [*delete*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketGetCall), [*get iam policy*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketGetIamPolicyCall), [*insert*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketListCall), [*lock retention policy*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketLockRetentionPolicyCall), [*patch*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketPatchCall), [*set iam 
policy*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketTestIamPermissionCall) and [*update*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::BucketUpdateCall) +* [channels](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::Channel) + * [*stop*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ChannelStopCall) * default object access controls - * [*delete*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::DefaultObjectAccessControlDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::DefaultObjectAccessControlGetCall), [*insert*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::DefaultObjectAccessControlInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::DefaultObjectAccessControlListCall), [*patch*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::DefaultObjectAccessControlPatchCall) and [*update*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::DefaultObjectAccessControlUpdateCall) -* [notifications](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::Notification) - * [*delete*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::NotificationDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::NotificationGetCall), [*insert*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::NotificationInsertCall) and [*list*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::NotificationListCall) -* [object access controls](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectAccessControl) - * 
[*delete*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectAccessControlDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectAccessControlGetCall), [*insert*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectAccessControlInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectAccessControlListCall), [*patch*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectAccessControlPatchCall) and [*update*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectAccessControlUpdateCall) -* [objects](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::Object) - * [*compose*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectComposeCall), [*copy*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectCopyCall), [*delete*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectGetCall), [*get iam policy*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectGetIamPolicyCall), [*insert*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectListCall), [*patch*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectPatchCall), [*rewrite*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectRewriteCall), [*set iam policy*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectTestIamPermissionCall), 
[*update*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectUpdateCall) and [*watch all*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectWatchAllCall) + * [*delete*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::DefaultObjectAccessControlDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::DefaultObjectAccessControlGetCall), [*insert*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::DefaultObjectAccessControlInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::DefaultObjectAccessControlListCall), [*patch*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::DefaultObjectAccessControlPatchCall) and [*update*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::DefaultObjectAccessControlUpdateCall) +* [notifications](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::Notification) + * [*delete*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::NotificationDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::NotificationGetCall), [*insert*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::NotificationInsertCall) and [*list*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::NotificationListCall) +* [object access controls](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectAccessControl) + * [*delete*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectAccessControlDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectAccessControlGetCall), [*insert*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectAccessControlInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectAccessControlListCall), 
[*patch*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectAccessControlPatchCall) and [*update*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectAccessControlUpdateCall) +* [objects](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::Object) + * [*compose*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectComposeCall), [*copy*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectCopyCall), [*delete*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectDeleteCall), [*get*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectGetCall), [*get iam policy*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectGetIamPolicyCall), [*insert*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectInsertCall), [*list*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectListCall), [*patch*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectPatchCall), [*rewrite*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectRewriteCall), [*set iam policy*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectSetIamPolicyCall), [*test iam permissions*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectTestIamPermissionCall), [*update*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectUpdateCall) and [*watch all*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectWatchAllCall) * projects - * [*hmac keys create*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ProjectHmacKeyCreateCall), [*hmac keys delete*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ProjectHmacKeyDeleteCall), [*hmac keys 
get*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ProjectHmacKeyGetCall), [*hmac keys list*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ProjectHmacKeyListCall), [*hmac keys update*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ProjectHmacKeyUpdateCall) and [*service account get*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ProjectServiceAccountGetCall) + * [*hmac keys create*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ProjectHmacKeyCreateCall), [*hmac keys delete*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ProjectHmacKeyDeleteCall), [*hmac keys get*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ProjectHmacKeyGetCall), [*hmac keys list*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ProjectHmacKeyListCall), [*hmac keys update*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ProjectHmacKeyUpdateCall) and [*service account get*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ProjectServiceAccountGetCall) Upload supported by ... -* [*insert objects*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectInsertCall) +* [*insert objects*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectInsertCall) Download supported by ... -* [*get objects*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectGetCall) +* [*get objects*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectGetCall) Subscription supported by ... 
-* [*list objects*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectListCall) -* [*watch all objects*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/api::ObjectWatchAllCall) +* [*list objects*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectListCall) +* [*watch all objects*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/api::ObjectWatchAllCall) @@ -50,17 +50,17 @@ Subscription supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/Storage)** +* **[Hub](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/Storage)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::CallBuilder) -* **[Resources](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::CallBuilder) +* **[Resources](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::Part)** + * **[Parts](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -178,17 +178,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -198,29 +198,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::Delegate) to the -[Method Builder](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::Delegate) to the +[Method Builder](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::RequestValue) and -[decodable](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::RequestValue) and +[decodable](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-storage1/5.0.2-beta-1+20230119/google_storage1/client::RequestValue) are moved +* [request values](https://docs.rs/google-storage1/5.0.2+20230119/google_storage1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/storage1/src/api.rs b/gen/storage1/src/api.rs index 7c6e40c24b..3f5e2e88f9 100644 --- a/gen/storage1/src/api.rs +++ b/gen/storage1/src/api.rs @@ -156,7 +156,7 @@ impl<'a, S> Storage { Storage { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://storage.googleapis.com/storage/v1/".to_string(), _root_url: "https://storage.googleapis.com/".to_string(), } @@ -188,7 +188,7 @@ impl<'a, S> Storage { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/storage1/src/client.rs b/gen/storage1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/storage1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/storage1/src/lib.rs b/gen/storage1/src/lib.rs index 2702f23832..130fcb38ef 100644 --- a/gen/storage1/src/lib.rs +++ b/gen/storage1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *storage* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *storage:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *storage* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *storage:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *storage* *v1* API can be found at the //! [official documentation site](https://developers.google.com/storage/docs/json_api/). diff --git a/gen/storagetransfer1-cli/Cargo.toml b/gen/storagetransfer1-cli/Cargo.toml index a6a43ad795..7705debdaf 100644 --- a/gen/storagetransfer1-cli/Cargo.toml +++ b/gen/storagetransfer1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-storagetransfer1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20230111" authors = ["Sebastian Thiel "] description = "A complete library to interact with storagetransfer (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/storagetransfer1-cli" @@ -20,13 +20,13 @@ name = "storagetransfer1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-storagetransfer1] path = "../storagetransfer1" -version = "4.0.1+20220223" +version = "5.0.2+20230111" + diff --git a/gen/storagetransfer1-cli/README.md b/gen/storagetransfer1-cli/README.md index b7ad14e254..6cfe10905f 100644 --- a/gen/storagetransfer1-cli/README.md +++ b/gen/storagetransfer1-cli/README.md @@ -25,7 +25,7 @@ Find the 
source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *storagetransfer* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *storagetransfer* API at revision *20230111*. The CLI is at version *5.0.2*. ```bash storagetransfer1 [options] @@ -39,6 +39,7 @@ storagetransfer1 [options] agent-pools-patch (-r )... [-p ]... [-o ] transfer-jobs create (-r )... [-p ]... [-o ] + delete [-p ]... [-o ] get [-p ]... [-o ] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] diff --git a/gen/storagetransfer1-cli/mkdocs.yml b/gen/storagetransfer1-cli/mkdocs.yml index f4866892f9..157665138d 100644 --- a/gen/storagetransfer1-cli/mkdocs.yml +++ b/gen/storagetransfer1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: storagetransfer v4.0.1+20220223 +site_name: storagetransfer v5.0.2+20230111 site_url: http://byron.github.io/google-apis-rs/google-storagetransfer1-cli site_description: A complete library to interact with storagetransfer (protocol v1) @@ -7,24 +7,29 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/storagetransfer1 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['google-service-accounts_get.md', 'Google Service Accounts', 'Get'] -- ['projects_agent-pools-create.md', 'Projects', 'Agent Pools Create'] -- ['projects_agent-pools-delete.md', 'Projects', 'Agent Pools Delete'] -- ['projects_agent-pools-get.md', 'Projects', 'Agent Pools Get'] -- ['projects_agent-pools-list.md', 'Projects', 'Agent Pools List'] -- ['projects_agent-pools-patch.md', 'Projects', 'Agent Pools Patch'] -- ['transfer-jobs_create.md', 'Transfer Jobs', 'Create'] -- ['transfer-jobs_get.md', 'Transfer Jobs', 'Get'] -- ['transfer-jobs_list.md', 'Transfer Jobs', 'List'] -- ['transfer-jobs_patch.md', 'Transfer Jobs', 'Patch'] -- ['transfer-jobs_run.md', 'Transfer Jobs', 'Run'] -- ['transfer-operations_cancel.md', 'Transfer Operations', 'Cancel'] -- 
['transfer-operations_get.md', 'Transfer Operations', 'Get'] -- ['transfer-operations_list.md', 'Transfer Operations', 'List'] -- ['transfer-operations_pause.md', 'Transfer Operations', 'Pause'] -- ['transfer-operations_resume.md', 'Transfer Operations', 'Resume'] +nav: +- Home: 'index.md' +- 'Google Service Accounts': + - 'Get': 'google-service-accounts_get.md' +- 'Projects': + - 'Agent Pools Create': 'projects_agent-pools-create.md' + - 'Agent Pools Delete': 'projects_agent-pools-delete.md' + - 'Agent Pools Get': 'projects_agent-pools-get.md' + - 'Agent Pools List': 'projects_agent-pools-list.md' + - 'Agent Pools Patch': 'projects_agent-pools-patch.md' +- 'Transfer Jobs': + - 'Create': 'transfer-jobs_create.md' + - 'Delete': 'transfer-jobs_delete.md' + - 'Get': 'transfer-jobs_get.md' + - 'List': 'transfer-jobs_list.md' + - 'Patch': 'transfer-jobs_patch.md' + - 'Run': 'transfer-jobs_run.md' +- 'Transfer Operations': + - 'Cancel': 'transfer-operations_cancel.md' + - 'Get': 'transfer-operations_get.md' + - 'List': 'transfer-operations_list.md' + - 'Pause': 'transfer-operations_pause.md' + - 'Resume': 'transfer-operations_resume.md' theme: readthedocs diff --git a/gen/storagetransfer1-cli/src/client.rs b/gen/storagetransfer1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/storagetransfer1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// 
U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/storagetransfer1-cli/src/main.rs b/gen/storagetransfer1-cli/src/main.rs index 5d2b05dc84..b6faa4316f 100644 --- a/gen/storagetransfer1-cli/src/main.rs +++ b/gen/storagetransfer1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_storagetransfer1::{api, Error, oauth2}; +use google_storagetransfer1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -309,7 +308,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -404,7 +403,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -479,6 +478,9 @@ where "creation-time" => Some(("creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deletion-time" => Some(("deletionTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "event-stream.event-stream-expiration-time" => 
Some(("eventStream.eventStreamExpirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "event-stream.event-stream-start-time" => Some(("eventStream.eventStreamStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "event-stream.name" => Some(("eventStream.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "last-modification-time" => Some(("lastModificationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "latest-operation-name" => Some(("latestOperationName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "logging-config.enable-onprem-gcs-transfer-logs" => Some(("loggingConfig.enableOnpremGcsTransferLogs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -505,6 +507,14 @@ where "schedule.start-time-of-day.nanos" => Some(("schedule.startTimeOfDay.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "schedule.start-time-of-day.seconds" => Some(("schedule.startTimeOfDay.seconds", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-spec.aws-s3-compatible-data-source.bucket-name" => Some(("transferSpec.awsS3CompatibleDataSource.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-spec.aws-s3-compatible-data-source.endpoint" => Some(("transferSpec.awsS3CompatibleDataSource.endpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-spec.aws-s3-compatible-data-source.path" => Some(("transferSpec.awsS3CompatibleDataSource.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-spec.aws-s3-compatible-data-source.region" => Some(("transferSpec.awsS3CompatibleDataSource.region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"transfer-spec.aws-s3-compatible-data-source.s3-metadata.auth-method" => Some(("transferSpec.awsS3CompatibleDataSource.s3Metadata.authMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-spec.aws-s3-compatible-data-source.s3-metadata.list-api" => Some(("transferSpec.awsS3CompatibleDataSource.s3Metadata.listApi", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-spec.aws-s3-compatible-data-source.s3-metadata.protocol" => Some(("transferSpec.awsS3CompatibleDataSource.s3Metadata.protocol", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-spec.aws-s3-compatible-data-source.s3-metadata.request-model" => Some(("transferSpec.awsS3CompatibleDataSource.s3Metadata.requestModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-spec.aws-s3-data-source.aws-access-key.access-key-id" => Some(("transferSpec.awsS3DataSource.awsAccessKey.accessKeyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-spec.aws-s3-data-source.aws-access-key.secret-access-key" => Some(("transferSpec.awsS3DataSource.awsAccessKey.secretAccessKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-spec.aws-s3-data-source.bucket-name" => Some(("transferSpec.awsS3DataSource.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -544,8 +554,9 @@ where "transfer-spec.transfer-options.metadata-options.time-created" => Some(("transferSpec.transferOptions.metadataOptions.timeCreated", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-spec.transfer-options.metadata-options.uid" => Some(("transferSpec.transferOptions.metadataOptions.uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-spec.transfer-options.overwrite-objects-already-existing-in-sink" => Some(("transferSpec.transferOptions.overwriteObjectsAlreadyExistingInSink", JsonTypeInfo 
{ jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "transfer-spec.transfer-options.overwrite-when" => Some(("transferSpec.transferOptions.overwriteWhen", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["access-key-id", "acl", "aws-access-key", "aws-s3-data-source", "azure-blob-storage-data-source", "azure-credentials", "bucket-name", "container", "creation-time", "day", "delete-objects-from-source-after-transfer", "delete-objects-unique-in-sink", "deletion-time", "description", "enable-onprem-gcs-transfer-logs", "end-time-of-day", "event-types", "exclude-prefixes", "gcs-data-sink", "gcs-data-source", "gcs-intermediate-data-location", "gid", "hours", "http-data-source", "include-prefixes", "kms-key", "last-modification-time", "last-modified-before", "last-modified-since", "latest-operation-name", "list-url", "location", "log-action-states", "log-actions", "logging-config", "max-time-elapsed-since-last-modification", "metadata-options", "min-time-elapsed-since-last-modification", "minutes", "mode", "month", "name", "nanos", "notification-config", "object-conditions", "overwrite-objects-already-existing-in-sink", "path", "payload-format", "posix-data-sink", "posix-data-source", "project-id", "pubsub-topic", "repeat-interval", "role-arn", "root-directory", "sas-token", "schedule", "schedule-end-date", "schedule-start-date", "seconds", "secret-access-key", "sink-agent-pool-name", "source-agent-pool-name", "start-time-of-day", "status", "storage-account", "storage-class", "symlink", "temporary-hold", "time-created", "transfer-manifest", "transfer-options", "transfer-spec", "uid", "year"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access-key-id", "acl", "auth-method", "aws-access-key", "aws-s3-compatible-data-source", "aws-s3-data-source", "azure-blob-storage-data-source", "azure-credentials", "bucket-name", "container", "creation-time", "day", 
"delete-objects-from-source-after-transfer", "delete-objects-unique-in-sink", "deletion-time", "description", "enable-onprem-gcs-transfer-logs", "end-time-of-day", "endpoint", "event-stream", "event-stream-expiration-time", "event-stream-start-time", "event-types", "exclude-prefixes", "gcs-data-sink", "gcs-data-source", "gcs-intermediate-data-location", "gid", "hours", "http-data-source", "include-prefixes", "kms-key", "last-modification-time", "last-modified-before", "last-modified-since", "latest-operation-name", "list-api", "list-url", "location", "log-action-states", "log-actions", "logging-config", "max-time-elapsed-since-last-modification", "metadata-options", "min-time-elapsed-since-last-modification", "minutes", "mode", "month", "name", "nanos", "notification-config", "object-conditions", "overwrite-objects-already-existing-in-sink", "overwrite-when", "path", "payload-format", "posix-data-sink", "posix-data-source", "project-id", "protocol", "pubsub-topic", "region", "repeat-interval", "request-model", "role-arn", "root-directory", "s3-metadata", "sas-token", "schedule", "schedule-end-date", "schedule-start-date", "seconds", "secret-access-key", "sink-agent-pool-name", "source-agent-pool-name", "start-time-of-day", "status", "storage-account", "storage-class", "symlink", "temporary-hold", "time-created", "transfer-manifest", "transfer-options", "transfer-spec", "uid", "year"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -605,6 +616,58 @@ where } } + async fn _transfer_jobs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.transfer_jobs().delete(opt.value_of("job-name").unwrap_or(""), opt.value_of("project-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { 
+ let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _transfer_jobs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.transfer_jobs().get(opt.value_of("job-name").unwrap_or(""), opt.value_of("project-id").unwrap_or("")); @@ -667,7 +730,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -743,6 +806,9 @@ where "transfer-job.creation-time" => Some(("transferJob.creationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.deletion-time" => 
Some(("transferJob.deletionTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.description" => Some(("transferJob.description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.event-stream.event-stream-expiration-time" => Some(("transferJob.eventStream.eventStreamExpirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.event-stream.event-stream-start-time" => Some(("transferJob.eventStream.eventStreamStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.event-stream.name" => Some(("transferJob.eventStream.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.last-modification-time" => Some(("transferJob.lastModificationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.latest-operation-name" => Some(("transferJob.latestOperationName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.logging-config.enable-onprem-gcs-transfer-logs" => Some(("transferJob.loggingConfig.enableOnpremGcsTransferLogs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -769,6 +835,14 @@ where "transfer-job.schedule.start-time-of-day.nanos" => Some(("transferJob.schedule.startTimeOfDay.nanos", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "transfer-job.schedule.start-time-of-day.seconds" => Some(("transferJob.schedule.startTimeOfDay.seconds", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "transfer-job.status" => Some(("transferJob.status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.transfer-spec.aws-s3-compatible-data-source.bucket-name" => Some(("transferJob.transferSpec.awsS3CompatibleDataSource.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"transfer-job.transfer-spec.aws-s3-compatible-data-source.endpoint" => Some(("transferJob.transferSpec.awsS3CompatibleDataSource.endpoint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.transfer-spec.aws-s3-compatible-data-source.path" => Some(("transferJob.transferSpec.awsS3CompatibleDataSource.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.transfer-spec.aws-s3-compatible-data-source.region" => Some(("transferJob.transferSpec.awsS3CompatibleDataSource.region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.transfer-spec.aws-s3-compatible-data-source.s3-metadata.auth-method" => Some(("transferJob.transferSpec.awsS3CompatibleDataSource.s3Metadata.authMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.transfer-spec.aws-s3-compatible-data-source.s3-metadata.list-api" => Some(("transferJob.transferSpec.awsS3CompatibleDataSource.s3Metadata.listApi", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.transfer-spec.aws-s3-compatible-data-source.s3-metadata.protocol" => Some(("transferJob.transferSpec.awsS3CompatibleDataSource.s3Metadata.protocol", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "transfer-job.transfer-spec.aws-s3-compatible-data-source.s3-metadata.request-model" => Some(("transferJob.transferSpec.awsS3CompatibleDataSource.s3Metadata.requestModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.transfer-spec.aws-s3-data-source.aws-access-key.access-key-id" => Some(("transferJob.transferSpec.awsS3DataSource.awsAccessKey.accessKeyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.transfer-spec.aws-s3-data-source.aws-access-key.secret-access-key" => Some(("transferJob.transferSpec.awsS3DataSource.awsAccessKey.secretAccessKey", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "transfer-job.transfer-spec.aws-s3-data-source.bucket-name" => Some(("transferJob.transferSpec.awsS3DataSource.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -808,9 +882,10 @@ where "transfer-job.transfer-spec.transfer-options.metadata-options.time-created" => Some(("transferJob.transferSpec.transferOptions.metadataOptions.timeCreated", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.transfer-spec.transfer-options.metadata-options.uid" => Some(("transferJob.transferSpec.transferOptions.metadataOptions.uid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "transfer-job.transfer-spec.transfer-options.overwrite-objects-already-existing-in-sink" => Some(("transferJob.transferSpec.transferOptions.overwriteObjectsAlreadyExistingInSink", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "transfer-job.transfer-spec.transfer-options.overwrite-when" => Some(("transferJob.transferSpec.transferOptions.overwriteWhen", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-transfer-job-field-mask" => Some(("updateTransferJobFieldMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["access-key-id", "acl", "aws-access-key", "aws-s3-data-source", "azure-blob-storage-data-source", "azure-credentials", "bucket-name", "container", "creation-time", "day", "delete-objects-from-source-after-transfer", "delete-objects-unique-in-sink", "deletion-time", "description", "enable-onprem-gcs-transfer-logs", "end-time-of-day", "event-types", "exclude-prefixes", "gcs-data-sink", "gcs-data-source", "gcs-intermediate-data-location", "gid", "hours", "http-data-source", "include-prefixes", "kms-key", "last-modification-time", "last-modified-before", "last-modified-since", "latest-operation-name", "list-url", "location", "log-action-states", 
"log-actions", "logging-config", "max-time-elapsed-since-last-modification", "metadata-options", "min-time-elapsed-since-last-modification", "minutes", "mode", "month", "name", "nanos", "notification-config", "object-conditions", "overwrite-objects-already-existing-in-sink", "path", "payload-format", "posix-data-sink", "posix-data-source", "project-id", "pubsub-topic", "repeat-interval", "role-arn", "root-directory", "sas-token", "schedule", "schedule-end-date", "schedule-start-date", "seconds", "secret-access-key", "sink-agent-pool-name", "source-agent-pool-name", "start-time-of-day", "status", "storage-account", "storage-class", "symlink", "temporary-hold", "time-created", "transfer-job", "transfer-manifest", "transfer-options", "transfer-spec", "uid", "update-transfer-job-field-mask", "year"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access-key-id", "acl", "auth-method", "aws-access-key", "aws-s3-compatible-data-source", "aws-s3-data-source", "azure-blob-storage-data-source", "azure-credentials", "bucket-name", "container", "creation-time", "day", "delete-objects-from-source-after-transfer", "delete-objects-unique-in-sink", "deletion-time", "description", "enable-onprem-gcs-transfer-logs", "end-time-of-day", "endpoint", "event-stream", "event-stream-expiration-time", "event-stream-start-time", "event-types", "exclude-prefixes", "gcs-data-sink", "gcs-data-source", "gcs-intermediate-data-location", "gid", "hours", "http-data-source", "include-prefixes", "kms-key", "last-modification-time", "last-modified-before", "last-modified-since", "latest-operation-name", "list-api", "list-url", "location", "log-action-states", "log-actions", "logging-config", "max-time-elapsed-since-last-modification", "metadata-options", "min-time-elapsed-since-last-modification", "minutes", "mode", "month", "name", "nanos", "notification-config", "object-conditions", "overwrite-objects-already-existing-in-sink", "overwrite-when", "path", "payload-format", 
"posix-data-sink", "posix-data-source", "project-id", "protocol", "pubsub-topic", "region", "repeat-interval", "request-model", "role-arn", "root-directory", "s3-metadata", "sas-token", "schedule", "schedule-end-date", "schedule-start-date", "seconds", "secret-access-key", "sink-agent-pool-name", "source-agent-pool-name", "start-time-of-day", "status", "storage-account", "storage-class", "symlink", "temporary-hold", "time-created", "transfer-job", "transfer-manifest", "transfer-options", "transfer-spec", "uid", "update-transfer-job-field-mask", "year"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1101,7 +1176,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1362,6 +1437,9 @@ where ("create", Some(opt)) => { call_result = self._transfer_jobs_create(opt, dry_run, &mut err).await; }, + ("delete", Some(opt)) => { + call_result = self._transfer_jobs_delete(opt, dry_run, &mut err).await; + }, ("get", Some(opt)) => { call_result = self._transfer_jobs_get(opt, dry_run, &mut err).await; }, @@ -1626,7 +1704,7 @@ async fn main() { ]), ]), - ("transfer-jobs", "methods: 'create', 'get', 'list', 'patch' and 'run'", vec![ + ("transfer-jobs", "methods: 'create', 'delete', 'get', 'list', 'patch' and 'run'", vec![ ("create", Some(r##"Creates a transfer job that runs periodically."##), "Details at http://byron.github.io/google-apis-rs/google_storagetransfer1_cli/transfer-jobs_create", @@ -1643,6 +1721,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("delete", + Some(r##"Deletes a transfer job. 
Deleting a transfer job sets its status to DELETED."##), + "Details at http://byron.github.io/google-apis-rs/google_storagetransfer1_cli/transfer-jobs_delete", + vec![ + (Some(r##"job-name"##), + None, + Some(r##"Required. The job to delete."##), + Some(true), + Some(false)), + + (Some(r##"project-id"##), + None, + Some(r##"Required. The ID of the Google Cloud project that owns the job."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1728,7 +1834,7 @@ async fn main() { Some(false)), ]), ("run", - Some(r##"Attempts to start a new TransferOperation for the current TransferJob. A TransferJob has a maximum of one active TransferOperation. If this method is called while a TransferOperation is active, an error will be returned."##), + Some(r##"Starts a new operation for the specified transfer job. A `TransferJob` has a maximum of one active `TransferOperation`. If this method is called while a `TransferOperation` is active, an error is returned."##), "Details at http://byron.github.io/google-apis-rs/google_storagetransfer1_cli/transfer-jobs_run", vec![ (Some(r##"job-name"##), @@ -1898,8 +2004,8 @@ async fn main() { let mut app = App::new("storagetransfer1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") - .about("Transfers data from external data sources to a Google Cloud Storage bucket or between Google Cloud Storage buckets.") + .version("5.0.2+20230111") + .about("Transfers data from external data sources to a Google Cloud Storage bucket or between Google Cloud Storage buckets. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_storagetransfer1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/storagetransfer1/Cargo.toml b/gen/storagetransfer1/Cargo.toml index 63f7ecf3c2..deb9ef3ff1 100644 --- a/gen/storagetransfer1/Cargo.toml +++ b/gen/storagetransfer1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-storagetransfer1" -version = "5.0.2-beta-1+20230111" +version = "5.0.2+20230111" authors = ["Sebastian Thiel "] description = "A complete library to interact with storagetransfer (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/storagetransfer1" homepage = "https://cloud.google.com/storage-transfer/docs" -documentation = "https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111" +documentation = "https://docs.rs/google-storagetransfer1/5.0.2+20230111" license = "MIT" keywords = ["storagetransfer", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/storagetransfer1/README.md b/gen/storagetransfer1/README.md index e763083322..ba66610cb6 100644 --- a/gen/storagetransfer1/README.md +++ b/gen/storagetransfer1/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-storagetransfer1` library allows access to all features of the *Google storagetransfer* service. -This documentation was generated from *storagetransfer* crate version *5.0.2-beta-1+20230111*, where *20230111* is the exact revision of the *storagetransfer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *storagetransfer* crate version *5.0.2+20230111*, where *20230111* is the exact revision of the *storagetransfer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *storagetransfer* *v1* API can be found at the [official documentation site](https://cloud.google.com/storage-transfer/docs). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/Storagetransfer) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/Storagetransfer) ... -* [google service accounts](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::GoogleServiceAccount) - * [*get*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::GoogleServiceAccountGetCall) +* [google service accounts](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::GoogleServiceAccount) + * [*get*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::GoogleServiceAccountGetCall) * projects - * [*agent pools create*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::ProjectAgentPoolCreateCall), [*agent pools delete*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::ProjectAgentPoolDeleteCall), [*agent pools get*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::ProjectAgentPoolGetCall), [*agent pools list*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::ProjectAgentPoolListCall) and [*agent pools patch*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::ProjectAgentPoolPatchCall) -* [transfer jobs](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferJob) - * [*create*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferJobCreateCall), [*delete*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferJobDeleteCall), 
[*get*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferJobGetCall), [*list*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferJobListCall), [*patch*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferJobPatchCall) and [*run*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferJobRunCall) -* [transfer operations](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferOperation) - * [*cancel*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferOperationCancelCall), [*get*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferOperationGetCall), [*list*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferOperationListCall), [*pause*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferOperationPauseCall) and [*resume*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/api::TransferOperationResumeCall) + * [*agent pools create*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::ProjectAgentPoolCreateCall), [*agent pools delete*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::ProjectAgentPoolDeleteCall), [*agent pools get*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::ProjectAgentPoolGetCall), [*agent pools list*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::ProjectAgentPoolListCall) and [*agent pools patch*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::ProjectAgentPoolPatchCall) +* [transfer 
jobs](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferJob) + * [*create*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferJobCreateCall), [*delete*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferJobDeleteCall), [*get*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferJobGetCall), [*list*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferJobListCall), [*patch*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferJobPatchCall) and [*run*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferJobRunCall) +* [transfer operations](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferOperation) + * [*cancel*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferOperationCancelCall), [*get*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferOperationGetCall), [*list*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferOperationListCall), [*pause*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferOperationPauseCall) and [*resume*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/api::TransferOperationResumeCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/Storagetransfer)** +* **[Hub](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/Storagetransfer)** * a central object to maintain state and allow accessing all 
*Activities* - * creates [*Method Builders*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::CallBuilder) -* **[Resources](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::CallBuilder) +* **[Resources](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::Part)** + * **[Parts](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::Delegate) to the -[Method Builder](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::Delegate) to the +[Method Builder](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::RequestValue) and -[decodable](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::RequestValue) and +[decodable](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-storagetransfer1/5.0.2-beta-1+20230111/google_storagetransfer1/client::RequestValue) are moved +* [request values](https://docs.rs/google-storagetransfer1/5.0.2+20230111/google_storagetransfer1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/storagetransfer1/src/api.rs b/gen/storagetransfer1/src/api.rs index d4a7451867..31ddd4fcb5 100644 --- a/gen/storagetransfer1/src/api.rs +++ b/gen/storagetransfer1/src/api.rs @@ -121,7 +121,7 @@ impl<'a, S> Storagetransfer { Storagetransfer { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://storagetransfer.googleapis.com/".to_string(), _root_url: "https://storagetransfer.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> Storagetransfer { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/storagetransfer1/src/client.rs b/gen/storagetransfer1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/storagetransfer1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/storagetransfer1/src/lib.rs b/gen/storagetransfer1/src/lib.rs index 63ca699fa8..6dc7fcf366 100644 --- a/gen/storagetransfer1/src/lib.rs +++ b/gen/storagetransfer1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *storagetransfer* crate version *5.0.2-beta-1+20230111*, where *20230111* is the exact revision of the *storagetransfer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *storagetransfer* crate version *5.0.2+20230111*, where *20230111* is the exact revision of the *storagetransfer:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *storagetransfer* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/storage-transfer/docs). diff --git a/gen/sts1-cli/Cargo.toml b/gen/sts1-cli/Cargo.toml index cd99fa4b2d..8756385a51 100644 --- a/gen/sts1-cli/Cargo.toml +++ b/gen/sts1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-sts1-cli" -version = "4.0.1+20220227" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Security Token (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sts1-cli" @@ -20,13 +20,13 @@ name = "sts1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-sts1] path = "../sts1" -version = "4.0.1+20220227" +version = "5.0.2+20230114" + diff --git a/gen/sts1-cli/README.md b/gen/sts1-cli/README.md index 00f8ecaf40..9a8cc75cf2 100644 --- a/gen/sts1-cli/README.md +++ b/gen/sts1-cli/README.md @@ -25,12 +25,13 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from 
the *Cloud Security Token* API at revision *20220227*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Security Token* API at revision *20230114*. The CLI is at version *5.0.2*. ```bash sts1 [options] methods introspect (-r )... [-p ]... [-o ] + oauthtoken (-r )... [-p ]... [-o ] token (-r )... [-p ]... [-o ] sts1 --help diff --git a/gen/sts1-cli/mkdocs.yml b/gen/sts1-cli/mkdocs.yml index 496e43aca9..dc035235c3 100644 --- a/gen/sts1-cli/mkdocs.yml +++ b/gen/sts1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Security Token v4.0.1+20220227 +site_name: Cloud Security Token v5.0.2+20230114 site_url: http://byron.github.io/google-apis-rs/google-sts1-cli site_description: A complete library to interact with Cloud Security Token (protocol v1) @@ -7,10 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/sts1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['methods_introspect.md', 'Methods', 'Introspect'] -- ['methods_token.md', 'Methods', 'Token'] +nav: +- Home: 'index.md' +- 'Methods': + - 'Introspect': 'methods_introspect.md' + - 'Oauthtoken': 'methods_oauthtoken.md' + - 'Token': 'methods_token.md' theme: readthedocs diff --git a/gen/sts1-cli/src/client.rs b/gen/sts1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/sts1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), 
-// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/sts1-cli/src/main.rs b/gen/sts1-cli/src/main.rs index f3b9eb081f..4fa72087c5 100644 --- a/gen/sts1-cli/src/main.rs +++ b/gen/sts1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_sts1::{api, Error, oauth2}; +use google_sts1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -134,6 +133,94 @@ where } } + async fn _methods_oauthtoken(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "client-id" => Some(("clientId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "code" => Some(("code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "code-verifier" => Some(("codeVerifier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "grant-type" => Some(("grantType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "redirect-uri" => Some(("redirectUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "refresh-token" => Some(("refreshToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "scope" => Some(("scope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["client-id", "code", "code-verifier", "grant-type", "redirect-uri", "refresh-token", "scope"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GoogleIdentityStsV1ExchangeOauthTokenRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.methods().oauthtoken(request); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } 
else { + assert!(err.issues.len() == 0); + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _methods_token(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -232,6 +319,9 @@ where ("introspect", Some(opt)) => { call_result = self._methods_introspect(opt, dry_run, &mut err).await; }, + ("oauthtoken", Some(opt)) => { + call_result = self._methods_oauthtoken(opt, dry_run, &mut err).await; + }, ("token", Some(opt)) => { call_result = self._methods_token(opt, dry_run, &mut err).await; }, @@ -314,7 +404,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("methods", "methods: 'introspect' and 'token'", vec![ + ("methods", "methods: 'introspect', 'oauthtoken' and 'token'", vec![ ("introspect", Some(r##"Gets information about a Google OAuth 2.0 access token issued by the Google Cloud [Security Token Service API](https://cloud.google.com/iam/docs/reference/sts/rest)."##), "Details at http://byron.github.io/google-apis-rs/google_sts1_cli/methods_introspect", @@ -331,6 +421,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("oauthtoken", + Some(r##"Exchanges a credential that represents the resource owner's authorization for a Google-generated [OAuth 2.0 access token] 
(https://www.rfc-editor.org/rfc/rfc6749#section-5) or [refreshes an accesstoken] (https://www.rfc-editor.org/rfc/rfc6749#section-6) following [the OAuth 2.0 authorization framework] (https://tools.ietf.org/html/rfc8693) The credential can be one of the following: - An authorization code issued by the workforce identity federation authorization endpoint - A [refresh token](https://www.rfc-editor.org/rfc/rfc6749#section-10.4) issued by this endpoint This endpoint is only meant to be called by the Google Cloud CLI. Also note that this API only accepts the authorization code issued for workforce pools."##), + "Details at http://byron.github.io/google-apis-rs/google_sts1_cli/methods_oauthtoken", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -338,7 +450,7 @@ async fn main() { Some(false)), ]), ("token", - Some(r##"Exchanges a credential for a Google OAuth 2.0 access token. The token asserts an external identity within an identity pool, or it applies a Credential Access Boundary to a Google access token. When you call this method, do not send the `Authorization` HTTP header in the request. This method does not require the `Authorization` header, and using the header can cause the request to fail."##), + Some(r##"Exchanges a credential for a Google OAuth 2.0 access token. The token asserts an external identity within an identity pool, or it applies a Credential Access Boundary to a Google access token. Note that workforce pools do not support Credential Access Boundaries. When you call this method, do not send the `Authorization` HTTP header in the request. 
This method does not require the `Authorization` header, and using the header can cause the request to fail."##), "Details at http://byron.github.io/google-apis-rs/google_sts1_cli/methods_token", vec![ (Some(r##"kv"##), @@ -365,7 +477,7 @@ async fn main() { let mut app = App::new("sts1") .author("Sebastian Thiel ") - .version("4.0.1+20220227") + .version("5.0.2+20230114") .about("The Security Token Service exchanges Google or third-party credentials for a short-lived access token to Google Cloud resources.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_sts1_cli") .arg(Arg::with_name("folder") diff --git a/gen/sts1/Cargo.toml b/gen/sts1/Cargo.toml index 8b230bd4d6..de95e2cf3f 100644 --- a/gen/sts1/Cargo.toml +++ b/gen/sts1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-sts1" -version = "5.0.2-beta-1+20230114" +version = "5.0.2+20230114" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Security Token (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/sts1" homepage = "http://cloud.google.com/iam/docs/workload-identity-federation" -documentation = "https://docs.rs/google-sts1/5.0.2-beta-1+20230114" +documentation = "https://docs.rs/google-sts1/5.0.2+20230114" license = "MIT" keywords = ["sts", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/sts1/README.md b/gen/sts1/README.md index b4e433ab6a..685d849f5f 100644 --- a/gen/sts1/README.md +++ b/gen/sts1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-sts1` library allows access to all features of the *Google Cloud Security Token* service. -This documentation was generated from *Cloud Security Token* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *sts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Cloud Security Token* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *sts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Security Token* *v1* API can be found at the [official documentation site](http://cloud.google.com/iam/docs/workload-identity-federation). # Features -Use the following functionality with ease from the central [hub](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/CloudSecurityToken) ... +Use the following functionality with ease from the central [hub](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/CloudSecurityToken) ... -* [introspect](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/api::MethodIntrospectCall) -* [oauthtoken](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/api::MethodOauthtokenCall) -* [token](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/api::MethodTokenCall) +* [introspect](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/api::MethodIntrospectCall) +* [oauthtoken](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/api::MethodOauthtokenCall) +* [token](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/api::MethodTokenCall) @@ -24,17 +24,17 @@ Use the following functionality with ease from the central [hub](https://docs.rs The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/CloudSecurityToken)** +* **[Hub](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/CloudSecurityToken)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::CallBuilder) +* **[Resources](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::Part)** + * **[Parts](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::Delegate) to the -[Method Builder](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::Delegate) to the +[Method Builder](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::RequestValue) and -[decodable](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::RequestValue) and +[decodable](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-sts1/5.0.2-beta-1+20230114/google_sts1/client::RequestValue) are moved +* [request values](https://docs.rs/google-sts1/5.0.2+20230114/google_sts1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/sts1/src/api.rs b/gen/sts1/src/api.rs index 3f6d184c01..d316e574fb 100644 --- a/gen/sts1/src/api.rs +++ b/gen/sts1/src/api.rs @@ -103,7 +103,7 @@ impl<'a, S> CloudSecurityToken { CloudSecurityToken { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://sts.googleapis.com/".to_string(), _root_url: "https://sts.googleapis.com/".to_string(), } @@ -114,7 +114,7 @@ impl<'a, S> CloudSecurityToken { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/sts1/src/client.rs b/gen/sts1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/sts1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/sts1/src/lib.rs b/gen/sts1/src/lib.rs index 66f26a1832..144221cb94 100644 --- a/gen/sts1/src/lib.rs +++ b/gen/sts1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Security Token* crate version *5.0.2-beta-1+20230114*, where *20230114* is the exact revision of the *sts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Security Token* crate version *5.0.2+20230114*, where *20230114* is the exact revision of the *sts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Security Token* *v1* API can be found at the //! [official documentation site](http://cloud.google.com/iam/docs/workload-identity-federation). diff --git a/gen/surveys2-cli/Cargo.toml b/gen/surveys2-cli/Cargo.toml index 4eb33642e1..d65499defe 100644 --- a/gen/surveys2-cli/Cargo.toml +++ b/gen/surveys2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-surveys2-cli" -version = "4.0.1+20180508" +version = "5.0.2+20180508" authors = ["Sebastian Thiel "] description = "A complete library to interact with Surveys (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/surveys2-cli" @@ -19,13 +19,13 @@ name = "surveys2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -35,7 +35,7 @@ tower-service = "^0.3.1" - [dependencies.google-surveys2] path = "../surveys2" -version = "4.0.1+20180508" +version = "5.0.2+20180508" + diff --git a/gen/surveys2-cli/README.md b/gen/surveys2-cli/README.md index 0388f4749c..6e730616a8 100644 --- a/gen/surveys2-cli/README.md +++ b/gen/surveys2-cli/README.md @@ -22,7 +22,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # 
Usage -This documentation was generated from the *Surveys* API at revision *20180508*. The CLI is at version *4.0.1*. +This documentation was generated from the *Surveys* API at revision *20180508*. The CLI is at version *5.0.2*. ```bash surveys2 [options] diff --git a/gen/surveys2-cli/mkdocs.yml b/gen/surveys2-cli/mkdocs.yml index 06fc25b891..e78da930af 100644 --- a/gen/surveys2-cli/mkdocs.yml +++ b/gen/surveys2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Surveys v4.0.1+20180508 +site_name: Surveys v5.0.2+20180508 site_url: http://byron.github.io/google-apis-rs/google-surveys2-cli site_description: A complete library to interact with Surveys (protocol v2) @@ -7,16 +7,18 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/surveys2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['results_get.md', 'Results', 'Get'] -- ['surveys_delete.md', 'Surveys', 'Delete'] -- ['surveys_get.md', 'Surveys', 'Get'] -- ['surveys_insert.md', 'Surveys', 'Insert'] -- ['surveys_list.md', 'Surveys', 'List'] -- ['surveys_start.md', 'Surveys', 'Start'] -- ['surveys_stop.md', 'Surveys', 'Stop'] -- ['surveys_update.md', 'Surveys', 'Update'] +nav: +- Home: 'index.md' +- 'Results': + - 'Get': 'results_get.md' +- 'Surveys': + - 'Delete': 'surveys_delete.md' + - 'Get': 'surveys_get.md' + - 'Insert': 'surveys_insert.md' + - 'List': 'surveys_list.md' + - 'Start': 'surveys_start.md' + - 'Stop': 'surveys_stop.md' + - 'Update': 'surveys_update.md' theme: readthedocs diff --git a/gen/surveys2-cli/src/client.rs b/gen/surveys2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/surveys2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use 
std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/surveys2-cli/src/main.rs b/gen/surveys2-cli/src/main.rs index 18f8f9f36a..23007c7386 100644 --- a/gen/surveys2-cli/src/main.rs +++ b/gen/surveys2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_surveys2::{api, Error, oauth2}; +use google_surveys2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -363,10 +362,10 @@ where call = call.token(value.unwrap_or("")); }, "start-index" => { - call = call.start_index(arg_from_str(value.unwrap_or("-0"), err, "start-index", "integer")); + call = call.start_index( value.map(|v| arg_from_str(v, err, "start-index", "uint32")).unwrap_or(0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -968,7 +967,7 @@ async fn main() { let mut app = App::new("surveys2") .author("Sebastian Thiel ") - .version("4.0.1+20180508") + .version("5.0.2+20180508") .about("Creates and conducts surveys, lists the surveys that an authenticated user owns, and retrieves survey results and information about specified surveys.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_surveys2_cli") .arg(Arg::with_name("url") diff --git a/gen/surveys2/Cargo.toml b/gen/surveys2/Cargo.toml index 621e401147..b94d505e0d 100644 --- 
a/gen/surveys2/Cargo.toml +++ b/gen/surveys2/Cargo.toml @@ -4,11 +4,11 @@ [package] name = "google-surveys2" -version = "5.0.2-beta-1+20180508" +version = "5.0.2+20180508" authors = ["Sebastian Thiel "] description = "A complete library to interact with Surveys (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/surveys2" -documentation = "https://docs.rs/google-surveys2/5.0.2-beta-1+20180508" +documentation = "https://docs.rs/google-surveys2/5.0.2+20180508" license = "MIT" keywords = ["surveys", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/surveys2/README.md b/gen/surveys2/README.md index e8339a3db2..48b54160f1 100644 --- a/gen/surveys2/README.md +++ b/gen/surveys2/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-surveys2` library allows access to all features of the *Google Surveys* service. -This documentation was generated from *Surveys* crate version *5.0.2-beta-1+20180508*, where *20180508* is the exact revision of the *surveys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Surveys* crate version *5.0.2+20180508*, where *20180508* is the exact revision of the *surveys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/Surveys) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/Surveys) ... 
* results - * [*get*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::ResultGetCall) -* [surveys](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::Survey) - * [*delete*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::SurveyDeleteCall), [*get*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::SurveyGetCall), [*insert*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::SurveyInsertCall), [*list*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::SurveyListCall), [*start*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::SurveyStartCall), [*stop*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::SurveyStopCall) and [*update*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::SurveyUpdateCall) + * [*get*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::ResultGetCall) +* [surveys](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::Survey) + * [*delete*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::SurveyDeleteCall), [*get*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::SurveyGetCall), [*insert*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::SurveyInsertCall), [*list*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::SurveyListCall), [*start*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::SurveyStartCall), [*stop*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::SurveyStopCall) and [*update*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::SurveyUpdateCall) Download supported by ... 
-* [*get results*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/api::ResultGetCall) +* [*get results*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/api::ResultGetCall) @@ -26,17 +26,17 @@ Download supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/Surveys)** +* **[Hub](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/Surveys)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::CallBuilder) -* **[Resources](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::CallBuilder) +* **[Resources](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::Part)** + * **[Parts](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and 
ease browsing. @@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::Delegate) to the -[Method Builder](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::Delegate) to the +[Method Builder](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::RequestValue) and -[decodable](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::RequestValue) and +[decodable](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-surveys2/5.0.2-beta-1+20180508/google_surveys2/client::RequestValue) are moved +* [request values](https://docs.rs/google-surveys2/5.0.2+20180508/google_surveys2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/surveys2/src/api.rs b/gen/surveys2/src/api.rs index c0e8ce597a..f6a4a43031 100644 --- a/gen/surveys2/src/api.rs +++ b/gen/surveys2/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> Surveys { Surveys { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/surveys/v2/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -144,7 +144,7 @@ impl<'a, S> Surveys { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/surveys2/src/client.rs b/gen/surveys2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/surveys2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/surveys2/src/lib.rs b/gen/surveys2/src/lib.rs index fc12e320c0..0b871f1739 100644 --- a/gen/surveys2/src/lib.rs +++ b/gen/surveys2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Surveys* crate version *5.0.2-beta-1+20180508*, where *20180508* is the exact revision of the *surveys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Surveys* crate version *5.0.2+20180508*, where *20180508* is the exact revision of the *surveys:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/main/gen/surveys2). //! # Features //! diff --git a/gen/tagmanager1-cli/Cargo.toml b/gen/tagmanager1-cli/Cargo.toml index 14b5a38df1..f9806c8d10 100644 --- a/gen/tagmanager1-cli/Cargo.toml +++ b/gen/tagmanager1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-tagmanager1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Tag Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tagmanager1-cli" @@ -20,13 +20,13 @@ name = "tagmanager1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-tagmanager1] path = "../tagmanager1" -version = "4.0.1+20220301" +version = "5.0.2+20230123" + diff --git a/gen/tagmanager1-cli/README.md b/gen/tagmanager1-cli/README.md index d5e5e23edd..7647768666 100644 --- a/gen/tagmanager1-cli/README.md +++ b/gen/tagmanager1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *Tag Manager* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Tag Manager* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash tagmanager1 [options] diff --git a/gen/tagmanager1-cli/mkdocs.yml b/gen/tagmanager1-cli/mkdocs.yml index f30b627b5a..a6a1bdb022 100644 --- a/gen/tagmanager1-cli/mkdocs.yml +++ b/gen/tagmanager1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Tag Manager v4.0.1+20220301 +site_name: Tag Manager v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-tagmanager1-cli site_description: A complete library to interact with Tag Manager (protocol v1) @@ -7,57 +7,58 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/tagmanager1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_containers-create.md', 'Accounts', 'Containers Create'] -- ['accounts_containers-delete.md', 'Accounts', 'Containers Delete'] -- ['accounts_containers-environments-create.md', 'Accounts', 'Containers Environments Create'] -- ['accounts_containers-environments-delete.md', 'Accounts', 'Containers Environments Delete'] -- ['accounts_containers-environments-get.md', 'Accounts', 'Containers Environments Get'] -- ['accounts_containers-environments-list.md', 'Accounts', 'Containers Environments List'] -- ['accounts_containers-environments-update.md', 'Accounts', 'Containers Environments Update'] -- ['accounts_containers-folders-create.md', 'Accounts', 'Containers Folders Create'] -- ['accounts_containers-folders-delete.md', 'Accounts', 'Containers Folders Delete'] -- ['accounts_containers-folders-entities-list.md', 'Accounts', 'Containers Folders Entities List'] -- ['accounts_containers-folders-get.md', 'Accounts', 'Containers Folders Get'] -- ['accounts_containers-folders-list.md', 'Accounts', 'Containers Folders List'] -- ['accounts_containers-folders-update.md', 'Accounts', 'Containers Folders Update'] -- 
['accounts_containers-get.md', 'Accounts', 'Containers Get'] -- ['accounts_containers-list.md', 'Accounts', 'Containers List'] -- ['accounts_containers-move-folders-update.md', 'Accounts', 'Containers Move Folders Update'] -- ['accounts_containers-reauthorize-environments-update.md', 'Accounts', 'Containers Reauthorize Environments Update'] -- ['accounts_containers-tags-create.md', 'Accounts', 'Containers Tags Create'] -- ['accounts_containers-tags-delete.md', 'Accounts', 'Containers Tags Delete'] -- ['accounts_containers-tags-get.md', 'Accounts', 'Containers Tags Get'] -- ['accounts_containers-tags-list.md', 'Accounts', 'Containers Tags List'] -- ['accounts_containers-tags-update.md', 'Accounts', 'Containers Tags Update'] -- ['accounts_containers-triggers-create.md', 'Accounts', 'Containers Triggers Create'] -- ['accounts_containers-triggers-delete.md', 'Accounts', 'Containers Triggers Delete'] -- ['accounts_containers-triggers-get.md', 'Accounts', 'Containers Triggers Get'] -- ['accounts_containers-triggers-list.md', 'Accounts', 'Containers Triggers List'] -- ['accounts_containers-triggers-update.md', 'Accounts', 'Containers Triggers Update'] -- ['accounts_containers-update.md', 'Accounts', 'Containers Update'] -- ['accounts_containers-variables-create.md', 'Accounts', 'Containers Variables Create'] -- ['accounts_containers-variables-delete.md', 'Accounts', 'Containers Variables Delete'] -- ['accounts_containers-variables-get.md', 'Accounts', 'Containers Variables Get'] -- ['accounts_containers-variables-list.md', 'Accounts', 'Containers Variables List'] -- ['accounts_containers-variables-update.md', 'Accounts', 'Containers Variables Update'] -- ['accounts_containers-versions-create.md', 'Accounts', 'Containers Versions Create'] -- ['accounts_containers-versions-delete.md', 'Accounts', 'Containers Versions Delete'] -- ['accounts_containers-versions-get.md', 'Accounts', 'Containers Versions Get'] -- ['accounts_containers-versions-list.md', 'Accounts', 'Containers 
Versions List'] -- ['accounts_containers-versions-publish.md', 'Accounts', 'Containers Versions Publish'] -- ['accounts_containers-versions-restore.md', 'Accounts', 'Containers Versions Restore'] -- ['accounts_containers-versions-undelete.md', 'Accounts', 'Containers Versions Undelete'] -- ['accounts_containers-versions-update.md', 'Accounts', 'Containers Versions Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_permissions-create.md', 'Accounts', 'Permissions Create'] -- ['accounts_permissions-delete.md', 'Accounts', 'Permissions Delete'] -- ['accounts_permissions-get.md', 'Accounts', 'Permissions Get'] -- ['accounts_permissions-list.md', 'Accounts', 'Permissions List'] -- ['accounts_permissions-update.md', 'Accounts', 'Permissions Update'] -- ['accounts_update.md', 'Accounts', 'Update'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Containers Create': 'accounts_containers-create.md' + - 'Containers Delete': 'accounts_containers-delete.md' + - 'Containers Environments Create': 'accounts_containers-environments-create.md' + - 'Containers Environments Delete': 'accounts_containers-environments-delete.md' + - 'Containers Environments Get': 'accounts_containers-environments-get.md' + - 'Containers Environments List': 'accounts_containers-environments-list.md' + - 'Containers Environments Update': 'accounts_containers-environments-update.md' + - 'Containers Folders Create': 'accounts_containers-folders-create.md' + - 'Containers Folders Delete': 'accounts_containers-folders-delete.md' + - 'Containers Folders Entities List': 'accounts_containers-folders-entities-list.md' + - 'Containers Folders Get': 'accounts_containers-folders-get.md' + - 'Containers Folders List': 'accounts_containers-folders-list.md' + - 'Containers Folders Update': 'accounts_containers-folders-update.md' + - 'Containers Get': 'accounts_containers-get.md' + - 'Containers List': 'accounts_containers-list.md' + - 'Containers Move Folders 
Update': 'accounts_containers-move-folders-update.md' + - 'Containers Reauthorize Environments Update': 'accounts_containers-reauthorize-environments-update.md' + - 'Containers Tags Create': 'accounts_containers-tags-create.md' + - 'Containers Tags Delete': 'accounts_containers-tags-delete.md' + - 'Containers Tags Get': 'accounts_containers-tags-get.md' + - 'Containers Tags List': 'accounts_containers-tags-list.md' + - 'Containers Tags Update': 'accounts_containers-tags-update.md' + - 'Containers Triggers Create': 'accounts_containers-triggers-create.md' + - 'Containers Triggers Delete': 'accounts_containers-triggers-delete.md' + - 'Containers Triggers Get': 'accounts_containers-triggers-get.md' + - 'Containers Triggers List': 'accounts_containers-triggers-list.md' + - 'Containers Triggers Update': 'accounts_containers-triggers-update.md' + - 'Containers Update': 'accounts_containers-update.md' + - 'Containers Variables Create': 'accounts_containers-variables-create.md' + - 'Containers Variables Delete': 'accounts_containers-variables-delete.md' + - 'Containers Variables Get': 'accounts_containers-variables-get.md' + - 'Containers Variables List': 'accounts_containers-variables-list.md' + - 'Containers Variables Update': 'accounts_containers-variables-update.md' + - 'Containers Versions Create': 'accounts_containers-versions-create.md' + - 'Containers Versions Delete': 'accounts_containers-versions-delete.md' + - 'Containers Versions Get': 'accounts_containers-versions-get.md' + - 'Containers Versions List': 'accounts_containers-versions-list.md' + - 'Containers Versions Publish': 'accounts_containers-versions-publish.md' + - 'Containers Versions Restore': 'accounts_containers-versions-restore.md' + - 'Containers Versions Undelete': 'accounts_containers-versions-undelete.md' + - 'Containers Versions Update': 'accounts_containers-versions-update.md' + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Permissions Create': 'accounts_permissions-create.md' 
+ - 'Permissions Delete': 'accounts_permissions-delete.md' + - 'Permissions Get': 'accounts_permissions-get.md' + - 'Permissions List': 'accounts_permissions-list.md' + - 'Permissions Update': 'accounts_permissions-update.md' + - 'Update': 'accounts_update.md' theme: readthedocs diff --git a/gen/tagmanager1-cli/src/client.rs b/gen/tagmanager1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/tagmanager1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/tagmanager1-cli/src/main.rs b/gen/tagmanager1-cli/src/main.rs index fb6a112574..177c195372 100644 --- a/gen/tagmanager1-cli/src/main.rs +++ b/gen/tagmanager1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_tagmanager1::{api, Error, oauth2}; +use google_tagmanager1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -2636,10 +2635,10 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, "headers" => { - call = call.headers(arg_from_str(value.unwrap_or("false"), err, "headers", "boolean")); + call = call.headers( value.map(|v| arg_from_str(v, err, "headers", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5242,7 +5241,7 @@ async fn main() { let mut app = App::new("tagmanager1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230123") .about("This API allows clients to access and modify container and tag configuration.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_tagmanager1_cli") .arg(Arg::with_name("url") diff --git a/gen/tagmanager1/Cargo.toml b/gen/tagmanager1/Cargo.toml index 3af71b60a6..d70b652598 100644 --- 
a/gen/tagmanager1/Cargo.toml +++ b/gen/tagmanager1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-tagmanager1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Tag Manager (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tagmanager1" homepage = "https://developers.google.com/tag-manager" -documentation = "https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-tagmanager1/5.0.2+20230123" license = "MIT" keywords = ["tagmanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/tagmanager1/README.md b/gen/tagmanager1/README.md index 0a8eba5c77..c18fd2420e 100644 --- a/gen/tagmanager1/README.md +++ b/gen/tagmanager1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-tagmanager1` library allows access to all features of the *Google Tag Manager* service. -This documentation was generated from *Tag Manager* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *tagmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Tag Manager* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *tagmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Tag Manager* *v1* API can be found at the [official documentation site](https://developers.google.com/tag-manager). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/TagManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/TagManager) ... 
-* [accounts](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::Account) - * [*containers create*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerCreateCall), [*containers delete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerDeleteCall), [*containers environments create*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerEnvironmentCreateCall), [*containers environments delete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerEnvironmentDeleteCall), [*containers environments get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerEnvironmentGetCall), [*containers environments list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerEnvironmentListCall), [*containers environments update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerEnvironmentUpdateCall), [*containers folders create*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerFolderCreateCall), [*containers folders delete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerFolderDeleteCall), [*containers folders entities list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerFolderEntityListCall), [*containers folders get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerFolderGetCall), [*containers folders list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerFolderListCall), [*containers folders 
update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerFolderUpdateCall), [*containers get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerGetCall), [*containers list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerListCall), [*containers move_folders update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerMoveFolderUpdateCall), [*containers reauthorize_environments update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerReauthorizeEnvironmentUpdateCall), [*containers tags create*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTagCreateCall), [*containers tags delete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTagDeleteCall), [*containers tags get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTagGetCall), [*containers tags list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTagListCall), [*containers tags update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTagUpdateCall), [*containers triggers create*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTriggerCreateCall), [*containers triggers delete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTriggerDeleteCall), [*containers triggers get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTriggerGetCall), [*containers triggers list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTriggerListCall), [*containers 
triggers update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerTriggerUpdateCall), [*containers update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerUpdateCall), [*containers variables create*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVariableCreateCall), [*containers variables delete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVariableDeleteCall), [*containers variables get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVariableGetCall), [*containers variables list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVariableListCall), [*containers variables update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVariableUpdateCall), [*containers versions create*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVersionCreateCall), [*containers versions delete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVersionDeleteCall), [*containers versions get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVersionGetCall), [*containers versions list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVersionListCall), [*containers versions publish*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVersionPublishCall), [*containers versions restore*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVersionRestoreCall), [*containers versions 
undelete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVersionUndeleteCall), [*containers versions update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountContainerVersionUpdateCall), [*get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountGetCall), [*list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountListCall), [*permissions create*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountPermissionCreateCall), [*permissions delete*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountPermissionDeleteCall), [*permissions get*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountPermissionGetCall), [*permissions list*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountPermissionListCall), [*permissions update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountPermissionUpdateCall) and [*update*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/api::AccountUpdateCall) +* [accounts](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::Account) + * [*containers create*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerCreateCall), [*containers delete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerDeleteCall), [*containers environments create*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerEnvironmentCreateCall), [*containers environments delete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerEnvironmentDeleteCall), [*containers environments 
get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerEnvironmentGetCall), [*containers environments list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerEnvironmentListCall), [*containers environments update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerEnvironmentUpdateCall), [*containers folders create*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerFolderCreateCall), [*containers folders delete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerFolderDeleteCall), [*containers folders entities list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerFolderEntityListCall), [*containers folders get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerFolderGetCall), [*containers folders list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerFolderListCall), [*containers folders update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerFolderUpdateCall), [*containers get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerGetCall), [*containers list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerListCall), [*containers move_folders update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerMoveFolderUpdateCall), [*containers reauthorize_environments update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerReauthorizeEnvironmentUpdateCall), [*containers tags create*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTagCreateCall), [*containers tags 
delete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTagDeleteCall), [*containers tags get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTagGetCall), [*containers tags list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTagListCall), [*containers tags update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTagUpdateCall), [*containers triggers create*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTriggerCreateCall), [*containers triggers delete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTriggerDeleteCall), [*containers triggers get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTriggerGetCall), [*containers triggers list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTriggerListCall), [*containers triggers update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerTriggerUpdateCall), [*containers update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerUpdateCall), [*containers variables create*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVariableCreateCall), [*containers variables delete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVariableDeleteCall), [*containers variables get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVariableGetCall), [*containers variables list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVariableListCall), [*containers variables 
update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVariableUpdateCall), [*containers versions create*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVersionCreateCall), [*containers versions delete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVersionDeleteCall), [*containers versions get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVersionGetCall), [*containers versions list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVersionListCall), [*containers versions publish*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVersionPublishCall), [*containers versions restore*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVersionRestoreCall), [*containers versions undelete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVersionUndeleteCall), [*containers versions update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountContainerVersionUpdateCall), [*get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountGetCall), [*list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountListCall), [*permissions create*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountPermissionCreateCall), [*permissions delete*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountPermissionDeleteCall), [*permissions get*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountPermissionGetCall), [*permissions list*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountPermissionListCall), [*permissions 
update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountPermissionUpdateCall) and [*update*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/api::AccountUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/TagManager)** +* **[Hub](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/TagManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::CallBuilder) -* **[Resources](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::CallBuilder) +* **[Resources](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::Part)** + * **[Parts](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -175,17 +175,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -195,29 +195,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::Delegate) to the -[Method Builder](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::Delegate) to the +[Method Builder](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::RequestValue) and -[decodable](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::RequestValue) and +[decodable](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-tagmanager1/5.0.2-beta-1+20230123/google_tagmanager1/client::RequestValue) are moved +* [request values](https://docs.rs/google-tagmanager1/5.0.2+20230123/google_tagmanager1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/tagmanager1/src/api.rs b/gen/tagmanager1/src/api.rs index 25f19983b4..d7703e2cdd 100644 --- a/gen/tagmanager1/src/api.rs +++ b/gen/tagmanager1/src/api.rs @@ -152,7 +152,7 @@ impl<'a, S> TagManager { TagManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://tagmanager.googleapis.com/".to_string(), _root_url: "https://tagmanager.googleapis.com/".to_string(), } @@ -163,7 +163,7 @@ impl<'a, S> TagManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/tagmanager1/src/client.rs b/gen/tagmanager1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/tagmanager1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/tagmanager1/src/lib.rs b/gen/tagmanager1/src/lib.rs index e97a5499a3..f041479ee4 100644 --- a/gen/tagmanager1/src/lib.rs +++ b/gen/tagmanager1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Tag Manager* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *tagmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Tag Manager* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *tagmanager:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Tag Manager* *v1* API can be found at the //! [official documentation site](https://developers.google.com/tag-manager). diff --git a/gen/tagmanager2-cli/Cargo.toml b/gen/tagmanager2-cli/Cargo.toml index ee1c59be60..c7563aea9a 100644 --- a/gen/tagmanager2-cli/Cargo.toml +++ b/gen/tagmanager2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-tagmanager2-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Tag Manager (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tagmanager2-cli" @@ -20,13 +20,13 @@ name = "tagmanager2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-tagmanager2] path = "../tagmanager2" -version = "4.0.1+20220301" +version = "5.0.2+20230123" + diff --git a/gen/tagmanager2-cli/README.md b/gen/tagmanager2-cli/README.md index 1f07ab1b57..06b4961ebf 100644 --- a/gen/tagmanager2-cli/README.md +++ b/gen/tagmanager2-cli/README.md @@ -25,13 +25,17 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Tag Manager* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *Tag Manager* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash tagmanager2 [options] accounts + containers-combine [-p ]... [-o ] containers-create (-r )... [-p ]... [-o ] containers-delete [-p ]... + containers-destinations-get [-p ]... [-o ] + containers-destinations-link [-p ]... [-o ] + containers-destinations-list [-p ]... [-o ] containers-environments-create (-r )... [-p ]... [-o ] containers-environments-delete [-p ]... containers-environments-get [-p ]... [-o ] @@ -40,6 +44,9 @@ tagmanager2 [options] containers-environments-update (-r )... [-p ]... [-o ] containers-get [-p ]... [-o ] containers-list [-p ]... [-o ] + containers-lookup [-p ]... [-o ] + containers-move-tag-id [-p ]... [-o ] + containers-snippet [-p ]... [-o ] containers-update (-r )... [-p ]... [-o ] containers-version-headers-latest [-p ]... [-o ] containers-version-headers-list [-p ]... [-o ] @@ -73,6 +80,11 @@ tagmanager2 [options] containers-workspaces-folders-update (-r )... [-p ]... [-o ] containers-workspaces-get [-p ]... [-o ] containers-workspaces-get-status [-p ]... [-o ] + containers-workspaces-gtag-config-create (-r )... [-p ]... [-o ] + containers-workspaces-gtag-config-delete [-p ]... + containers-workspaces-gtag-config-get [-p ]... [-o ] + containers-workspaces-gtag-config-list [-p ]... [-o ] + containers-workspaces-gtag-config-update (-r )... [-p ]... [-o ] containers-workspaces-list [-p ]... [-o ] containers-workspaces-quick-preview [-p ]... [-o ] containers-workspaces-resolve-conflict (-r )... [-p ]... 
diff --git a/gen/tagmanager2-cli/mkdocs.yml b/gen/tagmanager2-cli/mkdocs.yml index 0301eaeec7..91aa4eb3e9 100644 --- a/gen/tagmanager2-cli/mkdocs.yml +++ b/gen/tagmanager2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Tag Manager v4.0.1+20220301 +site_name: Tag Manager v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-tagmanager2-cli site_description: A complete library to interact with Tag Manager (protocol v2) @@ -7,94 +7,107 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/tagmanager2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['accounts_containers-create.md', 'Accounts', 'Containers Create'] -- ['accounts_containers-delete.md', 'Accounts', 'Containers Delete'] -- ['accounts_containers-environments-create.md', 'Accounts', 'Containers Environments Create'] -- ['accounts_containers-environments-delete.md', 'Accounts', 'Containers Environments Delete'] -- ['accounts_containers-environments-get.md', 'Accounts', 'Containers Environments Get'] -- ['accounts_containers-environments-list.md', 'Accounts', 'Containers Environments List'] -- ['accounts_containers-environments-reauthorize.md', 'Accounts', 'Containers Environments Reauthorize'] -- ['accounts_containers-environments-update.md', 'Accounts', 'Containers Environments Update'] -- ['accounts_containers-get.md', 'Accounts', 'Containers Get'] -- ['accounts_containers-list.md', 'Accounts', 'Containers List'] -- ['accounts_containers-update.md', 'Accounts', 'Containers Update'] -- ['accounts_containers-version-headers-latest.md', 'Accounts', 'Containers Version Headers Latest'] -- ['accounts_containers-version-headers-list.md', 'Accounts', 'Containers Version Headers List'] -- ['accounts_containers-versions-delete.md', 'Accounts', 'Containers Versions Delete'] -- ['accounts_containers-versions-get.md', 'Accounts', 'Containers Versions Get'] -- ['accounts_containers-versions-live.md', 'Accounts', 'Containers Versions Live'] -- 
['accounts_containers-versions-publish.md', 'Accounts', 'Containers Versions Publish'] -- ['accounts_containers-versions-set-latest.md', 'Accounts', 'Containers Versions Set Latest'] -- ['accounts_containers-versions-undelete.md', 'Accounts', 'Containers Versions Undelete'] -- ['accounts_containers-versions-update.md', 'Accounts', 'Containers Versions Update'] -- ['accounts_containers-workspaces-built-in-variables-create.md', 'Accounts', 'Containers Workspaces Built In Variables Create'] -- ['accounts_containers-workspaces-built-in-variables-delete.md', 'Accounts', 'Containers Workspaces Built In Variables Delete'] -- ['accounts_containers-workspaces-built-in-variables-list.md', 'Accounts', 'Containers Workspaces Built In Variables List'] -- ['accounts_containers-workspaces-built-in-variables-revert.md', 'Accounts', 'Containers Workspaces Built In Variables Revert'] -- ['accounts_containers-workspaces-clients-create.md', 'Accounts', 'Containers Workspaces Clients Create'] -- ['accounts_containers-workspaces-clients-delete.md', 'Accounts', 'Containers Workspaces Clients Delete'] -- ['accounts_containers-workspaces-clients-get.md', 'Accounts', 'Containers Workspaces Clients Get'] -- ['accounts_containers-workspaces-clients-list.md', 'Accounts', 'Containers Workspaces Clients List'] -- ['accounts_containers-workspaces-clients-revert.md', 'Accounts', 'Containers Workspaces Clients Revert'] -- ['accounts_containers-workspaces-clients-update.md', 'Accounts', 'Containers Workspaces Clients Update'] -- ['accounts_containers-workspaces-create.md', 'Accounts', 'Containers Workspaces Create'] -- ['accounts_containers-workspaces-create-version.md', 'Accounts', 'Containers Workspaces Create Version'] -- ['accounts_containers-workspaces-delete.md', 'Accounts', 'Containers Workspaces Delete'] -- ['accounts_containers-workspaces-folders-create.md', 'Accounts', 'Containers Workspaces Folders Create'] -- ['accounts_containers-workspaces-folders-delete.md', 'Accounts', 'Containers 
Workspaces Folders Delete'] -- ['accounts_containers-workspaces-folders-entities.md', 'Accounts', 'Containers Workspaces Folders Entities'] -- ['accounts_containers-workspaces-folders-get.md', 'Accounts', 'Containers Workspaces Folders Get'] -- ['accounts_containers-workspaces-folders-list.md', 'Accounts', 'Containers Workspaces Folders List'] -- ['accounts_containers-workspaces-folders-move-entities-to-folder.md', 'Accounts', 'Containers Workspaces Folders Move Entities To Folder'] -- ['accounts_containers-workspaces-folders-revert.md', 'Accounts', 'Containers Workspaces Folders Revert'] -- ['accounts_containers-workspaces-folders-update.md', 'Accounts', 'Containers Workspaces Folders Update'] -- ['accounts_containers-workspaces-get.md', 'Accounts', 'Containers Workspaces Get'] -- ['accounts_containers-workspaces-get-status.md', 'Accounts', 'Containers Workspaces Get Status'] -- ['accounts_containers-workspaces-list.md', 'Accounts', 'Containers Workspaces List'] -- ['accounts_containers-workspaces-quick-preview.md', 'Accounts', 'Containers Workspaces Quick Preview'] -- ['accounts_containers-workspaces-resolve-conflict.md', 'Accounts', 'Containers Workspaces Resolve Conflict'] -- ['accounts_containers-workspaces-sync.md', 'Accounts', 'Containers Workspaces Sync'] -- ['accounts_containers-workspaces-tags-create.md', 'Accounts', 'Containers Workspaces Tags Create'] -- ['accounts_containers-workspaces-tags-delete.md', 'Accounts', 'Containers Workspaces Tags Delete'] -- ['accounts_containers-workspaces-tags-get.md', 'Accounts', 'Containers Workspaces Tags Get'] -- ['accounts_containers-workspaces-tags-list.md', 'Accounts', 'Containers Workspaces Tags List'] -- ['accounts_containers-workspaces-tags-revert.md', 'Accounts', 'Containers Workspaces Tags Revert'] -- ['accounts_containers-workspaces-tags-update.md', 'Accounts', 'Containers Workspaces Tags Update'] -- ['accounts_containers-workspaces-templates-create.md', 'Accounts', 'Containers Workspaces Templates Create'] 
-- ['accounts_containers-workspaces-templates-delete.md', 'Accounts', 'Containers Workspaces Templates Delete'] -- ['accounts_containers-workspaces-templates-get.md', 'Accounts', 'Containers Workspaces Templates Get'] -- ['accounts_containers-workspaces-templates-list.md', 'Accounts', 'Containers Workspaces Templates List'] -- ['accounts_containers-workspaces-templates-revert.md', 'Accounts', 'Containers Workspaces Templates Revert'] -- ['accounts_containers-workspaces-templates-update.md', 'Accounts', 'Containers Workspaces Templates Update'] -- ['accounts_containers-workspaces-triggers-create.md', 'Accounts', 'Containers Workspaces Triggers Create'] -- ['accounts_containers-workspaces-triggers-delete.md', 'Accounts', 'Containers Workspaces Triggers Delete'] -- ['accounts_containers-workspaces-triggers-get.md', 'Accounts', 'Containers Workspaces Triggers Get'] -- ['accounts_containers-workspaces-triggers-list.md', 'Accounts', 'Containers Workspaces Triggers List'] -- ['accounts_containers-workspaces-triggers-revert.md', 'Accounts', 'Containers Workspaces Triggers Revert'] -- ['accounts_containers-workspaces-triggers-update.md', 'Accounts', 'Containers Workspaces Triggers Update'] -- ['accounts_containers-workspaces-update.md', 'Accounts', 'Containers Workspaces Update'] -- ['accounts_containers-workspaces-variables-create.md', 'Accounts', 'Containers Workspaces Variables Create'] -- ['accounts_containers-workspaces-variables-delete.md', 'Accounts', 'Containers Workspaces Variables Delete'] -- ['accounts_containers-workspaces-variables-get.md', 'Accounts', 'Containers Workspaces Variables Get'] -- ['accounts_containers-workspaces-variables-list.md', 'Accounts', 'Containers Workspaces Variables List'] -- ['accounts_containers-workspaces-variables-revert.md', 'Accounts', 'Containers Workspaces Variables Revert'] -- ['accounts_containers-workspaces-variables-update.md', 'Accounts', 'Containers Workspaces Variables Update'] -- 
['accounts_containers-workspaces-zones-create.md', 'Accounts', 'Containers Workspaces Zones Create'] -- ['accounts_containers-workspaces-zones-delete.md', 'Accounts', 'Containers Workspaces Zones Delete'] -- ['accounts_containers-workspaces-zones-get.md', 'Accounts', 'Containers Workspaces Zones Get'] -- ['accounts_containers-workspaces-zones-list.md', 'Accounts', 'Containers Workspaces Zones List'] -- ['accounts_containers-workspaces-zones-revert.md', 'Accounts', 'Containers Workspaces Zones Revert'] -- ['accounts_containers-workspaces-zones-update.md', 'Accounts', 'Containers Workspaces Zones Update'] -- ['accounts_get.md', 'Accounts', 'Get'] -- ['accounts_list.md', 'Accounts', 'List'] -- ['accounts_update.md', 'Accounts', 'Update'] -- ['accounts_user-permissions-create.md', 'Accounts', 'User Permissions Create'] -- ['accounts_user-permissions-delete.md', 'Accounts', 'User Permissions Delete'] -- ['accounts_user-permissions-get.md', 'Accounts', 'User Permissions Get'] -- ['accounts_user-permissions-list.md', 'Accounts', 'User Permissions List'] -- ['accounts_user-permissions-update.md', 'Accounts', 'User Permissions Update'] +nav: +- Home: 'index.md' +- 'Accounts': + - 'Containers Combine': 'accounts_containers-combine.md' + - 'Containers Create': 'accounts_containers-create.md' + - 'Containers Delete': 'accounts_containers-delete.md' + - 'Containers Destinations Get': 'accounts_containers-destinations-get.md' + - 'Containers Destinations Link': 'accounts_containers-destinations-link.md' + - 'Containers Destinations List': 'accounts_containers-destinations-list.md' + - 'Containers Environments Create': 'accounts_containers-environments-create.md' + - 'Containers Environments Delete': 'accounts_containers-environments-delete.md' + - 'Containers Environments Get': 'accounts_containers-environments-get.md' + - 'Containers Environments List': 'accounts_containers-environments-list.md' + - 'Containers Environments Reauthorize': 
'accounts_containers-environments-reauthorize.md' + - 'Containers Environments Update': 'accounts_containers-environments-update.md' + - 'Containers Get': 'accounts_containers-get.md' + - 'Containers List': 'accounts_containers-list.md' + - 'Containers Lookup': 'accounts_containers-lookup.md' + - 'Containers Move Tag Id': 'accounts_containers-move-tag-id.md' + - 'Containers Snippet': 'accounts_containers-snippet.md' + - 'Containers Update': 'accounts_containers-update.md' + - 'Containers Version Headers Latest': 'accounts_containers-version-headers-latest.md' + - 'Containers Version Headers List': 'accounts_containers-version-headers-list.md' + - 'Containers Versions Delete': 'accounts_containers-versions-delete.md' + - 'Containers Versions Get': 'accounts_containers-versions-get.md' + - 'Containers Versions Live': 'accounts_containers-versions-live.md' + - 'Containers Versions Publish': 'accounts_containers-versions-publish.md' + - 'Containers Versions Set Latest': 'accounts_containers-versions-set-latest.md' + - 'Containers Versions Undelete': 'accounts_containers-versions-undelete.md' + - 'Containers Versions Update': 'accounts_containers-versions-update.md' + - 'Containers Workspaces Built In Variables Create': 'accounts_containers-workspaces-built-in-variables-create.md' + - 'Containers Workspaces Built In Variables Delete': 'accounts_containers-workspaces-built-in-variables-delete.md' + - 'Containers Workspaces Built In Variables List': 'accounts_containers-workspaces-built-in-variables-list.md' + - 'Containers Workspaces Built In Variables Revert': 'accounts_containers-workspaces-built-in-variables-revert.md' + - 'Containers Workspaces Clients Create': 'accounts_containers-workspaces-clients-create.md' + - 'Containers Workspaces Clients Delete': 'accounts_containers-workspaces-clients-delete.md' + - 'Containers Workspaces Clients Get': 'accounts_containers-workspaces-clients-get.md' + - 'Containers Workspaces Clients List': 
'accounts_containers-workspaces-clients-list.md' + - 'Containers Workspaces Clients Revert': 'accounts_containers-workspaces-clients-revert.md' + - 'Containers Workspaces Clients Update': 'accounts_containers-workspaces-clients-update.md' + - 'Containers Workspaces Create': 'accounts_containers-workspaces-create.md' + - 'Containers Workspaces Create Version': 'accounts_containers-workspaces-create-version.md' + - 'Containers Workspaces Delete': 'accounts_containers-workspaces-delete.md' + - 'Containers Workspaces Folders Create': 'accounts_containers-workspaces-folders-create.md' + - 'Containers Workspaces Folders Delete': 'accounts_containers-workspaces-folders-delete.md' + - 'Containers Workspaces Folders Entities': 'accounts_containers-workspaces-folders-entities.md' + - 'Containers Workspaces Folders Get': 'accounts_containers-workspaces-folders-get.md' + - 'Containers Workspaces Folders List': 'accounts_containers-workspaces-folders-list.md' + - 'Containers Workspaces Folders Move Entities To Folder': 'accounts_containers-workspaces-folders-move-entities-to-folder.md' + - 'Containers Workspaces Folders Revert': 'accounts_containers-workspaces-folders-revert.md' + - 'Containers Workspaces Folders Update': 'accounts_containers-workspaces-folders-update.md' + - 'Containers Workspaces Get': 'accounts_containers-workspaces-get.md' + - 'Containers Workspaces Get Status': 'accounts_containers-workspaces-get-status.md' + - 'Containers Workspaces Gtag Config Create': 'accounts_containers-workspaces-gtag-config-create.md' + - 'Containers Workspaces Gtag Config Delete': 'accounts_containers-workspaces-gtag-config-delete.md' + - 'Containers Workspaces Gtag Config Get': 'accounts_containers-workspaces-gtag-config-get.md' + - 'Containers Workspaces Gtag Config List': 'accounts_containers-workspaces-gtag-config-list.md' + - 'Containers Workspaces Gtag Config Update': 'accounts_containers-workspaces-gtag-config-update.md' + - 'Containers Workspaces List': 
'accounts_containers-workspaces-list.md' + - 'Containers Workspaces Quick Preview': 'accounts_containers-workspaces-quick-preview.md' + - 'Containers Workspaces Resolve Conflict': 'accounts_containers-workspaces-resolve-conflict.md' + - 'Containers Workspaces Sync': 'accounts_containers-workspaces-sync.md' + - 'Containers Workspaces Tags Create': 'accounts_containers-workspaces-tags-create.md' + - 'Containers Workspaces Tags Delete': 'accounts_containers-workspaces-tags-delete.md' + - 'Containers Workspaces Tags Get': 'accounts_containers-workspaces-tags-get.md' + - 'Containers Workspaces Tags List': 'accounts_containers-workspaces-tags-list.md' + - 'Containers Workspaces Tags Revert': 'accounts_containers-workspaces-tags-revert.md' + - 'Containers Workspaces Tags Update': 'accounts_containers-workspaces-tags-update.md' + - 'Containers Workspaces Templates Create': 'accounts_containers-workspaces-templates-create.md' + - 'Containers Workspaces Templates Delete': 'accounts_containers-workspaces-templates-delete.md' + - 'Containers Workspaces Templates Get': 'accounts_containers-workspaces-templates-get.md' + - 'Containers Workspaces Templates List': 'accounts_containers-workspaces-templates-list.md' + - 'Containers Workspaces Templates Revert': 'accounts_containers-workspaces-templates-revert.md' + - 'Containers Workspaces Templates Update': 'accounts_containers-workspaces-templates-update.md' + - 'Containers Workspaces Triggers Create': 'accounts_containers-workspaces-triggers-create.md' + - 'Containers Workspaces Triggers Delete': 'accounts_containers-workspaces-triggers-delete.md' + - 'Containers Workspaces Triggers Get': 'accounts_containers-workspaces-triggers-get.md' + - 'Containers Workspaces Triggers List': 'accounts_containers-workspaces-triggers-list.md' + - 'Containers Workspaces Triggers Revert': 'accounts_containers-workspaces-triggers-revert.md' + - 'Containers Workspaces Triggers Update': 'accounts_containers-workspaces-triggers-update.md' + - 
'Containers Workspaces Update': 'accounts_containers-workspaces-update.md' + - 'Containers Workspaces Variables Create': 'accounts_containers-workspaces-variables-create.md' + - 'Containers Workspaces Variables Delete': 'accounts_containers-workspaces-variables-delete.md' + - 'Containers Workspaces Variables Get': 'accounts_containers-workspaces-variables-get.md' + - 'Containers Workspaces Variables List': 'accounts_containers-workspaces-variables-list.md' + - 'Containers Workspaces Variables Revert': 'accounts_containers-workspaces-variables-revert.md' + - 'Containers Workspaces Variables Update': 'accounts_containers-workspaces-variables-update.md' + - 'Containers Workspaces Zones Create': 'accounts_containers-workspaces-zones-create.md' + - 'Containers Workspaces Zones Delete': 'accounts_containers-workspaces-zones-delete.md' + - 'Containers Workspaces Zones Get': 'accounts_containers-workspaces-zones-get.md' + - 'Containers Workspaces Zones List': 'accounts_containers-workspaces-zones-list.md' + - 'Containers Workspaces Zones Revert': 'accounts_containers-workspaces-zones-revert.md' + - 'Containers Workspaces Zones Update': 'accounts_containers-workspaces-zones-update.md' + - 'Get': 'accounts_get.md' + - 'List': 'accounts_list.md' + - 'Update': 'accounts_update.md' + - 'User Permissions Create': 'accounts_user-permissions-create.md' + - 'User Permissions Delete': 'accounts_user-permissions-delete.md' + - 'User Permissions Get': 'accounts_user-permissions-get.md' + - 'User Permissions List': 'accounts_user-permissions-list.md' + - 'User Permissions Update': 'accounts_user-permissions-update.md' theme: readthedocs diff --git a/gen/tagmanager2-cli/src/client.rs b/gen/tagmanager2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/tagmanager2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, 
ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/tagmanager2-cli/src/main.rs b/gen/tagmanager2-cli/src/main.rs index 940c7bab6b..e8415db6dc 100644 --- a/gen/tagmanager2-cli/src/main.rs +++ b/gen/tagmanager2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_tagmanager2::{api, Error, oauth2}; +use google_tagmanager2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,68 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _accounts_containers_combine(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_combine(opt.value_of("path").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "setting-source" => { + call = call.setting_source(value.unwrap_or("")); + }, + "container-id" => { + call = call.container_id(value.unwrap_or("")); + }, + "allow-user-permission-feature-update" => { + call = call.allow_user_permission_feature_update( value.map(|v| arg_from_str(v, err, "allow-user-permission-feature-update", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["allow-user-permission-feature-update", "container-id", "setting-source"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_containers_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -77,15 +138,30 @@ where "account-id" => Some(("accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "container-id" => Some(("containerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "domain-name" => Some(("domainName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "features.support-built-in-variables" => Some(("features.supportBuiltInVariables", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-clients" => Some(("features.supportClients", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-environments" => Some(("features.supportEnvironments", JsonTypeInfo { jtype: JsonType::Boolean, 
ctype: ComplexType::Pod })), + "features.support-folders" => Some(("features.supportFolders", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-gtag-configs" => Some(("features.supportGtagConfigs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-tags" => Some(("features.supportTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-templates" => Some(("features.supportTemplates", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-triggers" => Some(("features.supportTriggers", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-user-permissions" => Some(("features.supportUserPermissions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-variables" => Some(("features.supportVariables", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-versions" => Some(("features.supportVersions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-workspaces" => Some(("features.supportWorkspaces", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-zones" => Some(("features.supportZones", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notes" => Some(("notes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "path" => Some(("path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "public-id" => Some(("publicId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tag-ids" => Some(("tagIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec 
})), "tag-manager-url" => Some(("tagManagerUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tagging-server-urls" => Some(("taggingServerUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "usage-context" => Some(("usageContext", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "container-id", "domain-name", "fingerprint", "name", "notes", "path", "public-id", "tag-manager-url", "usage-context"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "container-id", "domain-name", "features", "fingerprint", "name", "notes", "path", "public-id", "support-built-in-variables", "support-clients", "support-environments", "support-folders", "support-gtag-configs", "support-tags", "support-templates", "support-triggers", "support-user-permissions", "support-variables", "support-versions", "support-workspaces", "support-zones", "tag-ids", "tag-manager-url", "tagging-server-urls", "usage-context"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -189,6 +265,169 @@ where } } + async fn _accounts_containers_destinations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_destinations_get(opt.value_of("path").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_containers_destinations_link(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_destinations_link(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "destination-id" => { + call = call.destination_id(value.unwrap_or("")); + }, + "allow-user-permission-feature-update" => { + call = call.allow_user_permission_feature_update( value.map(|v| arg_from_str(v, err, "allow-user-permission-feature-update", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + 
v.extend(["allow-user-permission-feature-update", "destination-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_containers_destinations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_destinations_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + 
} + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_containers_environments_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -750,6 +989,185 @@ where } } + async fn _accounts_containers_lookup(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_lookup(); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "destination-id" => { + call = call.destination_id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["destination-id"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + 
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_containers_move_tag_id(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_move_tag_id(opt.value_of("path").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "tag-name" => { + call = call.tag_name(value.unwrap_or("")); + }, + "tag-id" => { + call = call.tag_id(value.unwrap_or("")); + }, + "copy-users" => { + call = call.copy_users( value.map(|v| arg_from_str(v, err, "copy-users", "boolean")).unwrap_or(false)); + }, + "copy-terms-of-service" => { + call = call.copy_terms_of_service( value.map(|v| arg_from_str(v, err, "copy-terms-of-service", "boolean")).unwrap_or(false)); + }, + "copy-settings" => { + call = call.copy_settings( value.map(|v| arg_from_str(v, err, "copy-settings", "boolean")).unwrap_or(false)); + }, + "allow-user-permission-feature-update" => { + call = call.allow_user_permission_feature_update( value.map(|v| arg_from_str(v, err, "allow-user-permission-feature-update", "boolean")).unwrap_or(false)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["allow-user-permission-feature-update", "copy-settings", "copy-terms-of-service", "copy-users", "tag-id", "tag-name"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_containers_snippet(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_snippet(opt.value_of("path").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } 
else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_containers_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -776,15 +1194,30 @@ where "account-id" => Some(("accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "container-id" => Some(("containerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "domain-name" => Some(("domainName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "features.support-built-in-variables" => Some(("features.supportBuiltInVariables", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-clients" => Some(("features.supportClients", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-environments" => Some(("features.supportEnvironments", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-folders" => Some(("features.supportFolders", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-gtag-configs" => Some(("features.supportGtagConfigs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + 
"features.support-tags" => Some(("features.supportTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-templates" => Some(("features.supportTemplates", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-triggers" => Some(("features.supportTriggers", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-user-permissions" => Some(("features.supportUserPermissions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-variables" => Some(("features.supportVariables", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-versions" => Some(("features.supportVersions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-workspaces" => Some(("features.supportWorkspaces", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-zones" => Some(("features.supportZones", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "notes" => Some(("notes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "path" => Some(("path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "public-id" => Some(("publicId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tag-ids" => Some(("tagIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "tag-manager-url" => Some(("tagManagerUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tagging-server-urls" => Some(("taggingServerUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "usage-context" => Some(("usageContext", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "container-id", "domain-name", "fingerprint", "name", "notes", "path", "public-id", "tag-manager-url", "usage-context"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "container-id", "domain-name", "features", "fingerprint", "name", "notes", "path", "public-id", "support-built-in-variables", "support-clients", "support-environments", "support-folders", "support-gtag-configs", "support-tags", "support-templates", "support-triggers", "support-user-permissions", "support-variables", "support-versions", "support-workspaces", "support-zones", "tag-ids", "tag-manager-url", "tagging-server-urls", "usage-context"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -910,7 +1343,7 @@ where call = call.page_token(value.unwrap_or("")); }, "include-deleted" => { - call = call.include_deleted(arg_from_str(value.unwrap_or("false"), err, "include-deleted", "boolean")); + call = call.include_deleted( value.map(|v| arg_from_str(v, err, "include-deleted", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1298,12 +1731,27 @@ where "container.account-id" => Some(("container.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "container.container-id" => Some(("container.containerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "container.domain-name" => Some(("container.domainName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "container.features.support-built-in-variables" => Some(("container.features.supportBuiltInVariables", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-clients" => Some(("container.features.supportClients", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + 
"container.features.support-environments" => Some(("container.features.supportEnvironments", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-folders" => Some(("container.features.supportFolders", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-gtag-configs" => Some(("container.features.supportGtagConfigs", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-tags" => Some(("container.features.supportTags", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-templates" => Some(("container.features.supportTemplates", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-triggers" => Some(("container.features.supportTriggers", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-user-permissions" => Some(("container.features.supportUserPermissions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-variables" => Some(("container.features.supportVariables", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-versions" => Some(("container.features.supportVersions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-workspaces" => Some(("container.features.supportWorkspaces", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "container.features.support-zones" => Some(("container.features.supportZones", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "container.fingerprint" => Some(("container.fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "container.name" => Some(("container.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"container.notes" => Some(("container.notes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "container.path" => Some(("container.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "container.public-id" => Some(("container.publicId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "container.tag-ids" => Some(("container.tagIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "container.tag-manager-url" => Some(("container.tagManagerUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "container.tagging-server-urls" => Some(("container.taggingServerUrls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "container.usage-context" => Some(("container.usageContext", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "container-id" => Some(("containerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "container-version-id" => Some(("containerVersionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1314,7 +1762,7 @@ where "path" => Some(("path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "tag-manager-url" => Some(("tagManagerUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "container", "container-id", "container-version-id", "deleted", "description", "domain-name", "fingerprint", "name", "notes", "path", "public-id", "tag-manager-url", "usage-context"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "container", "container-id", "container-version-id", "deleted", "description", "domain-name", "features", "fingerprint", "name", "notes", "path", "public-id", "support-built-in-variables", "support-clients", "support-environments", "support-folders", "support-gtag-configs", "support-tags", "support-templates", 
"support-triggers", "support-user-permissions", "support-variables", "support-versions", "support-workspaces", "support-zones", "tag-ids", "tag-manager-url", "tagging-server-urls", "usage-context"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2873,6 +3321,346 @@ where } } + async fn _accounts_containers_workspaces_gtag_config_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "account-id" => Some(("accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "container-id" => Some(("containerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gtag-config-id" => Some(("gtagConfigId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "path" => Some(("path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tag-manager-url" => Some(("tagManagerUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "workspace-id" => Some(("workspaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "container-id", "fingerprint", "gtag-config-id", "path", "tag-manager-url", "type", "workspace-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GtagConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.accounts().containers_workspaces_gtag_config_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_containers_workspaces_gtag_config_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_workspaces_gtag_config_delete(opt.value_of("path").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok(mut response) => { + Ok(()) + } + } + } + } + + async fn _accounts_containers_workspaces_gtag_config_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_workspaces_gtag_config_get(opt.value_of("path").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn 
_accounts_containers_workspaces_gtag_config_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.accounts().containers_workspaces_gtag_config_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _accounts_containers_workspaces_gtag_config_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + 
let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "account-id" => Some(("accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "container-id" => Some(("containerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "gtag-config-id" => Some(("gtagConfigId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "path" => Some(("path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "tag-manager-url" => Some(("tagManagerUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "workspace-id" => Some(("workspaceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "container-id", "fingerprint", "gtag-config-id", "path", "tag-manager-url", "type", "workspace-id"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: 
api::GtagConfig = json::value::from_value(object).unwrap(); + let mut call = self.hub.accounts().containers_workspaces_gtag_config_update(request, opt.value_of("path").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "fingerprint" => { + call = call.fingerprint(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["fingerprint"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _accounts_containers_workspaces_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().containers_workspaces_list(opt.value_of("parent").unwrap_or("")); @@ -5609,6 +6397,9 @@ where "page-token" => { call 
= call.page_token(value.unwrap_or("")); }, + "include-google-tags" => { + call = call.include_google_tags( value.map(|v| arg_from_str(v, err, "include-google-tags", "boolean")).unwrap_or(false)); + }, _ => { let mut found = false; for param in &self.gp { @@ -5622,7 +6413,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); - v.extend(["page-token"].iter().map(|v|*v)); + v.extend(["include-google-tags", "page-token"].iter().map(|v|*v)); v } )); } } @@ -5680,13 +6471,15 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "account-id" => Some(("accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "features.support-multiple-containers" => Some(("features.supportMultipleContainers", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "features.support-user-permissions" => Some(("features.supportUserPermissions", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "fingerprint" => Some(("fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "path" => Some(("path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "share-data" => Some(("shareData", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "tag-manager-url" => Some(("tagManagerUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "fingerprint", "name", "path", "share-data", "tag-manager-url"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "features", "fingerprint", "name", "path", "share-data", "support-multiple-containers", "support-user-permissions", "tag-manager-url"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -6085,12 +6878,24 @@ where match self.opt.subcommand() { ("accounts", Some(opt)) => { match opt.subcommand() { + ("containers-combine", Some(opt)) => { + call_result = self._accounts_containers_combine(opt, dry_run, &mut err).await; + }, ("containers-create", Some(opt)) => { call_result = self._accounts_containers_create(opt, dry_run, &mut err).await; }, ("containers-delete", Some(opt)) => { call_result = self._accounts_containers_delete(opt, dry_run, &mut err).await; }, + ("containers-destinations-get", Some(opt)) => { + call_result = self._accounts_containers_destinations_get(opt, dry_run, &mut err).await; + }, + ("containers-destinations-link", Some(opt)) => { + call_result = self._accounts_containers_destinations_link(opt, dry_run, &mut err).await; + }, + ("containers-destinations-list", Some(opt)) => { + call_result = self._accounts_containers_destinations_list(opt, dry_run, &mut err).await; + }, ("containers-environments-create", Some(opt)) => { call_result = self._accounts_containers_environments_create(opt, dry_run, &mut err).await; }, @@ -6115,6 +6920,15 @@ where ("containers-list", Some(opt)) => { call_result = self._accounts_containers_list(opt, dry_run, &mut err).await; }, + ("containers-lookup", Some(opt)) => { + call_result = self._accounts_containers_lookup(opt, dry_run, &mut err).await; + }, + ("containers-move-tag-id", Some(opt)) => { + call_result = self._accounts_containers_move_tag_id(opt, dry_run, &mut err).await; + }, + ("containers-snippet", Some(opt)) => { + call_result = self._accounts_containers_snippet(opt, dry_run, &mut err).await; + }, ("containers-update", Some(opt)) => { call_result = self._accounts_containers_update(opt, dry_run, &mut err).await; }, @@ -6214,6 +7028,21 @@ where ("containers-workspaces-get-status", Some(opt)) => { call_result = self._accounts_containers_workspaces_get_status(opt, dry_run, 
&mut err).await; }, + ("containers-workspaces-gtag-config-create", Some(opt)) => { + call_result = self._accounts_containers_workspaces_gtag_config_create(opt, dry_run, &mut err).await; + }, + ("containers-workspaces-gtag-config-delete", Some(opt)) => { + call_result = self._accounts_containers_workspaces_gtag_config_delete(opt, dry_run, &mut err).await; + }, + ("containers-workspaces-gtag-config-get", Some(opt)) => { + call_result = self._accounts_containers_workspaces_gtag_config_get(opt, dry_run, &mut err).await; + }, + ("containers-workspaces-gtag-config-list", Some(opt)) => { + call_result = self._accounts_containers_workspaces_gtag_config_list(opt, dry_run, &mut err).await; + }, + ("containers-workspaces-gtag-config-update", Some(opt)) => { + call_result = self._accounts_containers_workspaces_gtag_config_update(opt, dry_run, &mut err).await; + }, ("containers-workspaces-list", Some(opt)) => { call_result = self._accounts_containers_workspaces_list(opt, dry_run, &mut err).await; }, @@ -6422,7 +7251,29 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("accounts", "methods: 'containers-create', 'containers-delete', 'containers-environments-create', 'containers-environments-delete', 'containers-environments-get', 'containers-environments-list', 'containers-environments-reauthorize', 'containers-environments-update', 'containers-get', 'containers-list', 'containers-update', 'containers-version-headers-latest', 'containers-version-headers-list', 'containers-versions-delete', 'containers-versions-get', 'containers-versions-live', 'containers-versions-publish', 'containers-versions-set-latest', 'containers-versions-undelete', 'containers-versions-update', 'containers-workspaces-built-in-variables-create', 'containers-workspaces-built-in-variables-delete', 'containers-workspaces-built-in-variables-list', 'containers-workspaces-built-in-variables-revert', 'containers-workspaces-clients-create', 'containers-workspaces-clients-delete', 
'containers-workspaces-clients-get', 'containers-workspaces-clients-list', 'containers-workspaces-clients-revert', 'containers-workspaces-clients-update', 'containers-workspaces-create', 'containers-workspaces-create-version', 'containers-workspaces-delete', 'containers-workspaces-folders-create', 'containers-workspaces-folders-delete', 'containers-workspaces-folders-entities', 'containers-workspaces-folders-get', 'containers-workspaces-folders-list', 'containers-workspaces-folders-move-entities-to-folder', 'containers-workspaces-folders-revert', 'containers-workspaces-folders-update', 'containers-workspaces-get', 'containers-workspaces-get-status', 'containers-workspaces-list', 'containers-workspaces-quick-preview', 'containers-workspaces-resolve-conflict', 'containers-workspaces-sync', 'containers-workspaces-tags-create', 'containers-workspaces-tags-delete', 'containers-workspaces-tags-get', 'containers-workspaces-tags-list', 'containers-workspaces-tags-revert', 'containers-workspaces-tags-update', 'containers-workspaces-templates-create', 'containers-workspaces-templates-delete', 'containers-workspaces-templates-get', 'containers-workspaces-templates-list', 'containers-workspaces-templates-revert', 'containers-workspaces-templates-update', 'containers-workspaces-triggers-create', 'containers-workspaces-triggers-delete', 'containers-workspaces-triggers-get', 'containers-workspaces-triggers-list', 'containers-workspaces-triggers-revert', 'containers-workspaces-triggers-update', 'containers-workspaces-update', 'containers-workspaces-variables-create', 'containers-workspaces-variables-delete', 'containers-workspaces-variables-get', 'containers-workspaces-variables-list', 'containers-workspaces-variables-revert', 'containers-workspaces-variables-update', 'containers-workspaces-zones-create', 'containers-workspaces-zones-delete', 'containers-workspaces-zones-get', 'containers-workspaces-zones-list', 'containers-workspaces-zones-revert', 
'containers-workspaces-zones-update', 'get', 'list', 'update', 'user-permissions-create', 'user-permissions-delete', 'user-permissions-get', 'user-permissions-list' and 'user-permissions-update'", vec![ + ("accounts", "methods: 'containers-combine', 'containers-create', 'containers-delete', 'containers-destinations-get', 'containers-destinations-link', 'containers-destinations-list', 'containers-environments-create', 'containers-environments-delete', 'containers-environments-get', 'containers-environments-list', 'containers-environments-reauthorize', 'containers-environments-update', 'containers-get', 'containers-list', 'containers-lookup', 'containers-move-tag-id', 'containers-snippet', 'containers-update', 'containers-version-headers-latest', 'containers-version-headers-list', 'containers-versions-delete', 'containers-versions-get', 'containers-versions-live', 'containers-versions-publish', 'containers-versions-set-latest', 'containers-versions-undelete', 'containers-versions-update', 'containers-workspaces-built-in-variables-create', 'containers-workspaces-built-in-variables-delete', 'containers-workspaces-built-in-variables-list', 'containers-workspaces-built-in-variables-revert', 'containers-workspaces-clients-create', 'containers-workspaces-clients-delete', 'containers-workspaces-clients-get', 'containers-workspaces-clients-list', 'containers-workspaces-clients-revert', 'containers-workspaces-clients-update', 'containers-workspaces-create', 'containers-workspaces-create-version', 'containers-workspaces-delete', 'containers-workspaces-folders-create', 'containers-workspaces-folders-delete', 'containers-workspaces-folders-entities', 'containers-workspaces-folders-get', 'containers-workspaces-folders-list', 'containers-workspaces-folders-move-entities-to-folder', 'containers-workspaces-folders-revert', 'containers-workspaces-folders-update', 'containers-workspaces-get', 'containers-workspaces-get-status', 'containers-workspaces-gtag-config-create', 
'containers-workspaces-gtag-config-delete', 'containers-workspaces-gtag-config-get', 'containers-workspaces-gtag-config-list', 'containers-workspaces-gtag-config-update', 'containers-workspaces-list', 'containers-workspaces-quick-preview', 'containers-workspaces-resolve-conflict', 'containers-workspaces-sync', 'containers-workspaces-tags-create', 'containers-workspaces-tags-delete', 'containers-workspaces-tags-get', 'containers-workspaces-tags-list', 'containers-workspaces-tags-revert', 'containers-workspaces-tags-update', 'containers-workspaces-templates-create', 'containers-workspaces-templates-delete', 'containers-workspaces-templates-get', 'containers-workspaces-templates-list', 'containers-workspaces-templates-revert', 'containers-workspaces-templates-update', 'containers-workspaces-triggers-create', 'containers-workspaces-triggers-delete', 'containers-workspaces-triggers-get', 'containers-workspaces-triggers-list', 'containers-workspaces-triggers-revert', 'containers-workspaces-triggers-update', 'containers-workspaces-update', 'containers-workspaces-variables-create', 'containers-workspaces-variables-delete', 'containers-workspaces-variables-get', 'containers-workspaces-variables-list', 'containers-workspaces-variables-revert', 'containers-workspaces-variables-update', 'containers-workspaces-zones-create', 'containers-workspaces-zones-delete', 'containers-workspaces-zones-get', 'containers-workspaces-zones-list', 'containers-workspaces-zones-revert', 'containers-workspaces-zones-update', 'get', 'list', 'update', 'user-permissions-create', 'user-permissions-delete', 'user-permissions-get', 'user-permissions-list' and 'user-permissions-update'", vec![ + ("containers-combine", + Some(r##"Combines Containers."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-combine", + vec![ + (Some(r##"path"##), + None, + Some(r##"GTM Container's API relative path. 
Example: accounts/{account_id}/containers/{container_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("containers-create", Some(r##"Creates a Container."##), "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-create", @@ -6467,6 +7318,72 @@ async fn main() { Some(false), Some(true)), ]), + ("containers-destinations-get", + Some(r##"Gets a Destination."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-destinations-get", + vec![ + (Some(r##"path"##), + None, + Some(r##"Google Tag Destination's API relative path. Example: accounts/{account_id}/containers/{container_id}/destinations/{destination_link_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-destinations-link", + Some(r##"Adds a Destination to this Container and removes it from the Container to which it is currently linked."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-destinations-link", + vec![ + (Some(r##"parent"##), + None, + Some(r##"GTM parent Container's API relative path. 
Example: accounts/{account_id}/containers/{container_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-destinations-list", + Some(r##"Lists all Destinations linked to a GTM Container."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-destinations-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"GTM parent Container's API relative path. Example: accounts/{account_id}/containers/{container_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), ("containers-environments-create", Some(r##"Creates a GTM Environment."##), "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-environments-create", @@ -6639,7 +7556,67 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"GTM Accounts's API relative path. Example: accounts/{account_id}."##), + Some(r##"GTM Account's API relative path. 
Example: accounts/{account_id}."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-lookup", + Some(r##"Looks up a Container by destination ID."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-lookup", + vec![ + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-move-tag-id", + Some(r##"Move Tag ID out of a Container."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-move-tag-id", + vec![ + (Some(r##"path"##), + None, + Some(r##"GTM Container's API relative path. Example: accounts/{account_id}/containers/{container_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-snippet", + Some(r##"Gets the tagging snippet for a Container."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-snippet", + vec![ + (Some(r##"path"##), + None, + Some(r##"Container snippet's API relative path. 
Example: accounts/{account_id}/containers/{container_id}:snippet"##), Some(true), Some(false)), @@ -7393,6 +8370,122 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-workspaces-gtag-config-create", + Some(r##"Creates a Google tag config."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-workspaces-gtag-config-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Workspace's API relative path. Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-workspaces-gtag-config-delete", + Some(r##"Deletes a Google tag config."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-workspaces-gtag-config-delete", + vec![ + (Some(r##"path"##), + None, + Some(r##"Google tag config's API relative path. 
Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}/gtag_config/{gtag_config_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + ]), + ("containers-workspaces-gtag-config-get", + Some(r##"Gets a Google tag config."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-workspaces-gtag-config-get", + vec![ + (Some(r##"path"##), + None, + Some(r##"Google tag config's API relative path. Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}/gtag_config/{gtag_config_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-workspaces-gtag-config-list", + Some(r##"Lists all Google tag configs in a Container."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-workspaces-gtag-config-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Workspace's API relative path. 
Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("containers-workspaces-gtag-config-update", + Some(r##"Updates a Google tag config."##), + "Details at http://byron.github.io/google-apis-rs/google_tagmanager2_cli/accounts_containers-workspaces-gtag-config-update", + vec![ + (Some(r##"path"##), + None, + Some(r##"Google tag config's API relative path. Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}/gtag_config/{gtag_config_id}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -7769,7 +8862,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"GTM Workspaces's API relative path. Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}"##), + Some(r##"GTM Workspace's API relative path. Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}"##), Some(true), Some(false)), @@ -7835,7 +8928,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"GTM Workspaces's API relative path. Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}"##), + Some(r##"GTM Workspace's API relative path. 
Example: accounts/{account_id}/containers/{container_id}/workspaces/{workspace_id}"##), Some(true), Some(false)), @@ -8211,7 +9304,7 @@ async fn main() { vec![ (Some(r##"path"##), None, - Some(r##"GTM Accounts's API relative path. Example: accounts/{account_id}"##), + Some(r##"GTM Account's API relative path. Example: accounts/{account_id}"##), Some(true), Some(false)), @@ -8249,7 +9342,7 @@ async fn main() { vec![ (Some(r##"path"##), None, - Some(r##"GTM Accounts's API relative path. Example: accounts/{account_id}"##), + Some(r##"GTM Account's API relative path. Example: accounts/{account_id}"##), Some(true), Some(false)), @@ -8343,7 +9436,7 @@ async fn main() { vec![ (Some(r##"parent"##), None, - Some(r##"GTM Accounts's API relative path. Example: accounts/{account_id}"##), + Some(r##"GTM Account's API relative path. Example: accounts/{account_id}"##), Some(true), Some(false)), @@ -8393,7 +9486,7 @@ async fn main() { let mut app = App::new("tagmanager2") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230123") .about("This API allows clients to access and modify container and tag configuration.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_tagmanager2_cli") .arg(Arg::with_name("url") diff --git a/gen/tagmanager2/Cargo.toml b/gen/tagmanager2/Cargo.toml index 3a5d3c0b50..49a52998be 100644 --- a/gen/tagmanager2/Cargo.toml +++ b/gen/tagmanager2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-tagmanager2" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Tag Manager (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tagmanager2" homepage = "https://developers.google.com/tag-manager" -documentation = "https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-tagmanager2/5.0.2+20230123" license = "MIT" keywords = 
["tagmanager", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/tagmanager2/README.md b/gen/tagmanager2/README.md index c77b65d2e9..5cdd7f1e2c 100644 --- a/gen/tagmanager2/README.md +++ b/gen/tagmanager2/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-tagmanager2` library allows access to all features of the *Google Tag Manager* service. -This documentation was generated from *Tag Manager* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *tagmanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Tag Manager* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *tagmanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Tag Manager* *v2* API can be found at the [official documentation site](https://developers.google.com/tag-manager). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/TagManager) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/TagManager) ... 
-* [accounts](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::Account) - * [*containers combine*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerCombineCall), [*containers create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerCreateCall), [*containers delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerDeleteCall), [*containers destinations get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerDestinationGetCall), [*containers destinations link*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerDestinationLinkCall), [*containers destinations list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerDestinationListCall), [*containers environments create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerEnvironmentCreateCall), [*containers environments delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerEnvironmentDeleteCall), [*containers environments get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerEnvironmentGetCall), [*containers environments list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerEnvironmentListCall), [*containers environments reauthorize*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerEnvironmentReauthorizeCall), [*containers environments update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerEnvironmentUpdateCall), [*containers 
get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerGetCall), [*containers list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerListCall), [*containers lookup*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerLookupCall), [*containers move_tag_id*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerMoveTagIdCall), [*containers snippet*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerSnippetCall), [*containers update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerUpdateCall), [*containers version_headers latest*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionHeaderLatestCall), [*containers version_headers list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionHeaderListCall), [*containers versions delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionDeleteCall), [*containers versions get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionGetCall), [*containers versions live*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionLiveCall), [*containers versions publish*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionPublishCall), [*containers versions set_latest*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionSetLatestCall), [*containers versions undelete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionUndeleteCall), 
[*containers versions update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerVersionUpdateCall), [*containers workspaces built_in_variables create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceBuiltInVariableCreateCall), [*containers workspaces built_in_variables delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceBuiltInVariableDeleteCall), [*containers workspaces built_in_variables list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceBuiltInVariableListCall), [*containers workspaces built_in_variables revert*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceBuiltInVariableRevertCall), [*containers workspaces clients create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientCreateCall), [*containers workspaces clients delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientDeleteCall), [*containers workspaces clients get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientGetCall), [*containers workspaces clients list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientListCall), [*containers workspaces clients revert*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientRevertCall), [*containers workspaces clients update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientUpdateCall), [*containers workspaces 
create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceCreateCall), [*containers workspaces create_version*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceCreateVersionCall), [*containers workspaces delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceDeleteCall), [*containers workspaces folders create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderCreateCall), [*containers workspaces folders delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderDeleteCall), [*containers workspaces folders entities*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderEntityCall), [*containers workspaces folders get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderGetCall), [*containers workspaces folders list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderListCall), [*containers workspaces folders move_entities_to_folder*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderMoveEntitiesToFolderCall), [*containers workspaces folders revert*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderRevertCall), [*containers workspaces folders update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderUpdateCall), [*containers workspaces get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceGetCall), [*containers workspaces get 
status*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceGetStatuCall), [*containers workspaces gtag_config create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigCreateCall), [*containers workspaces gtag_config delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigDeleteCall), [*containers workspaces gtag_config get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigGetCall), [*containers workspaces gtag_config list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigListCall), [*containers workspaces gtag_config update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigUpdateCall), [*containers workspaces list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceListCall), [*containers workspaces quick_preview*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceQuickPreviewCall), [*containers workspaces resolve_conflict*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceResolveConflictCall), [*containers workspaces sync*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceSyncCall), [*containers workspaces tags create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagCreateCall), [*containers workspaces tags delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagDeleteCall), [*containers workspaces tags 
get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagGetCall), [*containers workspaces tags list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagListCall), [*containers workspaces tags revert*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagRevertCall), [*containers workspaces tags update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagUpdateCall), [*containers workspaces templates create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateCreateCall), [*containers workspaces templates delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateDeleteCall), [*containers workspaces templates get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateGetCall), [*containers workspaces templates list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateListCall), [*containers workspaces templates revert*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateRevertCall), [*containers workspaces templates update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateUpdateCall), [*containers workspaces triggers create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerCreateCall), [*containers workspaces triggers delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerDeleteCall), [*containers workspaces triggers 
get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerGetCall), [*containers workspaces triggers list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerListCall), [*containers workspaces triggers revert*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerRevertCall), [*containers workspaces triggers update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerUpdateCall), [*containers workspaces update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceUpdateCall), [*containers workspaces variables create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableCreateCall), [*containers workspaces variables delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableDeleteCall), [*containers workspaces variables get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableGetCall), [*containers workspaces variables list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableListCall), [*containers workspaces variables revert*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableRevertCall), [*containers workspaces variables update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableUpdateCall), [*containers workspaces zones create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneCreateCall), [*containers workspaces zones 
delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneDeleteCall), [*containers workspaces zones get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneGetCall), [*containers workspaces zones list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneListCall), [*containers workspaces zones revert*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneRevertCall), [*containers workspaces zones update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneUpdateCall), [*get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountGetCall), [*list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountListCall), [*update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountUpdateCall), [*user_permissions create*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountUserPermissionCreateCall), [*user_permissions delete*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountUserPermissionDeleteCall), [*user_permissions get*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountUserPermissionGetCall), [*user_permissions list*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountUserPermissionListCall) and [*user_permissions update*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/api::AccountUserPermissionUpdateCall) +* [accounts](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::Account) + * [*containers 
combine*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerCombineCall), [*containers create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerCreateCall), [*containers delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerDeleteCall), [*containers destinations get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerDestinationGetCall), [*containers destinations link*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerDestinationLinkCall), [*containers destinations list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerDestinationListCall), [*containers environments create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerEnvironmentCreateCall), [*containers environments delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerEnvironmentDeleteCall), [*containers environments get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerEnvironmentGetCall), [*containers environments list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerEnvironmentListCall), [*containers environments reauthorize*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerEnvironmentReauthorizeCall), [*containers environments update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerEnvironmentUpdateCall), [*containers get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerGetCall), [*containers list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerListCall), [*containers 
lookup*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerLookupCall), [*containers move_tag_id*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerMoveTagIdCall), [*containers snippet*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerSnippetCall), [*containers update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerUpdateCall), [*containers version_headers latest*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionHeaderLatestCall), [*containers version_headers list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionHeaderListCall), [*containers versions delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionDeleteCall), [*containers versions get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionGetCall), [*containers versions live*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionLiveCall), [*containers versions publish*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionPublishCall), [*containers versions set_latest*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionSetLatestCall), [*containers versions undelete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionUndeleteCall), [*containers versions update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerVersionUpdateCall), [*containers workspaces built_in_variables create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceBuiltInVariableCreateCall), [*containers workspaces 
built_in_variables delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceBuiltInVariableDeleteCall), [*containers workspaces built_in_variables list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceBuiltInVariableListCall), [*containers workspaces built_in_variables revert*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceBuiltInVariableRevertCall), [*containers workspaces clients create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientCreateCall), [*containers workspaces clients delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientDeleteCall), [*containers workspaces clients get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientGetCall), [*containers workspaces clients list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientListCall), [*containers workspaces clients revert*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientRevertCall), [*containers workspaces clients update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceClientUpdateCall), [*containers workspaces create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceCreateCall), [*containers workspaces create_version*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceCreateVersionCall), [*containers workspaces delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceDeleteCall), [*containers workspaces folders 
create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderCreateCall), [*containers workspaces folders delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderDeleteCall), [*containers workspaces folders entities*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderEntityCall), [*containers workspaces folders get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderGetCall), [*containers workspaces folders list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderListCall), [*containers workspaces folders move_entities_to_folder*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderMoveEntitiesToFolderCall), [*containers workspaces folders revert*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderRevertCall), [*containers workspaces folders update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceFolderUpdateCall), [*containers workspaces get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceGetCall), [*containers workspaces get status*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceGetStatuCall), [*containers workspaces gtag_config create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigCreateCall), [*containers workspaces gtag_config delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigDeleteCall), [*containers workspaces gtag_config 
get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigGetCall), [*containers workspaces gtag_config list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigListCall), [*containers workspaces gtag_config update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceGtagConfigUpdateCall), [*containers workspaces list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceListCall), [*containers workspaces quick_preview*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceQuickPreviewCall), [*containers workspaces resolve_conflict*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceResolveConflictCall), [*containers workspaces sync*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceSyncCall), [*containers workspaces tags create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagCreateCall), [*containers workspaces tags delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagDeleteCall), [*containers workspaces tags get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagGetCall), [*containers workspaces tags list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagListCall), [*containers workspaces tags revert*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagRevertCall), [*containers workspaces tags update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTagUpdateCall), [*containers workspaces 
templates create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateCreateCall), [*containers workspaces templates delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateDeleteCall), [*containers workspaces templates get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateGetCall), [*containers workspaces templates list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateListCall), [*containers workspaces templates revert*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateRevertCall), [*containers workspaces templates update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTemplateUpdateCall), [*containers workspaces triggers create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerCreateCall), [*containers workspaces triggers delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerDeleteCall), [*containers workspaces triggers get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerGetCall), [*containers workspaces triggers list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerListCall), [*containers workspaces triggers revert*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerRevertCall), [*containers workspaces triggers update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceTriggerUpdateCall), [*containers workspaces 
update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceUpdateCall), [*containers workspaces variables create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableCreateCall), [*containers workspaces variables delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableDeleteCall), [*containers workspaces variables get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableGetCall), [*containers workspaces variables list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableListCall), [*containers workspaces variables revert*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableRevertCall), [*containers workspaces variables update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceVariableUpdateCall), [*containers workspaces zones create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneCreateCall), [*containers workspaces zones delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneDeleteCall), [*containers workspaces zones get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneGetCall), [*containers workspaces zones list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneListCall), [*containers workspaces zones revert*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneRevertCall), [*containers workspaces zones 
update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountContainerWorkspaceZoneUpdateCall), [*get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountGetCall), [*list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountListCall), [*update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountUpdateCall), [*user_permissions create*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountUserPermissionCreateCall), [*user_permissions delete*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountUserPermissionDeleteCall), [*user_permissions get*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountUserPermissionGetCall), [*user_permissions list*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountUserPermissionListCall) and [*user_permissions update*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/api::AccountUserPermissionUpdateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/TagManager)** +* **[Hub](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/TagManager)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::CallBuilder) -* **[Resources](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::CallBuilder) +* **[Resources](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::Part)** + * **[Parts](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -221,17 +221,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -241,29 +241,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::Delegate) to the -[Method Builder](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::Delegate) to the +[Method Builder](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::RequestValue) and -[decodable](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::RequestValue) and +[decodable](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-tagmanager2/5.0.2-beta-1+20230123/google_tagmanager2/client::RequestValue) are moved +* [request values](https://docs.rs/google-tagmanager2/5.0.2+20230123/google_tagmanager2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/tagmanager2/src/api.rs b/gen/tagmanager2/src/api.rs index 08d020da80..695daf2ff4 100644 --- a/gen/tagmanager2/src/api.rs +++ b/gen/tagmanager2/src/api.rs @@ -149,7 +149,7 @@ impl<'a, S> TagManager { TagManager { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://tagmanager.googleapis.com/".to_string(), _root_url: "https://tagmanager.googleapis.com/".to_string(), } @@ -160,7 +160,7 @@ impl<'a, S> TagManager { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/tagmanager2/src/client.rs b/gen/tagmanager2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/tagmanager2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/tagmanager2/src/lib.rs b/gen/tagmanager2/src/lib.rs index 71e1715bc7..76fabcd0de 100644 --- a/gen/tagmanager2/src/lib.rs +++ b/gen/tagmanager2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Tag Manager* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *tagmanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Tag Manager* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *tagmanager:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Tag Manager* *v2* API can be found at the //! [official documentation site](https://developers.google.com/tag-manager). diff --git a/gen/taskqueue1_beta2-cli/Cargo.toml b/gen/taskqueue1_beta2-cli/Cargo.toml index a13b1ff7b7..322b56b871 100644 --- a/gen/taskqueue1_beta2-cli/Cargo.toml +++ b/gen/taskqueue1_beta2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-taskqueue1_beta2-cli" -version = "4.0.1+20160428" +version = "5.0.2+20160428" authors = ["Sebastian Thiel "] description = "A complete library to interact with taskqueue (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/taskqueue1_beta2-cli" @@ -20,13 +20,13 @@ name = "taskqueue1-beta2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-taskqueue1_beta2] path = "../taskqueue1_beta2" -version = "4.0.1+20160428" +version = "5.0.2+20160428" + diff --git a/gen/taskqueue1_beta2-cli/README.md b/gen/taskqueue1_beta2-cli/README.md index 89204be16c..9f6a104288 100644 --- a/gen/taskqueue1_beta2-cli/README.md +++ b/gen/taskqueue1_beta2-cli/README.md @@ -25,7 +25,7 @@ Find 
the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *taskqueue* API at revision *20160428*. The CLI is at version *4.0.1*. +This documentation was generated from the *taskqueue* API at revision *20160428*. The CLI is at version *5.0.2*. ```bash taskqueue1-beta2 [options] diff --git a/gen/taskqueue1_beta2-cli/mkdocs.yml b/gen/taskqueue1_beta2-cli/mkdocs.yml index b1a9fdd28f..fdc987e5a4 100644 --- a/gen/taskqueue1_beta2-cli/mkdocs.yml +++ b/gen/taskqueue1_beta2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: taskqueue v4.0.1+20160428 +site_name: taskqueue v5.0.2+20160428 site_url: http://byron.github.io/google-apis-rs/google-taskqueue1_beta2-cli site_description: A complete library to interact with taskqueue (protocol v1beta2) @@ -7,16 +7,18 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/taskqueue1_beta2 docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['taskqueues_get.md', 'Taskqueues', 'Get'] -- ['tasks_delete.md', 'Tasks', 'Delete'] -- ['tasks_get.md', 'Tasks', 'Get'] -- ['tasks_insert.md', 'Tasks', 'Insert'] -- ['tasks_lease.md', 'Tasks', 'Lease'] -- ['tasks_list.md', 'Tasks', 'List'] -- ['tasks_patch.md', 'Tasks', 'Patch'] -- ['tasks_update.md', 'Tasks', 'Update'] +nav: +- Home: 'index.md' +- 'Taskqueues': + - 'Get': 'taskqueues_get.md' +- 'Tasks': + - 'Delete': 'tasks_delete.md' + - 'Get': 'tasks_get.md' + - 'Insert': 'tasks_insert.md' + - 'Lease': 'tasks_lease.md' + - 'List': 'tasks_list.md' + - 'Patch': 'tasks_patch.md' + - 'Update': 'tasks_update.md' theme: readthedocs diff --git a/gen/taskqueue1_beta2-cli/src/client.rs b/gen/taskqueue1_beta2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/taskqueue1_beta2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, 
ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/taskqueue1_beta2-cli/src/main.rs b/gen/taskqueue1_beta2-cli/src/main.rs index 4bcd285010..e37b2117a2 100644 --- a/gen/taskqueue1_beta2-cli/src/main.rs +++ b/gen/taskqueue1_beta2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_taskqueue1_beta2::{api, Error, oauth2}; +use google_taskqueue1_beta2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -58,7 +57,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "get-stats" => { - call = call.get_stats(arg_from_str(value.unwrap_or("false"), err, "get-stats", "boolean")); + call = call.get_stats( value.map(|v| arg_from_str(v, err, "get-stats", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -307,7 +306,7 @@ where call = call.tag(value.unwrap_or("")); }, "group-by-tag" => { - call = call.group_by_tag(arg_from_str(value.unwrap_or("false"), err, "group-by-tag", "boolean")); + call = call.group_by_tag( value.map(|v| arg_from_str(v, err, "group-by-tag", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1003,7 +1002,7 @@ async fn main() { let mut app = App::new("taskqueue1-beta2") .author("Sebastian Thiel ") - .version("4.0.1+20160428") + .version("5.0.2+20160428") .about("Accesses a Google App Engine Pull Task Queue over REST.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_taskqueue1_beta2_cli") .arg(Arg::with_name("url") diff --git 
a/gen/taskqueue1_beta2/Cargo.toml b/gen/taskqueue1_beta2/Cargo.toml index 16025823c2..ac703880b1 100644 --- a/gen/taskqueue1_beta2/Cargo.toml +++ b/gen/taskqueue1_beta2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-taskqueue1_beta2" -version = "5.0.2-beta-1+20160428" +version = "5.0.2+20160428" authors = ["Sebastian Thiel "] description = "A complete library to interact with taskqueue (protocol v1beta2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/taskqueue1_beta2" homepage = "https://developers.google.com/appengine/docs/python/taskqueue/rest" -documentation = "https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428" +documentation = "https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428" license = "MIT" keywords = ["taskqueue", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/taskqueue1_beta2/README.md b/gen/taskqueue1_beta2/README.md index 8071eacbec..3f63132f2e 100644 --- a/gen/taskqueue1_beta2/README.md +++ b/gen/taskqueue1_beta2/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-taskqueue1_beta2` library allows access to all features of the *Google taskqueue* service. -This documentation was generated from *taskqueue* crate version *5.0.2-beta-1+20160428*, where *20160428* is the exact revision of the *taskqueue:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *taskqueue* crate version *5.0.2+20160428*, where *20160428* is the exact revision of the *taskqueue:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *taskqueue* *v1_beta2* API can be found at the [official documentation site](https://developers.google.com/appengine/docs/python/taskqueue/rest). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/Taskqueue) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/Taskqueue) ... * taskqueues - * [*get*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::TaskqueueGetCall) -* [tasks](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::Task) - * [*delete*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::TaskDeleteCall), [*get*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::TaskGetCall), [*insert*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::TaskInsertCall), [*lease*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::TaskLeaseCall), [*list*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::TaskListCall), [*patch*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::TaskPatchCall) and [*update*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/api::TaskUpdateCall) + * [*get*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::TaskqueueGetCall) +* [tasks](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::Task) + * [*delete*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::TaskDeleteCall), [*get*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::TaskGetCall), [*insert*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::TaskInsertCall), [*lease*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::TaskLeaseCall), [*list*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::TaskListCall), 
[*patch*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::TaskPatchCall) and [*update*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/api::TaskUpdateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/Taskqueue)** +* **[Hub](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/Taskqueue)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::CallBuilder) -* **[Resources](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::CallBuilder) +* **[Resources](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::Part)** + * **[Parts](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::Delegate) to the -[Method Builder](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::Delegate) to the +[Method Builder](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::RequestValue) and -[decodable](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::RequestValue) and +[decodable](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-taskqueue1_beta2/5.0.2-beta-1+20160428/google_taskqueue1_beta2/client::RequestValue) are moved +* [request values](https://docs.rs/google-taskqueue1_beta2/5.0.2+20160428/google_taskqueue1_beta2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/taskqueue1_beta2/src/api.rs b/gen/taskqueue1_beta2/src/api.rs index 1a99f9fdcc..ee4c63e9eb 100644 --- a/gen/taskqueue1_beta2/src/api.rs +++ b/gen/taskqueue1_beta2/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Taskqueue { Taskqueue { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/taskqueue/v1beta2/projects/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -139,7 +139,7 @@ impl<'a, S> Taskqueue { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/taskqueue1_beta2/src/client.rs b/gen/taskqueue1_beta2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/taskqueue1_beta2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/taskqueue1_beta2/src/lib.rs b/gen/taskqueue1_beta2/src/lib.rs index 00e1a74672..8a8b6db3c5 100644 --- a/gen/taskqueue1_beta2/src/lib.rs +++ b/gen/taskqueue1_beta2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *taskqueue* crate version *5.0.2-beta-1+20160428*, where *20160428* is the exact revision of the *taskqueue:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *taskqueue* crate version *5.0.2+20160428*, where *20160428* is the exact revision of the *taskqueue:v1beta2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *taskqueue* *v1_beta2* API can be found at the //! [official documentation site](https://developers.google.com/appengine/docs/python/taskqueue/rest). diff --git a/gen/tasks1-cli/Cargo.toml b/gen/tasks1-cli/Cargo.toml index 120644a7f3..d3dffd3a41 100644 --- a/gen/tasks1-cli/Cargo.toml +++ b/gen/tasks1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-tasks1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230122" authors = ["Sebastian Thiel "] description = "A complete library to interact with Tasks (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tasks1-cli" @@ -20,13 +20,13 @@ name = "tasks1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-tasks1] path = "../tasks1" -version = "4.0.1+20220305" +version = "5.0.2+20230122" + diff --git a/gen/tasks1-cli/README.md b/gen/tasks1-cli/README.md index cd6bab9943..06d0e25878 100644 --- a/gen/tasks1-cli/README.md +++ b/gen/tasks1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *Tasks* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *Tasks* API at revision *20230122*. The CLI is at version *5.0.2*. ```bash tasks1 [options] diff --git a/gen/tasks1-cli/mkdocs.yml b/gen/tasks1-cli/mkdocs.yml index a4b226309b..d2c374bcbd 100644 --- a/gen/tasks1-cli/mkdocs.yml +++ b/gen/tasks1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Tasks v4.0.1+20220305 +site_name: Tasks v5.0.2+20230122 site_url: http://byron.github.io/google-apis-rs/google-tasks1-cli site_description: A complete library to interact with Tasks (protocol v1) @@ -7,22 +7,24 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/tasks1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['tasklists_delete.md', 'Tasklists', 'Delete'] -- ['tasklists_get.md', 'Tasklists', 'Get'] -- ['tasklists_insert.md', 'Tasklists', 'Insert'] -- ['tasklists_list.md', 'Tasklists', 'List'] -- ['tasklists_patch.md', 'Tasklists', 'Patch'] -- ['tasklists_update.md', 'Tasklists', 'Update'] -- ['tasks_clear.md', 'Tasks', 'Clear'] -- ['tasks_delete.md', 'Tasks', 'Delete'] -- ['tasks_get.md', 'Tasks', 'Get'] -- ['tasks_insert.md', 'Tasks', 'Insert'] -- ['tasks_list.md', 'Tasks', 'List'] -- ['tasks_move.md', 'Tasks', 'Move'] -- ['tasks_patch.md', 'Tasks', 'Patch'] -- ['tasks_update.md', 'Tasks', 'Update'] +nav: +- Home: 'index.md' +- 'Tasklists': + - 'Delete': 'tasklists_delete.md' + - 'Get': 'tasklists_get.md' + - 'Insert': 'tasklists_insert.md' + - 'List': 'tasklists_list.md' + - 'Patch': 'tasklists_patch.md' + - 'Update': 'tasklists_update.md' +- 'Tasks': + - 'Clear': 'tasks_clear.md' + - 'Delete': 'tasks_delete.md' + - 'Get': 'tasks_get.md' + - 'Insert': 'tasks_insert.md' + - 'List': 'tasks_list.md' + - 'Move': 'tasks_move.md' + - 'Patch': 'tasks_patch.md' + - 'Update': 'tasks_update.md' theme: readthedocs diff --git a/gen/tasks1-cli/src/client.rs b/gen/tasks1-cli/src/client.rs deleted file mode 100644 index 
0ece418e7d..0000000000 --- a/gen/tasks1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, 
candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - 
push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - 
json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - 
Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError 
{ - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - 
Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/tasks1-cli/src/main.rs b/gen/tasks1-cli/src/main.rs index 3d9fd8340b..62f2205798 100644 --- a/gen/tasks1-cli/src/main.rs +++ b/gen/tasks1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_tasks1::{api, Error, oauth2}; +use google_tasks1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -247,7 +246,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -731,19 +730,19 @@ where call = call.updated_min(value.unwrap_or("")); }, "show-hidden" => { - call = call.show_hidden(arg_from_str(value.unwrap_or("false"), err, "show-hidden", "boolean")); + call = call.show_hidden( value.map(|v| arg_from_str(v, err, "show-hidden", "boolean")).unwrap_or(false)); }, "show-deleted" => { - call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); + call = call.show_deleted( value.map(|v| arg_from_str(v, err, "show-deleted", "boolean")).unwrap_or(false)); }, "show-completed" => { - call = call.show_completed(arg_from_str(value.unwrap_or("false"), err, "show-completed", "boolean")); + call = call.show_completed( value.map(|v| arg_from_str(v, err, "show-completed", "boolean")).unwrap_or(false)); }, "page-token" => { call = 
call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "due-min" => { call = call.due_min(value.unwrap_or("")); @@ -1549,7 +1548,7 @@ async fn main() { let mut app = App::new("tasks1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230122") .about("The Google Tasks API lets you manage your tasks and task lists.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_tasks1_cli") .arg(Arg::with_name("url") diff --git a/gen/tasks1/Cargo.toml b/gen/tasks1/Cargo.toml index 0fb7c2ce76..8bb01de878 100644 --- a/gen/tasks1/Cargo.toml +++ b/gen/tasks1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-tasks1" -version = "5.0.2-beta-1+20230122" +version = "5.0.2+20230122" authors = ["Sebastian Thiel "] description = "A complete library to interact with Tasks (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tasks1" homepage = "https://developers.google.com/tasks/" -documentation = "https://docs.rs/google-tasks1/5.0.2-beta-1+20230122" +documentation = "https://docs.rs/google-tasks1/5.0.2+20230122" license = "MIT" keywords = ["tasks", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/tasks1/README.md b/gen/tasks1/README.md index 91f0a1414a..7a02a36aab 100644 --- a/gen/tasks1/README.md +++ b/gen/tasks1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-tasks1` library allows access to all features of the *Google Tasks* service. -This documentation was generated from *Tasks* crate version *5.0.2-beta-1+20230122*, where *20230122* is the exact revision of the *tasks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Tasks* crate version *5.0.2+20230122*, where *20230122* is the exact revision of the *tasks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Tasks* *v1* API can be found at the [official documentation site](https://developers.google.com/tasks/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/TasksHub) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/TasksHub) ... * tasklists - * [*delete*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TasklistDeleteCall), [*get*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TasklistGetCall), [*insert*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TasklistInsertCall), [*list*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TasklistListCall), [*patch*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TasklistPatchCall) and [*update*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TasklistUpdateCall) -* [tasks](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::Task) - * [*clear*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TaskClearCall), [*delete*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TaskDeleteCall), [*get*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TaskGetCall), [*insert*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TaskInsertCall), [*list*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TaskListCall), [*move*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TaskMoveCall), 
[*patch*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TaskPatchCall) and [*update*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/api::TaskUpdateCall) + * [*delete*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TasklistDeleteCall), [*get*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TasklistGetCall), [*insert*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TasklistInsertCall), [*list*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TasklistListCall), [*patch*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TasklistPatchCall) and [*update*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TasklistUpdateCall) +* [tasks](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::Task) + * [*clear*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TaskClearCall), [*delete*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TaskDeleteCall), [*get*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TaskGetCall), [*insert*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TaskInsertCall), [*list*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TaskListCall), [*move*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TaskMoveCall), [*patch*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TaskPatchCall) and [*update*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/api::TaskUpdateCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/TasksHub)** +* **[Hub](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/TasksHub)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::CallBuilder) -* **[Resources](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::CallBuilder) +* **[Resources](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::Part)** + * **[Parts](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -137,17 +137,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. 
@@ -157,29 +157,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::Delegate) to the -[Method Builder](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::Delegate) to the +[Method Builder](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::RequestValue) and -[decodable](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::RequestValue) and +[decodable](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-tasks1/5.0.2-beta-1+20230122/google_tasks1/client::RequestValue) are moved +* [request values](https://docs.rs/google-tasks1/5.0.2+20230122/google_tasks1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/tasks1/src/api.rs b/gen/tasks1/src/api.rs index 42f00e6924..2dd2fa0931 100644 --- a/gen/tasks1/src/api.rs +++ b/gen/tasks1/src/api.rs @@ -133,7 +133,7 @@ impl<'a, S> TasksHub { TasksHub { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://tasks.googleapis.com/".to_string(), _root_url: "https://tasks.googleapis.com/".to_string(), } @@ -147,7 +147,7 @@ impl<'a, S> TasksHub { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. 
+ /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/tasks1/src/client.rs b/gen/tasks1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/tasks1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. 
-pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. 
- fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. 
- /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. 
- /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/tasks1/src/lib.rs b/gen/tasks1/src/lib.rs index 38db6dd74c..b354bc0a1f 100644 --- a/gen/tasks1/src/lib.rs +++ b/gen/tasks1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Tasks* crate version *5.0.2-beta-1+20230122*, where *20230122* is the exact revision of the *tasks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Tasks* crate version *5.0.2+20230122*, where *20230122* is the exact revision of the *tasks:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Tasks* *v1* API can be found at the //! [official documentation site](https://developers.google.com/tasks/). diff --git a/gen/testing1-cli/Cargo.toml b/gen/testing1-cli/Cargo.toml index ff5e402451..c13cc5fe1f 100644 --- a/gen/testing1-cli/Cargo.toml +++ b/gen/testing1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-testing1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with testing (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/testing1-cli" @@ -20,13 +20,13 @@ name = "testing1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-testing1] path = "../testing1" -version = "4.0.1+20220301" +version = "5.0.2+20230119" + diff --git a/gen/testing1-cli/README.md b/gen/testing1-cli/README.md index 0be01d2935..69e67913ab 100644 --- a/gen/testing1-cli/README.md +++ b/gen/testing1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *testing* API at 
revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *testing* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash testing1 [options] diff --git a/gen/testing1-cli/mkdocs.yml b/gen/testing1-cli/mkdocs.yml index f61106abba..edb2984465 100644 --- a/gen/testing1-cli/mkdocs.yml +++ b/gen/testing1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: testing v4.0.1+20220301 +site_name: testing v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-testing1-cli site_description: A complete library to interact with testing (protocol v1) @@ -7,13 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/testing1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['application-detail-service_get-apk-details.md', 'Application Detail Service', 'Get Apk Details'] -- ['projects_test-matrices-cancel.md', 'Projects', 'Test Matrices Cancel'] -- ['projects_test-matrices-create.md', 'Projects', 'Test Matrices Create'] -- ['projects_test-matrices-get.md', 'Projects', 'Test Matrices Get'] -- ['test-environment-catalog_get.md', 'Test Environment Catalog', 'Get'] +nav: +- Home: 'index.md' +- 'Application Detail Service': + - 'Get Apk Details': 'application-detail-service_get-apk-details.md' +- 'Projects': + - 'Test Matrices Cancel': 'projects_test-matrices-cancel.md' + - 'Test Matrices Create': 'projects_test-matrices-create.md' + - 'Test Matrices Get': 'projects_test-matrices-get.md' +- 'Test Environment Catalog': + - 'Get': 'test-environment-catalog_get.md' theme: readthedocs diff --git a/gen/testing1-cli/src/client.rs b/gen/testing1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/testing1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use 
serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/testing1-cli/src/main.rs b/gen/testing1-cli/src/main.rs index a38140d8c8..3463517c3b 100644 --- a/gen/testing1-cli/src/main.rs +++ b/gen/testing1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_testing1::{api, Error, oauth2}; +use google_testing1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -243,6 +242,8 @@ where "test-specification.android-robo-test.app-bundle.bundle-location.gcs-path" => Some(("testSpecification.androidRoboTest.appBundle.bundleLocation.gcsPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "test-specification.android-robo-test.app-initial-activity" => Some(("testSpecification.androidRoboTest.appInitialActivity", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "test-specification.android-robo-test.app-package-id" => Some(("testSpecification.androidRoboTest.appPackageId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "test-specification.android-robo-test.max-depth" => Some(("testSpecification.androidRoboTest.maxDepth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "test-specification.android-robo-test.max-steps" => Some(("testSpecification.androidRoboTest.maxSteps", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "test-specification.android-robo-test.robo-mode" => Some(("testSpecification.androidRoboTest.roboMode", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), "test-specification.android-robo-test.robo-script.gcs-path" => Some(("testSpecification.androidRoboTest.roboScript.gcsPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "test-specification.android-test-loop.app-apk.gcs-path" => Some(("testSpecification.androidTestLoop.appApk.gcsPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -268,7 +269,7 @@ where "test-specification.test-timeout" => Some(("testSpecification.testTimeout", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "timestamp" => Some(("timestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["android-instrumentation-test", "android-matrix", "android-model-ids", "android-robo-test", "android-test-loop", "android-version-ids", "app-apk", "app-bundle", "app-bundle-id", "app-initial-activity", "app-ipa", "app-package-id", "bundle-location", "client-info", "directories-to-pull", "disable-performance-metrics", "disable-video-recording", "dont-autogrant-permissions", "duration-seconds", "environment-matrix", "execution-id", "fail-fast", "flaky-test-attempts", "gcs-path", "google-cloud-storage", "history-id", "invalid-matrix-details", "ios-test-loop", "ios-test-setup", "ios-xc-test", "locales", "name", "network-profile", "num-shards", "orchestrator-option", "orientations", "outcome-summary", "project-id", "result-storage", "results-url", "robo-mode", "robo-script", "scenario-labels", "scenarios", "sharding-option", "state", "systrace", "test-apk", "test-matrix-id", "test-package-id", "test-runner-class", "test-setup", "test-special-entitlements", "test-specification", "test-targets", "test-timeout", "tests-zip", "timestamp", "tool-results-execution", "tool-results-history", "uniform-sharding", "xcode-version", "xctestrun"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["android-instrumentation-test", "android-matrix", 
"android-model-ids", "android-robo-test", "android-test-loop", "android-version-ids", "app-apk", "app-bundle", "app-bundle-id", "app-initial-activity", "app-ipa", "app-package-id", "bundle-location", "client-info", "directories-to-pull", "disable-performance-metrics", "disable-video-recording", "dont-autogrant-permissions", "duration-seconds", "environment-matrix", "execution-id", "fail-fast", "flaky-test-attempts", "gcs-path", "google-cloud-storage", "history-id", "invalid-matrix-details", "ios-test-loop", "ios-test-setup", "ios-xc-test", "locales", "max-depth", "max-steps", "name", "network-profile", "num-shards", "orchestrator-option", "orientations", "outcome-summary", "project-id", "result-storage", "results-url", "robo-mode", "robo-script", "scenario-labels", "scenarios", "sharding-option", "state", "systrace", "test-apk", "test-matrix-id", "test-package-id", "test-runner-class", "test-setup", "test-special-entitlements", "test-specification", "test-targets", "test-timeout", "tests-zip", "timestamp", "tool-results-execution", "tool-results-history", "uniform-sharding", "xcode-version", "xctestrun"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -698,7 +699,7 @@ async fn main() { let mut app = App::new("testing1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230119") .about("Allows developers to run automated tests for their mobile applications on Google infrastructure.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_testing1_cli") .arg(Arg::with_name("url") diff --git a/gen/testing1/Cargo.toml b/gen/testing1/Cargo.toml index ab9273861e..fd7bfe1d55 100644 --- a/gen/testing1/Cargo.toml +++ b/gen/testing1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-testing1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete 
library to interact with testing (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/testing1" homepage = "https://developers.google.com/cloud-test-lab/" -documentation = "https://docs.rs/google-testing1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-testing1/5.0.2+20230119" license = "MIT" keywords = ["testing", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/testing1/README.md b/gen/testing1/README.md index c97a1f2e3d..45dcf49227 100644 --- a/gen/testing1/README.md +++ b/gen/testing1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-testing1` library allows access to all features of the *Google testing* service. -This documentation was generated from *testing* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *testing:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *testing* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *testing:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *testing* *v1* API can be found at the [official documentation site](https://developers.google.com/cloud-test-lab/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/Testing) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/Testing) ... 
* application detail service - * [*get apk details*](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/api::ApplicationDetailServiceGetApkDetailCall) + * [*get apk details*](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/api::ApplicationDetailServiceGetApkDetailCall) * projects - * [*test matrices cancel*](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/api::ProjectTestMatriceCancelCall), [*test matrices create*](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/api::ProjectTestMatriceCreateCall) and [*test matrices get*](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/api::ProjectTestMatriceGetCall) -* [test environment catalog](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/api::TestEnvironmentCatalog) - * [*get*](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/api::TestEnvironmentCatalogGetCall) + * [*test matrices cancel*](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/api::ProjectTestMatriceCancelCall), [*test matrices create*](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/api::ProjectTestMatriceCreateCall) and [*test matrices get*](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/api::ProjectTestMatriceGetCall) +* [test environment catalog](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/api::TestEnvironmentCatalog) + * [*get*](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/api::TestEnvironmentCatalogGetCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/Testing)** +* **[Hub](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/Testing)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method 
Builders*](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::CallBuilder) -* **[Resources](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::CallBuilder) +* **[Resources](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::Part)** + * **[Parts](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::Delegate) to the -[Method Builder](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::Delegate) to the +[Method Builder](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::RequestValue) and -[decodable](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::RequestValue) and +[decodable](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-testing1/5.0.2-beta-1+20230119/google_testing1/client::RequestValue) are moved +* [request values](https://docs.rs/google-testing1/5.0.2+20230119/google_testing1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/testing1/src/api.rs b/gen/testing1/src/api.rs index 967133c2f1..c55a7e0e3b 100644 --- a/gen/testing1/src/api.rs +++ b/gen/testing1/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> Testing { Testing { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://testing.googleapis.com/".to_string(), _root_url: "https://testing.googleapis.com/".to_string(), } @@ -147,7 +147,7 @@ impl<'a, S> Testing { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/testing1/src/client.rs b/gen/testing1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/testing1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/testing1/src/lib.rs b/gen/testing1/src/lib.rs index deb2ca6933..d6d46590e8 100644 --- a/gen/testing1/src/lib.rs +++ b/gen/testing1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *testing* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *testing:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *testing* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *testing:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *testing* *v1* API can be found at the //! [official documentation site](https://developers.google.com/cloud-test-lab/). diff --git a/gen/texttospeech1-cli/Cargo.toml b/gen/texttospeech1-cli/Cargo.toml index ebd3216511..ceb164a61f 100644 --- a/gen/texttospeech1-cli/Cargo.toml +++ b/gen/texttospeech1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-texttospeech1-cli" -version = "4.0.1+20220228" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Texttospeech (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/texttospeech1-cli" @@ -20,13 +20,13 @@ name = "texttospeech1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-texttospeech1] path = "../texttospeech1" -version = "4.0.1+20220228" +version = "5.0.2+20230118" + diff --git a/gen/texttospeech1-cli/README.md b/gen/texttospeech1-cli/README.md index 0128d745dd..74922ef115 100644 --- a/gen/texttospeech1-cli/README.md +++ b/gen/texttospeech1-cli/README.md @@ -25,10 +25,17 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Texttospeech* API at revision *20220228*. The CLI is at version *4.0.1*. +This documentation was generated from the *Texttospeech* API at revision *20230118*. The CLI is at version *5.0.2*. ```bash texttospeech1 [options] + operations + cancel (-r )... [-p ]... [-o ] + delete [-p ]... [-o ] + projects + locations-operations-get [-p ]... [-o ] + locations-operations-list [-p ]... [-o ] + locations-synthesize-long-audio (-r )... [-p ]... [-o ] text synthesize (-r )... [-p ]... [-o ] voices diff --git a/gen/texttospeech1-cli/mkdocs.yml b/gen/texttospeech1-cli/mkdocs.yml index 77dc5e851f..ea276e095b 100644 --- a/gen/texttospeech1-cli/mkdocs.yml +++ b/gen/texttospeech1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Texttospeech v4.0.1+20220228 +site_name: Texttospeech v5.0.2+20230118 site_url: http://byron.github.io/google-apis-rs/google-texttospeech1-cli site_description: A complete library to interact with Texttospeech (protocol v1) @@ -7,10 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/texttospeech1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['text_synthesize.md', 'Text', 'Synthesize'] -- ['voices_list.md', 'Voices', 'List'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' +- 'Projects': + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Synthesize Long Audio': 'projects_locations-synthesize-long-audio.md' +- 'Text': + - 'Synthesize': 'text_synthesize.md' +- 'Voices': + - 'List': 'voices_list.md' theme: readthedocs diff --git a/gen/texttospeech1-cli/src/client.rs b/gen/texttospeech1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/texttospeech1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY 
OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, 
-} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 
&& first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/texttospeech1-cli/src/main.rs b/gen/texttospeech1-cli/src/main.rs index e42470f8bd..1157cfd0d2 100644 --- a/gen/texttospeech1-cli/src/main.rs +++ b/gen/texttospeech1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_texttospeech1::{api, Error, oauth2}; +use google_texttospeech1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -51,6 +50,354 @@ where S::Future: Send + Unpin + 'static, S::Error: Into>, { + async fn _operations_cancel(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::CancelOperationRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.operations().cancel(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn 
_operations_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.operations().delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key 
{ + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_operations_list(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = 
true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_synthesize_long_audio(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, 
JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "audio-config.audio-encoding" => Some(("audioConfig.audioEncoding", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "audio-config.effects-profile-id" => Some(("audioConfig.effectsProfileId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "audio-config.pitch" => Some(("audioConfig.pitch", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "audio-config.sample-rate-hertz" => Some(("audioConfig.sampleRateHertz", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "audio-config.speaking-rate" => Some(("audioConfig.speakingRate", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "audio-config.volume-gain-db" => Some(("audioConfig.volumeGainDb", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), + "input.ssml" => Some(("input.ssml", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "input.text" => Some(("input.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "output-gcs-uri" => Some(("outputGcsUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "voice.custom-voice.model" => Some(("voice.customVoice.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "voice.custom-voice.reported-usage" => Some(("voice.customVoice.reportedUsage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "voice.language-code" => Some(("voice.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "voice.name" => Some(("voice.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "voice.ssml-gender" => Some(("voice.ssmlGender", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["audio-config", "audio-encoding", "custom-voice", "effects-profile-id", "input", 
"language-code", "model", "name", "output-gcs-uri", "pitch", "reported-usage", "sample-rate-hertz", "speaking-rate", "ssml", "ssml-gender", "text", "voice", "volume-gain-db"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::SynthesizeLongAudioRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_synthesize_long_audio(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + 
json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _text_synthesize(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -82,11 +429,13 @@ where "audio-config.volume-gain-db" => Some(("audioConfig.volumeGainDb", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "input.ssml" => Some(("input.ssml", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "input.text" => Some(("input.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "voice.custom-voice.model" => Some(("voice.customVoice.model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "voice.custom-voice.reported-usage" => Some(("voice.customVoice.reportedUsage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "voice.language-code" => Some(("voice.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "voice.name" => Some(("voice.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "voice.ssml-gender" => Some(("voice.ssmlGender", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["audio-config", "audio-encoding", "effects-profile-id", "input", "language-code", "name", "pitch", "sample-rate-hertz", "speaking-rate", "ssml", "ssml-gender", "text", "voice", "volume-gain-db"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["audio-config", "audio-encoding", "custom-voice", "effects-profile-id", "input", "language-code", "model", "name", "pitch", "reported-usage", "sample-rate-hertz", "speaking-rate", "ssml", "ssml-gender", "text", "voice", "volume-gain-db"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -207,6 +556,37 @@ where let mut call_result: Result<(), DoitError> = 
Ok(()); let mut err_opt: Option = None; match self.opt.subcommand() { + ("operations", Some(opt)) => { + match opt.subcommand() { + ("cancel", Some(opt)) => { + call_result = self._operations_cancel(opt, dry_run, &mut err).await; + }, + ("delete", Some(opt)) => { + call_result = self._operations_delete(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("operations".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, + ("projects", Some(opt)) => { + match opt.subcommand() { + ("locations-operations-get", Some(opt)) => { + call_result = self._projects_locations_operations_get(opt, dry_run, &mut err).await; + }, + ("locations-operations-list", Some(opt)) => { + call_result = self._projects_locations_operations_list(opt, dry_run, &mut err).await; + }, + ("locations-synthesize-long-audio", Some(opt)) => { + call_result = self._projects_locations_synthesize_long_audio(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("projects".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("text", Some(opt)) => { match opt.subcommand() { ("synthesize", Some(opt)) => { @@ -302,6 +682,134 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ + ("operations", "methods: 'cancel' and 'delete'", vec![ + ("cancel", + Some(r##"Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`."##), + "Details at http://byron.github.io/google-apis-rs/google_texttospeech1_cli/operations_cancel", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource to be cancelled."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("delete", + Some(r##"Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`."##), + "Details at http://byron.github.io/google-apis-rs/google_texttospeech1_cli/operations_delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource to be deleted."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + + ("projects", "methods: 'locations-operations-get', 'locations-operations-list' and 'locations-synthesize-long-audio'", vec![ + ("locations-operations-get", + Some(r##"Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service."##), + "Details at http://byron.github.io/google-apis-rs/google_texttospeech1_cli/projects_locations-operations-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-operations-list", + Some(r##"Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. 
For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id."##), + "Details at http://byron.github.io/google-apis-rs/google_texttospeech1_cli/projects_locations-operations-list", + vec![ + (Some(r##"name"##), + None, + Some(r##"The name of the operation's parent resource."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-synthesize-long-audio", + Some(r##"Synthesizes long form text asynchronously."##), + "Details at http://byron.github.io/google-apis-rs/google_texttospeech1_cli/projects_locations-synthesize-long-audio", + vec![ + (Some(r##"parent"##), + None, + Some(r##"The resource states of the request in the form of `projects/*/locations/*/voices/*`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("text", "methods: 'synthesize'", vec![ ("synthesize", Some(r##"Synthesizes speech synchronously: receive results after all text input has been processed."##), @@ -350,7 +858,7 @@ async fn main() { let mut app = App::new("texttospeech1") .author("Sebastian Thiel ") - .version("4.0.1+20220228") + .version("5.0.2+20230118") .about("Synthesizes natural-sounding speech by applying powerful neural 
network models.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_texttospeech1_cli") .arg(Arg::with_name("url") diff --git a/gen/texttospeech1/Cargo.toml b/gen/texttospeech1/Cargo.toml index 66337eaed9..e63081472b 100644 --- a/gen/texttospeech1/Cargo.toml +++ b/gen/texttospeech1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-texttospeech1" -version = "5.0.2-beta-1+20230118" +version = "5.0.2+20230118" authors = ["Sebastian Thiel "] description = "A complete library to interact with Texttospeech (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/texttospeech1" homepage = "https://cloud.google.com/text-to-speech/" -documentation = "https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118" +documentation = "https://docs.rs/google-texttospeech1/5.0.2+20230118" license = "MIT" keywords = ["texttospeech", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/texttospeech1/README.md b/gen/texttospeech1/README.md index 5849f22844..af6dbca770 100644 --- a/gen/texttospeech1/README.md +++ b/gen/texttospeech1/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-texttospeech1` library allows access to all features of the *Google Texttospeech* service. -This documentation was generated from *Texttospeech* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *texttospeech:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Texttospeech* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *texttospeech:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Texttospeech* *v1* API can be found at the [official documentation site](https://cloud.google.com/text-to-speech/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/Texttospeech) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/Texttospeech) ... -* [operations](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::Operation) - * [*cancel*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::OperationCancelCall) and [*delete*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::OperationDeleteCall) +* [operations](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::Operation) + * [*cancel*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::OperationCancelCall) and [*delete*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::OperationDeleteCall) * projects - * [*locations operations get*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::ProjectLocationOperationListCall) and [*locations synthesize long audio*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::ProjectLocationSynthesizeLongAudioCall) + * [*locations operations get*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::ProjectLocationOperationListCall) and [*locations synthesize long audio*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::ProjectLocationSynthesizeLongAudioCall) * text - * 
[*synthesize*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::TextSynthesizeCall) -* [voices](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::Voice) - * [*list*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/api::VoiceListCall) + * [*synthesize*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::TextSynthesizeCall) +* [voices](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::Voice) + * [*list*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/api::VoiceListCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/Texttospeech)** +* **[Hub](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/Texttospeech)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::CallBuilder) -* **[Resources](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::CallBuilder) +* **[Resources](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::Part)** + * **[Parts](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -133,17 +133,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -153,29 +153,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::Delegate) to the -[Method Builder](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::Delegate) to the +[Method Builder](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::RequestValue) and -[decodable](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::RequestValue) and +[decodable](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-texttospeech1/5.0.2-beta-1+20230118/google_texttospeech1/client::RequestValue) are moved +* [request values](https://docs.rs/google-texttospeech1/5.0.2+20230118/google_texttospeech1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/texttospeech1/src/api.rs b/gen/texttospeech1/src/api.rs index 26d0738988..b964596beb 100644 --- a/gen/texttospeech1/src/api.rs +++ b/gen/texttospeech1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Texttospeech { Texttospeech { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://texttospeech.googleapis.com/".to_string(), _root_url: "https://texttospeech.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> Texttospeech { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/texttospeech1/src/client.rs b/gen/texttospeech1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/texttospeech1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/texttospeech1/src/lib.rs b/gen/texttospeech1/src/lib.rs index 6c5c62de96..895149ca56 100644 --- a/gen/texttospeech1/src/lib.rs +++ b/gen/texttospeech1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Texttospeech* crate version *5.0.2-beta-1+20230118*, where *20230118* is the exact revision of the *texttospeech:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Texttospeech* crate version *5.0.2+20230118*, where *20230118* is the exact revision of the *texttospeech:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Texttospeech* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/text-to-speech/). diff --git a/gen/tpu1-cli/Cargo.toml b/gen/tpu1-cli/Cargo.toml index bc942d265d..2c3a101166 100644 --- a/gen/tpu1-cli/Cargo.toml +++ b/gen/tpu1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-tpu1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with TPU (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tpu1-cli" @@ -20,13 +20,13 @@ name = "tpu1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-tpu1] path = "../tpu1" -version = "4.0.1+20220301" +version = "5.0.2+20230105" + diff --git a/gen/tpu1-cli/README.md b/gen/tpu1-cli/README.md index 90e2ab8662..304d9e4e6b 100644 --- a/gen/tpu1-cli/README.md +++ b/gen/tpu1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *TPU* API at revision *20220301*. 
The CLI is at version *4.0.1*. +This documentation was generated from the *TPU* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash tpu1 [options] diff --git a/gen/tpu1-cli/mkdocs.yml b/gen/tpu1-cli/mkdocs.yml index 85eebc6090..40324b5023 100644 --- a/gen/tpu1-cli/mkdocs.yml +++ b/gen/tpu1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: TPU v4.0.1+20220301 +site_name: TPU v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-tpu1-cli site_description: A complete library to interact with TPU (protocol v1) @@ -7,25 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/tpu1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-accelerator-types-get.md', 'Projects', 'Locations Accelerator Types Get'] -- ['projects_locations-accelerator-types-list.md', 'Projects', 'Locations Accelerator Types List'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-nodes-create.md', 'Projects', 'Locations Nodes Create'] -- ['projects_locations-nodes-delete.md', 'Projects', 'Locations Nodes Delete'] -- ['projects_locations-nodes-get.md', 'Projects', 'Locations Nodes Get'] -- ['projects_locations-nodes-list.md', 'Projects', 'Locations Nodes List'] -- ['projects_locations-nodes-reimage.md', 'Projects', 'Locations Nodes Reimage'] -- ['projects_locations-nodes-start.md', 'Projects', 'Locations Nodes Start'] -- ['projects_locations-nodes-stop.md', 'Projects', 'Locations Nodes Stop'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-tensorflow-versions-get.md', 'Projects', 'Locations 
Tensorflow Versions Get'] -- ['projects_locations-tensorflow-versions-list.md', 'Projects', 'Locations Tensorflow Versions List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Accelerator Types Get': 'projects_locations-accelerator-types-get.md' + - 'Locations Accelerator Types List': 'projects_locations-accelerator-types-list.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Nodes Create': 'projects_locations-nodes-create.md' + - 'Locations Nodes Delete': 'projects_locations-nodes-delete.md' + - 'Locations Nodes Get': 'projects_locations-nodes-get.md' + - 'Locations Nodes List': 'projects_locations-nodes-list.md' + - 'Locations Nodes Reimage': 'projects_locations-nodes-reimage.md' + - 'Locations Nodes Start': 'projects_locations-nodes-start.md' + - 'Locations Nodes Stop': 'projects_locations-nodes-stop.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Tensorflow Versions Get': 'projects_locations-tensorflow-versions-get.md' + - 'Locations Tensorflow Versions List': 'projects_locations-tensorflow-versions-list.md' theme: readthedocs diff --git a/gen/tpu1-cli/src/client.rs b/gen/tpu1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/tpu1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; 
-use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/tpu1-cli/src/main.rs b/gen/tpu1-cli/src/main.rs index a226d00b5f..3a941c9c2f 100644 --- a/gen/tpu1-cli/src/main.rs +++ b/gen/tpu1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_tpu1::{api, Error, oauth2}; +use google_tpu1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -230,7 +229,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -502,7 +501,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -970,7 +969,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1084,7 +1083,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1681,7 +1680,7 @@ async fn main() { let mut app = App::new("tpu1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230105") .about("TPU API provides customers with access to Google TPU technology.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_tpu1_cli") .arg(Arg::with_name("url") diff --git a/gen/tpu1/Cargo.toml b/gen/tpu1/Cargo.toml index 4689bc8378..c889e673f2 100644 --- a/gen/tpu1/Cargo.toml +++ b/gen/tpu1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-tpu1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with TPU (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tpu1" homepage = "https://cloud.google.com/tpu/" -documentation = "https://docs.rs/google-tpu1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-tpu1/5.0.2+20230105" license = "MIT" keywords = ["tpu", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/tpu1/README.md b/gen/tpu1/README.md index 76e25f333c..04b6eb380d 100644 --- a/gen/tpu1/README.md +++ b/gen/tpu1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-tpu1` library allows access to all features of the *Google TPU* service. 
-This documentation was generated from *TPU* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *tpu:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *TPU* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *tpu:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *TPU* *v1* API can be found at the [official documentation site](https://cloud.google.com/tpu/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/TPU) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/TPU) ... * projects - * [*locations accelerator types get*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationAcceleratorTypeGetCall), [*locations accelerator types list*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationAcceleratorTypeListCall), [*locations get*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationListCall), [*locations nodes create*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationNodeCreateCall), [*locations nodes delete*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationNodeDeleteCall), [*locations nodes get*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationNodeGetCall), [*locations nodes list*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationNodeListCall), [*locations nodes reimage*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationNodeReimageCall), 
[*locations nodes start*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationNodeStartCall), [*locations nodes stop*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationNodeStopCall), [*locations operations cancel*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationOperationListCall), [*locations tensorflow versions get*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationTensorflowVersionGetCall) and [*locations tensorflow versions list*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/api::ProjectLocationTensorflowVersionListCall) + * [*locations accelerator types get*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationAcceleratorTypeGetCall), [*locations accelerator types list*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationAcceleratorTypeListCall), [*locations get*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationListCall), [*locations nodes create*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationNodeCreateCall), [*locations nodes delete*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationNodeDeleteCall), [*locations nodes get*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationNodeGetCall), [*locations nodes 
list*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationNodeListCall), [*locations nodes reimage*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationNodeReimageCall), [*locations nodes start*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationNodeStartCall), [*locations nodes stop*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationNodeStopCall), [*locations operations cancel*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationOperationListCall), [*locations tensorflow versions get*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationTensorflowVersionGetCall) and [*locations tensorflow versions list*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/api::ProjectLocationTensorflowVersionListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/TPU)** +* **[Hub](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/TPU)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::CallBuilder) -* **[Resources](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::Resource)** + * creates 
[*Method Builders*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::CallBuilder) +* **[Resources](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::Part)** + * **[Parts](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -130,17 +130,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. 
This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -150,29 +150,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::Delegate) to the -[Method Builder](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::Delegate) to the +[Method Builder](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::RequestValue) and -[decodable](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::RequestValue) and +[decodable](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-tpu1/5.0.2-beta-1+20230105/google_tpu1/client::RequestValue) are moved +* [request values](https://docs.rs/google-tpu1/5.0.2+20230105/google_tpu1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/tpu1/src/api.rs b/gen/tpu1/src/api.rs index 0fe20a6291..097fbcd252 100644 --- a/gen/tpu1/src/api.rs +++ b/gen/tpu1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> TPU { TPU { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://tpu.googleapis.com/".to_string(), _root_url: "https://tpu.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> TPU { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/tpu1/src/client.rs b/gen/tpu1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/tpu1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/tpu1/src/lib.rs b/gen/tpu1/src/lib.rs index 0d32242942..23942ef539 100644 --- a/gen/tpu1/src/lib.rs +++ b/gen/tpu1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *TPU* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *tpu:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *TPU* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *tpu:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *TPU* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/tpu/). diff --git a/gen/tpu1_alpha1-cli/Cargo.toml b/gen/tpu1_alpha1-cli/Cargo.toml index 4d9066b6d0..201d1f4245 100644 --- a/gen/tpu1_alpha1-cli/Cargo.toml +++ b/gen/tpu1_alpha1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-tpu1_alpha1-cli" -version = "4.0.1+20220301" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with TPU (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tpu1_alpha1-cli" @@ -20,13 +20,13 @@ name = "tpu1-alpha1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-tpu1_alpha1] path = "../tpu1_alpha1" -version = "4.0.1+20220301" +version = "5.0.2+20230105" + diff --git a/gen/tpu1_alpha1-cli/README.md b/gen/tpu1_alpha1-cli/README.md index 97d4c8acf4..45724d2349 100644 --- a/gen/tpu1_alpha1-cli/README.md +++ b/gen/tpu1_alpha1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from 
the *TPU* API at revision *20220301*. The CLI is at version *4.0.1*. +This documentation was generated from the *TPU* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash tpu1-alpha1 [options] diff --git a/gen/tpu1_alpha1-cli/mkdocs.yml b/gen/tpu1_alpha1-cli/mkdocs.yml index 458e1e14d4..6740d1bb87 100644 --- a/gen/tpu1_alpha1-cli/mkdocs.yml +++ b/gen/tpu1_alpha1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: TPU v4.0.1+20220301 +site_name: TPU v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-tpu1_alpha1-cli site_description: A complete library to interact with TPU (protocol v1alpha1) @@ -7,25 +7,26 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/tpu1_alpha1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-accelerator-types-get.md', 'Projects', 'Locations Accelerator Types Get'] -- ['projects_locations-accelerator-types-list.md', 'Projects', 'Locations Accelerator Types List'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-nodes-create.md', 'Projects', 'Locations Nodes Create'] -- ['projects_locations-nodes-delete.md', 'Projects', 'Locations Nodes Delete'] -- ['projects_locations-nodes-get.md', 'Projects', 'Locations Nodes Get'] -- ['projects_locations-nodes-list.md', 'Projects', 'Locations Nodes List'] -- ['projects_locations-nodes-reimage.md', 'Projects', 'Locations Nodes Reimage'] -- ['projects_locations-nodes-start.md', 'Projects', 'Locations Nodes Start'] -- ['projects_locations-nodes-stop.md', 'Projects', 'Locations Nodes Stop'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations 
Operations List'] -- ['projects_locations-tensorflow-versions-get.md', 'Projects', 'Locations Tensorflow Versions Get'] -- ['projects_locations-tensorflow-versions-list.md', 'Projects', 'Locations Tensorflow Versions List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Accelerator Types Get': 'projects_locations-accelerator-types-get.md' + - 'Locations Accelerator Types List': 'projects_locations-accelerator-types-list.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Nodes Create': 'projects_locations-nodes-create.md' + - 'Locations Nodes Delete': 'projects_locations-nodes-delete.md' + - 'Locations Nodes Get': 'projects_locations-nodes-get.md' + - 'Locations Nodes List': 'projects_locations-nodes-list.md' + - 'Locations Nodes Reimage': 'projects_locations-nodes-reimage.md' + - 'Locations Nodes Start': 'projects_locations-nodes-start.md' + - 'Locations Nodes Stop': 'projects_locations-nodes-stop.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Tensorflow Versions Get': 'projects_locations-tensorflow-versions-get.md' + - 'Locations Tensorflow Versions List': 'projects_locations-tensorflow-versions-list.md' theme: readthedocs diff --git a/gen/tpu1_alpha1-cli/src/client.rs b/gen/tpu1_alpha1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/tpu1_alpha1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use 
std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/tpu1_alpha1-cli/src/main.rs b/gen/tpu1_alpha1-cli/src/main.rs index c3844da1cd..5cd77d70af 100644 --- a/gen/tpu1_alpha1-cli/src/main.rs +++ b/gen/tpu1_alpha1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_tpu1_alpha1::{api, Error, oauth2}; +use google_tpu1_alpha1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -230,7 +229,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -509,7 +508,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -977,7 +976,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1091,7 +1090,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1688,7 +1687,7 @@ async fn main() { let mut app = App::new("tpu1-alpha1") .author("Sebastian Thiel ") - .version("4.0.1+20220301") + .version("5.0.2+20230105") .about("TPU API provides customers with access to Google TPU technology.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_tpu1_alpha1_cli") .arg(Arg::with_name("url") diff --git a/gen/tpu1_alpha1/Cargo.toml b/gen/tpu1_alpha1/Cargo.toml index 7fb9b65463..70918c5947 100644 --- a/gen/tpu1_alpha1/Cargo.toml +++ b/gen/tpu1_alpha1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-tpu1_alpha1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with TPU (protocol v1alpha1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/tpu1_alpha1" homepage = "https://cloud.google.com/tpu/" -documentation = "https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-tpu1_alpha1/5.0.2+20230105" license = "MIT" keywords = ["tpu", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/tpu1_alpha1/README.md b/gen/tpu1_alpha1/README.md index 1fa78095e7..b5696f5bdc 100644 --- a/gen/tpu1_alpha1/README.md +++ b/gen/tpu1_alpha1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-tpu1_alpha1` library allows access to all features of the *Google TPU* service. -This documentation was generated from *TPU* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *tpu:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *TPU* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *tpu:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *TPU* *v1_alpha1* API can be found at the [official documentation site](https://cloud.google.com/tpu/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/TPU) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/TPU) ... * projects - * [*locations accelerator types get*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationAcceleratorTypeGetCall), [*locations accelerator types list*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationAcceleratorTypeListCall), [*locations get*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationListCall), [*locations nodes create*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationNodeCreateCall), [*locations nodes delete*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationNodeDeleteCall), [*locations nodes get*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationNodeGetCall), [*locations 
nodes list*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationNodeListCall), [*locations nodes reimage*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationNodeReimageCall), [*locations nodes start*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationNodeStartCall), [*locations nodes stop*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationNodeStopCall), [*locations operations cancel*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationOperationListCall), [*locations tensorflow versions get*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationTensorflowVersionGetCall) and [*locations tensorflow versions list*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/api::ProjectLocationTensorflowVersionListCall) + * [*locations accelerator types get*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationAcceleratorTypeGetCall), [*locations accelerator types list*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationAcceleratorTypeListCall), [*locations get*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationListCall), 
[*locations nodes create*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationNodeCreateCall), [*locations nodes delete*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationNodeDeleteCall), [*locations nodes get*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationNodeGetCall), [*locations nodes list*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationNodeListCall), [*locations nodes reimage*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationNodeReimageCall), [*locations nodes start*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationNodeStartCall), [*locations nodes stop*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationNodeStopCall), [*locations operations cancel*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationOperationListCall), [*locations tensorflow versions get*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationTensorflowVersionGetCall) and [*locations tensorflow versions list*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/api::ProjectLocationTensorflowVersionListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/TPU)** +* **[Hub](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/TPU)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::CallBuilder) -* **[Resources](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::CallBuilder) +* **[Resources](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::Part)** + * **[Parts](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::Delegate) to the -[Method Builder](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::Delegate) to the +[Method Builder](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::RequestValue) and -[decodable](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::RequestValue) and +[decodable](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-tpu1_alpha1/5.0.2-beta-1+20230105/google_tpu1_alpha1/client::RequestValue) are moved +* [request values](https://docs.rs/google-tpu1_alpha1/5.0.2+20230105/google_tpu1_alpha1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/tpu1_alpha1/src/api.rs b/gen/tpu1_alpha1/src/api.rs index b540ed1c97..cf576f0f22 100644 --- a/gen/tpu1_alpha1/src/api.rs +++ b/gen/tpu1_alpha1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> TPU { TPU { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://tpu.googleapis.com/".to_string(), _root_url: "https://tpu.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> TPU { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/tpu1_alpha1/src/client.rs b/gen/tpu1_alpha1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/tpu1_alpha1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/tpu1_alpha1/src/lib.rs b/gen/tpu1_alpha1/src/lib.rs index 04d463f4f0..911e8a0bdd 100644 --- a/gen/tpu1_alpha1/src/lib.rs +++ b/gen/tpu1_alpha1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *TPU* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *tpu:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *TPU* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *tpu:v1alpha1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *TPU* *v1_alpha1* API can be found at the //! [official documentation site](https://cloud.google.com/tpu/). diff --git a/gen/transcoder1-cli/Cargo.toml b/gen/transcoder1-cli/Cargo.toml index 176d3c9413..4d7e10284e 100644 --- a/gen/transcoder1-cli/Cargo.toml +++ b/gen/transcoder1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-transcoder1-cli" -version = "4.0.1+20220201" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Transcoder (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/transcoder1-cli" @@ -20,13 +20,13 @@ name = "transcoder1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-transcoder1] path = "../transcoder1" -version = "4.0.1+20220201" +version = "5.0.2+20230105" + diff --git a/gen/transcoder1-cli/README.md b/gen/transcoder1-cli/README.md index 74077d9274..0362f1ac8e 100644 --- a/gen/transcoder1-cli/README.md +++ b/gen/transcoder1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Transcoder* API at revision *20220201*. The CLI is at version *4.0.1*. +This documentation was generated from the *Transcoder* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash transcoder1 [options] diff --git a/gen/transcoder1-cli/mkdocs.yml b/gen/transcoder1-cli/mkdocs.yml index 8364e4f7af..26b400d58b 100644 --- a/gen/transcoder1-cli/mkdocs.yml +++ b/gen/transcoder1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Transcoder v4.0.1+20220201 +site_name: Transcoder v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-transcoder1-cli site_description: A complete library to interact with Transcoder (protocol v1) @@ -7,16 +7,17 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/transcoder1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-job-templates-create.md', 'Projects', 'Locations Job Templates Create'] -- ['projects_locations-job-templates-delete.md', 'Projects', 'Locations Job Templates Delete'] -- ['projects_locations-job-templates-get.md', 'Projects', 'Locations Job Templates Get'] -- ['projects_locations-job-templates-list.md', 'Projects', 'Locations Job Templates List'] -- ['projects_locations-jobs-create.md', 'Projects', 'Locations Jobs Create'] -- ['projects_locations-jobs-delete.md', 'Projects', 'Locations Jobs Delete'] -- ['projects_locations-jobs-get.md', 'Projects', 'Locations Jobs Get'] -- ['projects_locations-jobs-list.md', 'Projects', 'Locations Jobs List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Job Templates Create': 'projects_locations-job-templates-create.md' + - 'Locations Job Templates Delete': 'projects_locations-job-templates-delete.md' + - 'Locations Job Templates Get': 'projects_locations-job-templates-get.md' + - 'Locations Job Templates List': 'projects_locations-job-templates-list.md' + - 'Locations Jobs Create': 'projects_locations-jobs-create.md' + - 'Locations Jobs Delete': 
'projects_locations-jobs-delete.md' + - 'Locations Jobs Get': 'projects_locations-jobs-get.md' + - 'Locations Jobs List': 'projects_locations-jobs-list.md' theme: readthedocs diff --git a/gen/transcoder1-cli/src/client.rs b/gen/transcoder1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/transcoder1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/transcoder1-cli/src/main.rs b/gen/transcoder1-cli/src/main.rs index c693a124ee..1b76bdd3be 100644 --- a/gen/transcoder1-cli/src/main.rs +++ b/gen/transcoder1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_transcoder1::{api, Error, oauth2}; +use google_transcoder1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -76,9 +75,10 @@ where match &temp_cursor.to_string()[..] { "config.output.uri" => Some(("config.output.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "config.pubsub-destination.topic" => Some(("config.pubsubDestination.topic", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["config", "name", "output", "pubsub-destination", "topic", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["config", "labels", "name", "output", "pubsub-destination", "topic", "uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -149,7 +149,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, 
"allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -260,7 +260,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -345,6 +345,7 @@ where "error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "error.message" => Some(("error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "input-uri" => Some(("inputUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "output-uri" => Some(("outputUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "start-time" => Some(("startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -352,7 +353,7 @@ where "template-id" => Some(("templateId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ttl-after-completion-days" => Some(("ttlAfterCompletionDays", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "config", "create-time", "end-time", "error", "input-uri", "message", "name", "output", "output-uri", "pubsub-destination", "start-time", "state", "template-id", "topic", "ttl-after-completion-days", "uri"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "config", "create-time", "end-time", "error", "input-uri", "labels", "message", "name", "output", "output-uri", "pubsub-destination", 
"start-time", "state", "template-id", "topic", "ttl-after-completion-days", "uri"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -419,7 +420,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "allow-missing" => { - call = call.allow_missing(arg_from_str(value.unwrap_or("false"), err, "allow-missing", "boolean")); + call = call.allow_missing( value.map(|v| arg_from_str(v, err, "allow-missing", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -530,7 +531,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -890,8 +891,8 @@ async fn main() { let mut app = App::new("transcoder1") .author("Sebastian Thiel ") - .version("4.0.1+20220201") - .about("This API converts video files into formats suitable for consumer distribution. ") + .version("5.0.2+20230105") + .about("This API converts video files into formats suitable for consumer distribution. For more information, see the Transcoder API overview. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_transcoder1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/transcoder1/Cargo.toml b/gen/transcoder1/Cargo.toml index 6363f93fa6..a588e0ee70 100644 --- a/gen/transcoder1/Cargo.toml +++ b/gen/transcoder1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-transcoder1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Transcoder (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/transcoder1" homepage = "https://cloud.google.com/transcoder/docs/" -documentation = "https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-transcoder1/5.0.2+20230105" license = "MIT" keywords = ["transcoder", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/transcoder1/README.md b/gen/transcoder1/README.md index 9ec6ca3fa2..4cf039fea2 100644 --- a/gen/transcoder1/README.md +++ b/gen/transcoder1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-transcoder1` library allows access to all features of the *Google Transcoder* service. -This documentation was generated from *Transcoder* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *transcoder:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Transcoder* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *transcoder:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Transcoder* *v1* API can be found at the [official documentation site](https://cloud.google.com/transcoder/docs/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/Transcoder) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/Transcoder) ... * projects - * [*locations job templates create*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/api::ProjectLocationJobTemplateCreateCall), [*locations job templates delete*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/api::ProjectLocationJobTemplateDeleteCall), [*locations job templates get*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/api::ProjectLocationJobTemplateGetCall), [*locations job templates list*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/api::ProjectLocationJobTemplateListCall), [*locations jobs create*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/api::ProjectLocationJobDeleteCall), [*locations jobs get*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/api::ProjectLocationJobGetCall) and [*locations jobs list*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/api::ProjectLocationJobListCall) + * [*locations job templates create*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/api::ProjectLocationJobTemplateCreateCall), [*locations job templates delete*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/api::ProjectLocationJobTemplateDeleteCall), [*locations job templates get*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/api::ProjectLocationJobTemplateGetCall), [*locations job templates 
list*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/api::ProjectLocationJobTemplateListCall), [*locations jobs create*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/api::ProjectLocationJobDeleteCall), [*locations jobs get*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/api::ProjectLocationJobGetCall) and [*locations jobs list*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/api::ProjectLocationJobListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/Transcoder)** +* **[Hub](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/Transcoder)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::CallBuilder) -* **[Resources](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::CallBuilder) +* **[Resources](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::Part)** + * **[Parts](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::Delegate) to the -[Method Builder](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::Delegate) to the +[Method Builder](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::RequestValue) and -[decodable](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::RequestValue) and +[decodable](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-transcoder1/5.0.2-beta-1+20230105/google_transcoder1/client::RequestValue) are moved +* [request values](https://docs.rs/google-transcoder1/5.0.2+20230105/google_transcoder1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/transcoder1/src/api.rs b/gen/transcoder1/src/api.rs index f3080fd6f8..65601038c5 100644 --- a/gen/transcoder1/src/api.rs +++ b/gen/transcoder1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Transcoder { Transcoder { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://transcoder.googleapis.com/".to_string(), _root_url: "https://transcoder.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Transcoder { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/transcoder1/src/client.rs b/gen/transcoder1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/transcoder1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/transcoder1/src/lib.rs b/gen/transcoder1/src/lib.rs index f2b00b6352..03ceadd805 100644 --- a/gen/transcoder1/src/lib.rs +++ b/gen/transcoder1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Transcoder* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *transcoder:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Transcoder* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *transcoder:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Transcoder* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/transcoder/docs/). diff --git a/gen/transcoder1_beta1-cli/Cargo.toml b/gen/transcoder1_beta1-cli/Cargo.toml index a80632c178..1bafe78d5b 100644 --- a/gen/transcoder1_beta1-cli/Cargo.toml +++ b/gen/transcoder1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-transcoder1_beta1-cli" -version = "4.0.1+20210323" +version = "5.0.2+20210323" authors = ["Sebastian Thiel "] description = "A complete library to interact with Transcoder (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/transcoder1_beta1-cli" @@ -20,13 +20,13 @@ name = "transcoder1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-transcoder1_beta1] path = "../transcoder1_beta1" -version = "4.0.1+20210323" +version = "5.0.2+20210323" + diff --git a/gen/transcoder1_beta1-cli/README.md b/gen/transcoder1_beta1-cli/README.md index 912b3e8e40..26bec981d6 100644 --- a/gen/transcoder1_beta1-cli/README.md +++ b/gen/transcoder1_beta1-cli/README.md @@ -25,7 +25,7 
@@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Transcoder* API at revision *20210323*. The CLI is at version *4.0.1*. +This documentation was generated from the *Transcoder* API at revision *20210323*. The CLI is at version *5.0.2*. ```bash transcoder1-beta1 [options] diff --git a/gen/transcoder1_beta1-cli/mkdocs.yml b/gen/transcoder1_beta1-cli/mkdocs.yml index 38cebfc1e6..9ece9a70bb 100644 --- a/gen/transcoder1_beta1-cli/mkdocs.yml +++ b/gen/transcoder1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Transcoder v4.0.1+20210323 +site_name: Transcoder v5.0.2+20210323 site_url: http://byron.github.io/google-apis-rs/google-transcoder1_beta1-cli site_description: A complete library to interact with Transcoder (protocol v1beta1) @@ -7,16 +7,17 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/transcoder1_beta docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-job-templates-create.md', 'Projects', 'Locations Job Templates Create'] -- ['projects_locations-job-templates-delete.md', 'Projects', 'Locations Job Templates Delete'] -- ['projects_locations-job-templates-get.md', 'Projects', 'Locations Job Templates Get'] -- ['projects_locations-job-templates-list.md', 'Projects', 'Locations Job Templates List'] -- ['projects_locations-jobs-create.md', 'Projects', 'Locations Jobs Create'] -- ['projects_locations-jobs-delete.md', 'Projects', 'Locations Jobs Delete'] -- ['projects_locations-jobs-get.md', 'Projects', 'Locations Jobs Get'] -- ['projects_locations-jobs-list.md', 'Projects', 'Locations Jobs List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Job Templates Create': 'projects_locations-job-templates-create.md' + - 'Locations Job Templates Delete': 'projects_locations-job-templates-delete.md' + - 'Locations Job Templates Get': 'projects_locations-job-templates-get.md' + - 'Locations Job Templates List': 
'projects_locations-job-templates-list.md' + - 'Locations Jobs Create': 'projects_locations-jobs-create.md' + - 'Locations Jobs Delete': 'projects_locations-jobs-delete.md' + - 'Locations Jobs Get': 'projects_locations-jobs-get.md' + - 'Locations Jobs List': 'projects_locations-jobs-list.md' theme: readthedocs diff --git a/gen/transcoder1_beta1-cli/src/client.rs b/gen/transcoder1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/transcoder1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/transcoder1_beta1-cli/src/main.rs b/gen/transcoder1_beta1-cli/src/main.rs index fa264b157f..4c0417c02e 100644 --- a/gen/transcoder1_beta1-cli/src/main.rs +++ b/gen/transcoder1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_transcoder1_beta1::{api, Error, oauth2}; +use google_transcoder1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -256,7 +255,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -522,7 +521,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -876,7 +875,7 @@ async fn main() { let mut app = App::new("transcoder1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20210323") + .version("5.0.2+20210323") .about("This API converts video files into formats suitable for consumer distribution. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_transcoder1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/transcoder1_beta1/Cargo.toml b/gen/transcoder1_beta1/Cargo.toml index 955058bf75..05266f291c 100644 --- a/gen/transcoder1_beta1/Cargo.toml +++ b/gen/transcoder1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-transcoder1_beta1" -version = "5.0.2-beta-1+20210323" +version = "5.0.2+20210323" authors = ["Sebastian Thiel "] description = "A complete library to interact with Transcoder (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/transcoder1_beta1" homepage = "https://cloud.google.com/transcoder/docs/" -documentation = "https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323" +documentation = "https://docs.rs/google-transcoder1_beta1/5.0.2+20210323" license = "MIT" keywords = ["transcoder", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/transcoder1_beta1/README.md b/gen/transcoder1_beta1/README.md index 688a665cbe..682aed13b3 100644 --- a/gen/transcoder1_beta1/README.md +++ b/gen/transcoder1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-transcoder1_beta1` library allows access to all features of the *Google Transcoder* service. -This documentation was generated from *Transcoder* crate version *5.0.2-beta-1+20210323*, where *20210323* is the exact revision of the *transcoder:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Transcoder* crate version *5.0.2+20210323*, where *20210323* is the exact revision of the *transcoder:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Transcoder* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/transcoder/docs/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/Transcoder) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/Transcoder) ... * projects - * [*locations job templates create*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/api::ProjectLocationJobTemplateCreateCall), [*locations job templates delete*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/api::ProjectLocationJobTemplateDeleteCall), [*locations job templates get*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/api::ProjectLocationJobTemplateGetCall), [*locations job templates list*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/api::ProjectLocationJobTemplateListCall), [*locations jobs create*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/api::ProjectLocationJobDeleteCall), [*locations jobs get*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/api::ProjectLocationJobGetCall) and [*locations jobs list*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/api::ProjectLocationJobListCall) + * [*locations job templates create*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/api::ProjectLocationJobTemplateCreateCall), [*locations job templates delete*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/api::ProjectLocationJobTemplateDeleteCall), [*locations job templates 
get*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/api::ProjectLocationJobTemplateGetCall), [*locations job templates list*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/api::ProjectLocationJobTemplateListCall), [*locations jobs create*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/api::ProjectLocationJobCreateCall), [*locations jobs delete*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/api::ProjectLocationJobDeleteCall), [*locations jobs get*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/api::ProjectLocationJobGetCall) and [*locations jobs list*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/api::ProjectLocationJobListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/Transcoder)** +* **[Hub](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/Transcoder)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call 
Builders*](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::Part)** + * **[Parts](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-transcoder1_beta1/5.0.2-beta-1+20210323/google_transcoder1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-transcoder1_beta1/5.0.2+20210323/google_transcoder1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/transcoder1_beta1/src/api.rs b/gen/transcoder1_beta1/src/api.rs index d645044a68..26e59099cb 100644 --- a/gen/transcoder1_beta1/src/api.rs +++ b/gen/transcoder1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Transcoder { Transcoder { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://transcoder.googleapis.com/".to_string(), _root_url: "https://transcoder.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Transcoder { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/transcoder1_beta1/src/client.rs b/gen/transcoder1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/transcoder1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/transcoder1_beta1/src/lib.rs b/gen/transcoder1_beta1/src/lib.rs index 52feafa359..0d2bfa03e3 100644 --- a/gen/transcoder1_beta1/src/lib.rs +++ b/gen/transcoder1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Transcoder* crate version *5.0.2-beta-1+20210323*, where *20210323* is the exact revision of the *transcoder:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Transcoder* crate version *5.0.2+20210323*, where *20210323* is the exact revision of the *transcoder:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Transcoder* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/transcoder/docs/). diff --git a/gen/translate2-cli/Cargo.toml b/gen/translate2-cli/Cargo.toml index de49199de0..0e45e80303 100644 --- a/gen/translate2-cli/Cargo.toml +++ b/gen/translate2-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-translate2-cli" -version = "4.0.1+20170525" +version = "5.0.2+20170525" authors = ["Sebastian Thiel "] description = "A complete library to interact with Translate (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/translate2-cli" @@ -20,13 +20,13 @@ name = "translate2" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-translate2] path = "../translate2" -version = "4.0.1+20170525" +version = "5.0.2+20170525" + diff --git a/gen/translate2-cli/README.md b/gen/translate2-cli/README.md index 7e62eb216b..2237829c9c 100644 --- a/gen/translate2-cli/README.md +++ b/gen/translate2-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Translate* API at revision *20170525*. The CLI is at version *4.0.1*. +This documentation was generated from the *Translate* API at revision *20170525*. The CLI is at version *5.0.2*. ```bash translate2 [options] diff --git a/gen/translate2-cli/mkdocs.yml b/gen/translate2-cli/mkdocs.yml index b33b11cd4f..bd9c83bc8d 100644 --- a/gen/translate2-cli/mkdocs.yml +++ b/gen/translate2-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Translate v4.0.1+20170525 +site_name: Translate v5.0.2+20170525 site_url: http://byron.github.io/google-apis-rs/google-translate2-cli site_description: A complete library to interact with Translate (protocol v2) @@ -7,13 +7,16 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/translate2-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['detections_detect.md', 'Detections', 'Detect'] -- ['detections_list.md', 'Detections', 'List'] -- ['languages_list.md', 'Languages', 'List'] -- ['translations_list.md', 'Translations', 'List'] -- ['translations_translate.md', 'Translations', 'Translate'] +nav: +- Home: 'index.md' +- 'Detections': + - 'Detect': 'detections_detect.md' + - 'List': 'detections_list.md' +- 'Languages': + - 'List': 'languages_list.md' +- 'Translations': + - 'List': 'translations_list.md' + - 'Translate': 'translations_translate.md' theme: readthedocs diff --git a/gen/translate2-cli/src/client.rs b/gen/translate2-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/translate2-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, 
Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/translate2-cli/src/main.rs b/gen/translate2-cli/src/main.rs index b5f28ab34c..536174a2f7 100644 --- a/gen/translate2-cli/src/main.rs +++ b/gen/translate2-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_translate2::{api, Error, oauth2}; +use google_translate2::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -645,7 +644,7 @@ async fn main() { let mut app = App::new("translate2") .author("Sebastian Thiel ") - .version("4.0.1+20170525") + .version("5.0.2+20170525") .about("The Google Cloud Translation API lets websites and programs integrate with Google Translate programmatically.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_translate2_cli") diff --git a/gen/translate2/Cargo.toml b/gen/translate2/Cargo.toml index e6fae498c4..fc61a0bae1 100644 --- a/gen/translate2/Cargo.toml +++ b/gen/translate2/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-translate2" -version = "5.0.2-beta-1+20170525" +version = "5.0.2+20170525" authors = ["Sebastian Thiel "] description = "A complete library to interact with Translate (protocol v2)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/translate2" homepage = "https://code.google.com/apis/language/translate/v2/getting_started.html" -documentation = "https://docs.rs/google-translate2/5.0.2-beta-1+20170525" +documentation = "https://docs.rs/google-translate2/5.0.2+20170525" license = 
"MIT" keywords = ["translate", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/translate2/README.md b/gen/translate2/README.md index 7d01ed6a63..026e235766 100644 --- a/gen/translate2/README.md +++ b/gen/translate2/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-translate2` library allows access to all features of the *Google Translate* service. -This documentation was generated from *Translate* crate version *5.0.2-beta-1+20170525*, where *20170525* is the exact revision of the *translate:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Translate* crate version *5.0.2+20170525*, where *20170525* is the exact revision of the *translate:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Translate* *v2* API can be found at the [official documentation site](https://code.google.com/apis/language/translate/v2/getting_started.html). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/Translate) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/Translate) ... 
* detections - * [*detect*](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/api::DetectionDetectCall) and [*list*](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/api::DetectionListCall) + * [*detect*](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/api::DetectionDetectCall) and [*list*](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/api::DetectionListCall) * languages - * [*list*](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/api::LanguageListCall) + * [*list*](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/api::LanguageListCall) * translations - * [*list*](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/api::TranslationListCall) and [*translate*](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/api::TranslationTranslateCall) + * [*list*](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/api::TranslationListCall) and [*translate*](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/api::TranslationTranslateCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/Translate)** +* **[Hub](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/Translate)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::CallBuilder) -* **[Resources](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::CallBuilder) +* **[Resources](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::Part)** + * **[Parts](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::CallBuilder)** +* **[Activities](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -129,17 +129,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -149,29 +149,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::Delegate) to the -[Method Builder](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::Delegate) to the +[Method Builder](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::RequestValue) and -[decodable](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::RequestValue) and +[decodable](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-translate2/5.0.2-beta-1+20170525/google_translate2/client::RequestValue) are moved +* [request values](https://docs.rs/google-translate2/5.0.2+20170525/google_translate2/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/translate2/src/api.rs b/gen/translate2/src/api.rs index c5dabb315f..0e30894293 100644 --- a/gen/translate2/src/api.rs +++ b/gen/translate2/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Translate { Translate { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://translation.googleapis.com/language/translate/".to_string(), _root_url: "https://translation.googleapis.com/".to_string(), } @@ -146,7 +146,7 @@ impl<'a, S> Translate { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/translate2/src/client.rs b/gen/translate2/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/translate2/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/translate2/src/lib.rs b/gen/translate2/src/lib.rs index f38ca71e77..ba7f38ff9a 100644 --- a/gen/translate2/src/lib.rs +++ b/gen/translate2/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Translate* crate version *5.0.2-beta-1+20170525*, where *20170525* is the exact revision of the *translate:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Translate* crate version *5.0.2+20170525*, where *20170525* is the exact revision of the *translate:v2* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Translate* *v2* API can be found at the //! [official documentation site](https://code.google.com/apis/language/translate/v2/getting_started.html). diff --git a/gen/translate3-cli/Cargo.toml b/gen/translate3-cli/Cargo.toml index e464c46498..dd8bf4cd40 100644 --- a/gen/translate3-cli/Cargo.toml +++ b/gen/translate3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-translate3-cli" -version = "4.0.1+20220121" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Translate (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/translate3-cli" @@ -20,13 +20,13 @@ name = "translate3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-translate3] path = "../translate3" -version = "4.0.1+20220121" +version = "5.0.2+20230106" + diff --git a/gen/translate3-cli/README.md b/gen/translate3-cli/README.md index e73202f3dd..aa6fd471cd 100644 --- a/gen/translate3-cli/README.md +++ b/gen/translate3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Translate* API at revision *20220121*. The CLI is at version *4.0.1*. +This documentation was generated from the *Translate* API at revision *20230106*. The CLI is at version *5.0.2*. ```bash translate3 [options] @@ -34,14 +34,31 @@ translate3 [options] get-supported-languages [-p ]... [-o ] locations-batch-translate-document (-r )... [-p ]... [-o ] locations-batch-translate-text (-r )... [-p ]... [-o ] + locations-datasets-create (-r )... [-p ]... [-o ] + locations-datasets-delete [-p ]... [-o ] + locations-datasets-examples-list [-p ]... [-o ] + locations-datasets-export-data (-r )... [-p ]... [-o ] + locations-datasets-get [-p ]... [-o ] + locations-datasets-import-data (-r )... [-p ]... [-o ] + locations-datasets-list [-p ]... [-o ] locations-detect-language (-r )... [-p ]... [-o ] locations-get [-p ]... [-o ] locations-get-supported-languages [-p ]... [-o ] locations-glossaries-create (-r )... [-p ]... [-o ] locations-glossaries-delete [-p ]... [-o ] locations-glossaries-get [-p ]... [-o ] + locations-glossaries-glossary-entries-create (-r )... [-p ]... [-o ] + locations-glossaries-glossary-entries-delete [-p ]... [-o ] + locations-glossaries-glossary-entries-get [-p ]... [-o ] + locations-glossaries-glossary-entries-list [-p ]... [-o ] + locations-glossaries-glossary-entries-patch (-r )... [-p ]... [-o ] locations-glossaries-list [-p ]... [-o ] + locations-glossaries-patch (-r )... [-p ]... [-o ] locations-list [-p ]... [-o ] + locations-models-create (-r )... [-p ]... [-o ] + locations-models-delete [-p ]... [-o ] + locations-models-get [-p ]... [-o ] + locations-models-list [-p ]... [-o ] locations-operations-cancel (-r )... [-p ]... [-o ] locations-operations-delete [-p ]... [-o ] locations-operations-get [-p ]... 
[-o ] diff --git a/gen/translate3-cli/mkdocs.yml b/gen/translate3-cli/mkdocs.yml index 537096538b..aade4ee32b 100644 --- a/gen/translate3-cli/mkdocs.yml +++ b/gen/translate3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Translate v4.0.1+20220121 +site_name: Translate v5.0.2+20230106 site_url: http://byron.github.io/google-apis-rs/google-translate3-cli site_description: A complete library to interact with Translate (protocol v3) @@ -7,28 +7,46 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/translate3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_detect-language.md', 'Projects', 'Detect Language'] -- ['projects_get-supported-languages.md', 'Projects', 'Get Supported Languages'] -- ['projects_locations-batch-translate-document.md', 'Projects', 'Locations Batch Translate Document'] -- ['projects_locations-batch-translate-text.md', 'Projects', 'Locations Batch Translate Text'] -- ['projects_locations-detect-language.md', 'Projects', 'Locations Detect Language'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-get-supported-languages.md', 'Projects', 'Locations Get Supported Languages'] -- ['projects_locations-glossaries-create.md', 'Projects', 'Locations Glossaries Create'] -- ['projects_locations-glossaries-delete.md', 'Projects', 'Locations Glossaries Delete'] -- ['projects_locations-glossaries-get.md', 'Projects', 'Locations Glossaries Get'] -- ['projects_locations-glossaries-list.md', 'Projects', 'Locations Glossaries List'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- 
['projects_locations-operations-wait.md', 'Projects', 'Locations Operations Wait'] -- ['projects_locations-translate-document.md', 'Projects', 'Locations Translate Document'] -- ['projects_locations-translate-text.md', 'Projects', 'Locations Translate Text'] -- ['projects_translate-text.md', 'Projects', 'Translate Text'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Detect Language': 'projects_detect-language.md' + - 'Get Supported Languages': 'projects_get-supported-languages.md' + - 'Locations Batch Translate Document': 'projects_locations-batch-translate-document.md' + - 'Locations Batch Translate Text': 'projects_locations-batch-translate-text.md' + - 'Locations Datasets Create': 'projects_locations-datasets-create.md' + - 'Locations Datasets Delete': 'projects_locations-datasets-delete.md' + - 'Locations Datasets Examples List': 'projects_locations-datasets-examples-list.md' + - 'Locations Datasets Export Data': 'projects_locations-datasets-export-data.md' + - 'Locations Datasets Get': 'projects_locations-datasets-get.md' + - 'Locations Datasets Import Data': 'projects_locations-datasets-import-data.md' + - 'Locations Datasets List': 'projects_locations-datasets-list.md' + - 'Locations Detect Language': 'projects_locations-detect-language.md' + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Get Supported Languages': 'projects_locations-get-supported-languages.md' + - 'Locations Glossaries Create': 'projects_locations-glossaries-create.md' + - 'Locations Glossaries Delete': 'projects_locations-glossaries-delete.md' + - 'Locations Glossaries Get': 'projects_locations-glossaries-get.md' + - 'Locations Glossaries Glossary Entries Create': 'projects_locations-glossaries-glossary-entries-create.md' + - 'Locations Glossaries Glossary Entries Delete': 'projects_locations-glossaries-glossary-entries-delete.md' + - 'Locations Glossaries Glossary Entries Get': 'projects_locations-glossaries-glossary-entries-get.md' + - 'Locations Glossaries Glossary 
Entries List': 'projects_locations-glossaries-glossary-entries-list.md' + - 'Locations Glossaries Glossary Entries Patch': 'projects_locations-glossaries-glossary-entries-patch.md' + - 'Locations Glossaries List': 'projects_locations-glossaries-list.md' + - 'Locations Glossaries Patch': 'projects_locations-glossaries-patch.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Models Create': 'projects_locations-models-create.md' + - 'Locations Models Delete': 'projects_locations-models-delete.md' + - 'Locations Models Get': 'projects_locations-models-get.md' + - 'Locations Models List': 'projects_locations-models-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Operations Wait': 'projects_locations-operations-wait.md' + - 'Locations Translate Document': 'projects_locations-translate-document.md' + - 'Locations Translate Text': 'projects_locations-translate-text.md' + - 'Translate Text': 'projects_translate-text.md' theme: readthedocs diff --git a/gen/translate3-cli/src/client.rs b/gen/translate3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/translate3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - 
-// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/translate3-cli/src/main.rs b/gen/translate3-cli/src/main.rs index 41e6f0a734..ff3df95e09 100644 --- a/gen/translate3-cli/src/main.rs +++ b/gen/translate3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_translate3::{api, Error, oauth2}; +use google_translate3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -221,13 +220,14 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "customized-attribution" => Some(("customizedAttribution", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "format-conversions" => Some(("formatConversions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "models" => Some(("models", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "output-config.gcs-destination.output-uri-prefix" => Some(("outputConfig.gcsDestination.outputUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-language-code" => Some(("sourceLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "target-language-codes" => Some(("targetLanguageCodes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["format-conversions", "gcs-destination", "models", "output-config", "output-uri-prefix", "source-language-code", "target-language-codes"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["customized-attribution", "format-conversions", "gcs-destination", "models", "output-config", "output-uri-prefix", "source-language-code", "target-language-codes"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -376,6 +376,494 @@ where } } + async fn _projects_locations_datasets_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); 
+ if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "example-count" => Some(("exampleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-language-code" => Some(("sourceLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-language-code" => Some(("targetLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "test-example-count" => Some(("testExampleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "train-example-count" => Some(("trainExampleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "validate-example-count" => Some(("validateExampleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "example-count", "name", "source-language-code", "target-language-code", "test-example-count", "train-example-count", "update-time", "validate-example-count"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Dataset = json::value::from_value(object).unwrap(); + let mut call = 
self.hub.projects().locations_datasets_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_datasets_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_datasets_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = 
call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_datasets_examples_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_datasets_examples_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, 
value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_datasets_export_data(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "output-config.gcs-destination.output-uri-prefix" => Some(("outputConfig.gcsDestination.outputUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["gcs-destination", "output-config", "output-uri-prefix"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ExportDataRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_datasets_export_data(request, opt.value_of("dataset").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut 
value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_datasets_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_datasets_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_datasets_import_data(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> 
Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec![]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::ImportDataRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_datasets_import_data(request, opt.value_of("dataset").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in 
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_datasets_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_datasets_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = 
call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_detect_language(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -598,6 +1086,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "end-time" => Some(("endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "entry-count" => Some(("entryCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "input-config.gcs-source.input-uri" => Some(("inputConfig.gcsSource.inputUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -607,7 +1096,7 @@ where "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "submit-time" => Some(("submitTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["end-time", "entry-count", "gcs-source", "input-config", "input-uri", "language-codes", "language-codes-set", "language-pair", "name", "source-language-code", "submit-time", "target-language-code"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["display-name", "end-time", "entry-count", "gcs-source", "input-config", "input-uri", 
"language-codes", "language-codes-set", "language-pair", "name", "source-language-code", "submit-time", "target-language-code"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -771,6 +1260,349 @@ where } } + async fn _projects_locations_glossaries_glossary_entries_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terms-pair.source-term.language-code" => Some(("termsPair.sourceTerm.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terms-pair.source-term.text" => Some(("termsPair.sourceTerm.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terms-pair.target-term.language-code" => Some(("termsPair.targetTerm.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terms-pair.target-term.text" => Some(("termsPair.targetTerm.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "language-code", "name", "source-term", "target-term", "terms-pair", "text"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GlossaryEntry = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_glossaries_glossary_entries_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_glossaries_glossary_entries_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_glossaries_glossary_entries_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = 
match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_glossaries_glossary_entries_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_glossaries_glossary_entries_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), 
+ Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_glossaries_glossary_entries_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_glossaries_glossary_entries_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_glossaries_glossary_entries_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terms-pair.source-term.language-code" => Some(("termsPair.sourceTerm.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terms-pair.source-term.text" => Some(("termsPair.sourceTerm.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terms-pair.target-term.language-code" => Some(("termsPair.targetTerm.languageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "terms-pair.target-term.text" => Some(("termsPair.targetTerm.text", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["description", "language-code", "name", "source-term", "target-term", "terms-pair", "text"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::GlossaryEntry = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_glossaries_glossary_entries_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + 
v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_glossaries_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_glossaries_list(opt.value_of("parent").unwrap_or("")); @@ -781,7 +1613,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -833,6 +1665,103 @@ where } } + async fn _projects_locations_glossaries_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut 
temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "end-time" => Some(("endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "entry-count" => Some(("entryCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "input-config.gcs-source.input-uri" => Some(("inputConfig.gcsSource.inputUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "language-codes-set.language-codes" => Some(("languageCodesSet.languageCodes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "language-pair.source-language-code" => Some(("languagePair.sourceLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "language-pair.target-language-code" => Some(("languagePair.targetLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "submit-time" => Some(("submitTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["display-name", "end-time", "entry-count", "gcs-source", "input-config", "input-uri", "language-codes", "language-codes-set", "language-pair", "name", "source-language-code", "submit-time", "target-language-code"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + 
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Glossary = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_glossaries_patch(request, opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "update-mask" => { + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["update-mask"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.projects().locations_list(opt.value_of("name").unwrap_or("")); @@ -843,7 +1772,268 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_models_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + 
let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "dataset" => Some(("dataset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "deploy-time" => Some(("deployTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "source-language-code" => Some(("sourceLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "target-language-code" => Some(("targetLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "test-example-count" => Some(("testExampleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "train-example-count" => Some(("trainExampleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "validate-example-count" => Some(("validateExampleCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "dataset", "deploy-time", "display-name", "name", 
"source-language-code", "target-language-code", "test-example-count", "train-example-count", "update-time", "validate-example-count"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Model = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_models_create(request, opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + 
ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_models_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_models_delete(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_models_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_models_get(opt.value_of("name").unwrap_or("")); + for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_models_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_models_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = 
call.filter(value.unwrap_or("")); @@ -1093,7 +2283,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1253,19 +2443,22 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "customized-attribution" => Some(("customizedAttribution", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document-input-config.content" => Some(("documentInputConfig.content", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document-input-config.gcs-source.input-uri" => Some(("documentInputConfig.gcsSource.inputUri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document-input-config.mime-type" => Some(("documentInputConfig.mimeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document-output-config.gcs-destination.output-uri-prefix" => Some(("documentOutputConfig.gcsDestination.outputUriPrefix", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "document-output-config.mime-type" => Some(("documentOutputConfig.mimeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "enable-shadow-removal-native-pdf" => Some(("enableShadowRemovalNativePdf", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "glossary-config.glossary" => Some(("glossaryConfig.glossary", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "glossary-config.ignore-case" => Some(("glossaryConfig.ignoreCase", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "is-translate-native-pdf-only" => Some(("isTranslateNativePdfOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "labels" => Some(("labels", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "model" => Some(("model", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "source-language-code" => Some(("sourceLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "target-language-code" => Some(("targetLanguageCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["content", "document-input-config", "document-output-config", "gcs-destination", "gcs-source", "glossary", "glossary-config", "ignore-case", "input-uri", "labels", "mime-type", "model", "output-uri-prefix", "source-language-code", "target-language-code"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["content", "customized-attribution", "document-input-config", "document-output-config", "enable-shadow-removal-native-pdf", "gcs-destination", "gcs-source", "glossary", "glossary-config", "ignore-case", "input-uri", "is-translate-native-pdf-only", "labels", "mime-type", "model", "output-uri-prefix", "source-language-code", "target-language-code"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1528,6 +2721,27 @@ where ("locations-batch-translate-text", Some(opt)) => { call_result = self._projects_locations_batch_translate_text(opt, dry_run, &mut err).await; }, + ("locations-datasets-create", Some(opt)) => { + call_result = self._projects_locations_datasets_create(opt, dry_run, &mut err).await; + }, + ("locations-datasets-delete", Some(opt)) => { + call_result = self._projects_locations_datasets_delete(opt, dry_run, &mut err).await; + }, + ("locations-datasets-examples-list", Some(opt)) => { + call_result = self._projects_locations_datasets_examples_list(opt, dry_run, &mut err).await; + }, + ("locations-datasets-export-data", Some(opt)) => { + call_result = self._projects_locations_datasets_export_data(opt, 
dry_run, &mut err).await; + }, + ("locations-datasets-get", Some(opt)) => { + call_result = self._projects_locations_datasets_get(opt, dry_run, &mut err).await; + }, + ("locations-datasets-import-data", Some(opt)) => { + call_result = self._projects_locations_datasets_import_data(opt, dry_run, &mut err).await; + }, + ("locations-datasets-list", Some(opt)) => { + call_result = self._projects_locations_datasets_list(opt, dry_run, &mut err).await; + }, ("locations-detect-language", Some(opt)) => { call_result = self._projects_locations_detect_language(opt, dry_run, &mut err).await; }, @@ -1546,12 +2760,42 @@ where ("locations-glossaries-get", Some(opt)) => { call_result = self._projects_locations_glossaries_get(opt, dry_run, &mut err).await; }, + ("locations-glossaries-glossary-entries-create", Some(opt)) => { + call_result = self._projects_locations_glossaries_glossary_entries_create(opt, dry_run, &mut err).await; + }, + ("locations-glossaries-glossary-entries-delete", Some(opt)) => { + call_result = self._projects_locations_glossaries_glossary_entries_delete(opt, dry_run, &mut err).await; + }, + ("locations-glossaries-glossary-entries-get", Some(opt)) => { + call_result = self._projects_locations_glossaries_glossary_entries_get(opt, dry_run, &mut err).await; + }, + ("locations-glossaries-glossary-entries-list", Some(opt)) => { + call_result = self._projects_locations_glossaries_glossary_entries_list(opt, dry_run, &mut err).await; + }, + ("locations-glossaries-glossary-entries-patch", Some(opt)) => { + call_result = self._projects_locations_glossaries_glossary_entries_patch(opt, dry_run, &mut err).await; + }, ("locations-glossaries-list", Some(opt)) => { call_result = self._projects_locations_glossaries_list(opt, dry_run, &mut err).await; }, + ("locations-glossaries-patch", Some(opt)) => { + call_result = self._projects_locations_glossaries_patch(opt, dry_run, &mut err).await; + }, ("locations-list", Some(opt)) => { call_result = self._projects_locations_list(opt, 
dry_run, &mut err).await; }, + ("locations-models-create", Some(opt)) => { + call_result = self._projects_locations_models_create(opt, dry_run, &mut err).await; + }, + ("locations-models-delete", Some(opt)) => { + call_result = self._projects_locations_models_delete(opt, dry_run, &mut err).await; + }, + ("locations-models-get", Some(opt)) => { + call_result = self._projects_locations_models_get(opt, dry_run, &mut err).await; + }, + ("locations-models-list", Some(opt)) => { + call_result = self._projects_locations_models_list(opt, dry_run, &mut err).await; + }, ("locations-operations-cancel", Some(opt)) => { call_result = self._projects_locations_operations_cancel(opt, dry_run, &mut err).await; }, @@ -1655,7 +2899,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'detect-language', 'get-supported-languages', 'locations-batch-translate-document', 'locations-batch-translate-text', 'locations-detect-language', 'locations-get', 'locations-get-supported-languages', 'locations-glossaries-create', 'locations-glossaries-delete', 'locations-glossaries-get', 'locations-glossaries-list', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-operations-wait', 'locations-translate-document', 'locations-translate-text' and 'translate-text'", vec![ + ("projects", "methods: 'detect-language', 'get-supported-languages', 'locations-batch-translate-document', 'locations-batch-translate-text', 'locations-datasets-create', 'locations-datasets-delete', 'locations-datasets-examples-list', 'locations-datasets-export-data', 'locations-datasets-get', 'locations-datasets-import-data', 'locations-datasets-list', 'locations-detect-language', 'locations-get', 'locations-get-supported-languages', 'locations-glossaries-create', 'locations-glossaries-delete', 'locations-glossaries-get', 'locations-glossaries-glossary-entries-create', 
'locations-glossaries-glossary-entries-delete', 'locations-glossaries-glossary-entries-get', 'locations-glossaries-glossary-entries-list', 'locations-glossaries-glossary-entries-patch', 'locations-glossaries-list', 'locations-glossaries-patch', 'locations-list', 'locations-models-create', 'locations-models-delete', 'locations-models-get', 'locations-models-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-operations-wait', 'locations-translate-document', 'locations-translate-text' and 'translate-text'", vec![ ("detect-language", Some(r##"Detects the language of text within a request."##), "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_detect-language", @@ -1756,6 +3000,178 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-datasets-create", + Some(r##"Creates a Dataset."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-datasets-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The project name."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-datasets-delete", + Some(r##"Deletes a dataset and all of its contents."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-datasets-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The name of the dataset to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-datasets-examples-list", + Some(r##"Lists sentence pairs in the dataset."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-datasets-examples-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Name of the parent dataset. 
In form of `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-datasets-export-data", + Some(r##"Exports dataset's data to the provided output location."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-datasets-export-data", + vec![ + (Some(r##"dataset"##), + None, + Some(r##"Required. Name of the dataset. In form of `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-datasets-get", + Some(r##"Gets a Dataset."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-datasets-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the dataset to retrieve."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-datasets-import-data", + Some(r##"Import sentence pairs into translation Dataset."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-datasets-import-data", + vec![ + (Some(r##"dataset"##), + None, + Some(r##"Required. Name of the dataset. In form of `projects/{project-number-or-id}/locations/{location-id}/datasets/{dataset-id}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-datasets-list", + Some(r##"Lists datasets."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-datasets-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Name of the parent project. 
In form of `projects/{project-number-or-id}/locations/{location-id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1900,6 +3316,128 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-glossaries-glossary-entries-create", + Some(r##"Creates a glossary entry."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-glossaries-glossary-entries-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The resource name of the glossary to create the entry under."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-glossaries-glossary-entries-delete", + Some(r##"Deletes a single entry from the glossary"##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-glossaries-glossary-entries-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The resource name of the glossary entry to delete"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-glossaries-glossary-entries-get", + Some(r##"Gets a single glossary entry by the given id."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-glossaries-glossary-entries-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the glossary entry to get"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-glossaries-glossary-entries-list", + Some(r##"List the entries for the glossary."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-glossaries-glossary-entries-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. 
The parent glossary resource name for listing the glossary's entries."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-glossaries-glossary-entries-patch", + Some(r##"Updates a glossary entry."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-glossaries-glossary-entries-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the entry. Format: "projects/*/locations/*/glossaries/*/glossaryEntries/*""##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1922,6 +3460,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-glossaries-patch", + Some(r##"Updates a glossary. A LRO is used since the update can be async if the glossary's entry file is updated."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-glossaries-patch", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the glossary. 
Glossary names have the form `projects/{project-number-or-id}/locations/{location-id}/glossaries/{glossary-id}`."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -1944,6 +3510,100 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-models-create", + Some(r##"Creates a Model."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-models-create", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The project name, in form of `projects/{project}/locations/{location}`"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-models-delete", + Some(r##"Deletes a model."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-models-delete", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The name of the model to delete."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-models-get", + Some(r##"Gets a model."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-models-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. The resource name of the model to retrieve."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-models-list", + Some(r##"Lists models."##), + "Details at http://byron.github.io/google-apis-rs/google_translate3_cli/projects_locations-models-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. Name of the parent project. 
In form of `projects/{project-number-or-id}/locations/{location-id}`"##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -2162,7 +3822,7 @@ async fn main() { let mut app = App::new("translate3") .author("Sebastian Thiel ") - .version("4.0.1+20220121") + .version("5.0.2+20230106") .about("Integrates text translation into your website or application.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_translate3_cli") .arg(Arg::with_name("url") diff --git a/gen/translate3/Cargo.toml b/gen/translate3/Cargo.toml index f9bf292858..3ce5e965aa 100644 --- a/gen/translate3/Cargo.toml +++ b/gen/translate3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-translate3" -version = "5.0.2-beta-1+20230106" +version = "5.0.2+20230106" authors = ["Sebastian Thiel "] description = "A complete library to interact with Translate (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/translate3" homepage = "https://cloud.google.com/translate/docs/quickstarts" -documentation = "https://docs.rs/google-translate3/5.0.2-beta-1+20230106" +documentation = "https://docs.rs/google-translate3/5.0.2+20230106" license = "MIT" keywords = ["translate", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/translate3/README.md b/gen/translate3/README.md index b110ba6937..4815e60ff9 100644 --- a/gen/translate3/README.md +++ b/gen/translate3/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-translate3` library allows access to all features of the *Google Translate* service. 
-This documentation was generated from *Translate* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *translate:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Translate* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *translate:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Translate* *v3* API can be found at the [official documentation site](https://cloud.google.com/translate/docs/quickstarts). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/Translate) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/Translate) ... * projects - * [*detect language*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectDetectLanguageCall), [*get supported languages*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectGetSupportedLanguageCall), [*locations batch translate document*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationBatchTranslateDocumentCall), [*locations batch translate text*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationBatchTranslateTextCall), [*locations datasets create*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationDatasetCreateCall), [*locations datasets delete*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationDatasetDeleteCall), [*locations datasets examples list*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationDatasetExampleListCall), [*locations datasets 
export data*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationDatasetExportDataCall), [*locations datasets get*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationDatasetGetCall), [*locations datasets import data*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationDatasetImportDataCall), [*locations datasets list*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationDatasetListCall), [*locations detect language*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationDetectLanguageCall), [*locations get*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGetCall), [*locations get supported languages*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGetSupportedLanguageCall), [*locations glossaries create*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryCreateCall), [*locations glossaries delete*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryDeleteCall), [*locations glossaries get*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryGetCall), [*locations glossaries glossary entries create*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryCreateCall), [*locations glossaries glossary entries delete*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryDeleteCall), [*locations glossaries glossary entries get*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryGetCall), [*locations glossaries glossary entries 
list*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryListCall), [*locations glossaries glossary entries patch*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryPatchCall), [*locations glossaries list*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryListCall), [*locations glossaries patch*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationGlossaryPatchCall), [*locations list*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationListCall), [*locations models create*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationModelCreateCall), [*locations models delete*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationModelDeleteCall), [*locations models get*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationModelGetCall), [*locations models list*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationModelListCall), [*locations operations cancel*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationOperationListCall), [*locations operations wait*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationOperationWaitCall), 
[*locations translate document*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationTranslateDocumentCall), [*locations translate text*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectLocationTranslateTextCall) and [*translate text*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/api::ProjectTranslateTextCall) + * [*detect language*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectDetectLanguageCall), [*get supported languages*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectGetSupportedLanguageCall), [*locations batch translate document*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationBatchTranslateDocumentCall), [*locations batch translate text*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationBatchTranslateTextCall), [*locations datasets create*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationDatasetCreateCall), [*locations datasets delete*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationDatasetDeleteCall), [*locations datasets examples list*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationDatasetExampleListCall), [*locations datasets export data*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationDatasetExportDataCall), [*locations datasets get*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationDatasetGetCall), [*locations datasets import data*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationDatasetImportDataCall), [*locations datasets list*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationDatasetListCall), [*locations detect 
language*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationDetectLanguageCall), [*locations get*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGetCall), [*locations get supported languages*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGetSupportedLanguageCall), [*locations glossaries create*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryCreateCall), [*locations glossaries delete*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryDeleteCall), [*locations glossaries get*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryGetCall), [*locations glossaries glossary entries create*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryCreateCall), [*locations glossaries glossary entries delete*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryDeleteCall), [*locations glossaries glossary entries get*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryGetCall), [*locations glossaries glossary entries list*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryListCall), [*locations glossaries glossary entries patch*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryGlossaryEntryPatchCall), [*locations glossaries list*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryListCall), [*locations glossaries patch*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationGlossaryPatchCall), [*locations 
list*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationListCall), [*locations models create*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationModelCreateCall), [*locations models delete*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationModelDeleteCall), [*locations models get*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationModelGetCall), [*locations models list*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationModelListCall), [*locations operations cancel*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationOperationListCall), [*locations operations wait*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationOperationWaitCall), [*locations translate document*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationTranslateDocumentCall), [*locations translate text*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectLocationTranslateTextCall) and [*translate text*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/api::ProjectTranslateTextCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/Translate)** +* 
**[Hub](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/Translate)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::CallBuilder) -* **[Resources](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::CallBuilder) +* **[Resources](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::Part)** + * **[Parts](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -137,17 +137,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -157,29 +157,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::Delegate) to the -[Method Builder](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::Delegate) to the +[Method Builder](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::RequestValue) and -[decodable](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::RequestValue) and +[decodable](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-translate3/5.0.2-beta-1+20230106/google_translate3/client::RequestValue) are moved +* [request values](https://docs.rs/google-translate3/5.0.2+20230106/google_translate3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/translate3/src/api.rs b/gen/translate3/src/api.rs index e0324e4e6d..6b08c0c5af 100644 --- a/gen/translate3/src/api.rs +++ b/gen/translate3/src/api.rs @@ -130,7 +130,7 @@ impl<'a, S> Translate { Translate { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://translation.googleapis.com/".to_string(), _root_url: "https://translation.googleapis.com/".to_string(), } @@ -141,7 +141,7 @@ impl<'a, S> Translate { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/translate3/src/client.rs b/gen/translate3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/translate3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/translate3/src/lib.rs b/gen/translate3/src/lib.rs index 3c1e05d99d..4aa83567df 100644 --- a/gen/translate3/src/lib.rs +++ b/gen/translate3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Translate* crate version *5.0.2-beta-1+20230106*, where *20230106* is the exact revision of the *translate:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Translate* crate version *5.0.2+20230106*, where *20230106* is the exact revision of the *translate:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Translate* *v3* API can be found at the //! [official documentation site](https://cloud.google.com/translate/docs/quickstarts). diff --git a/gen/urlshortener1-cli/Cargo.toml b/gen/urlshortener1-cli/Cargo.toml index 970fdd4bb0..c996e26134 100644 --- a/gen/urlshortener1-cli/Cargo.toml +++ b/gen/urlshortener1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-urlshortener1-cli" -version = "4.0.1+20150519" +version = "5.0.2+20150519" authors = ["Sebastian Thiel "] description = "A complete library to interact with urlshortener (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/urlshortener1-cli" @@ -20,13 +20,13 @@ name = "urlshortener1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-urlshortener1] path = "../urlshortener1" -version = "4.0.1+20150519" +version = "5.0.2+20150519" + diff --git a/gen/urlshortener1-cli/README.md b/gen/urlshortener1-cli/README.md index 0f59fbd1e8..ccd939b452 100644 --- a/gen/urlshortener1-cli/README.md +++ b/gen/urlshortener1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *urlshortener* API at revision *20150519*. The CLI is at version *4.0.1*. +This documentation was generated from the *urlshortener* API at revision *20150519*. The CLI is at version *5.0.2*. ```bash urlshortener1 [options] diff --git a/gen/urlshortener1-cli/mkdocs.yml b/gen/urlshortener1-cli/mkdocs.yml index d7be2fb5c8..5fd3489439 100644 --- a/gen/urlshortener1-cli/mkdocs.yml +++ b/gen/urlshortener1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: urlshortener v4.0.1+20150519 +site_name: urlshortener v5.0.2+20150519 site_url: http://byron.github.io/google-apis-rs/google-urlshortener1-cli site_description: A complete library to interact with urlshortener (protocol v1) @@ -7,11 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/urlshortener1-cl docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['url_get.md', 'Url', 'Get'] -- ['url_insert.md', 'Url', 'Insert'] -- ['url_list.md', 'Url', 'List'] +nav: +- Home: 'index.md' +- 'Url': + - 'Get': 'url_get.md' + - 'Insert': 'url_insert.md' + - 'List': 'url_list.md' theme: readthedocs diff --git a/gen/urlshortener1-cli/src/client.rs b/gen/urlshortener1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/urlshortener1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// 
U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/urlshortener1-cli/src/main.rs b/gen/urlshortener1-cli/src/main.rs index 66c5b6c122..a1badc001a 100644 --- a/gen/urlshortener1-cli/src/main.rs +++ b/gen/urlshortener1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_urlshortener1::{api, Error, oauth2}; +use google_urlshortener1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -424,7 +423,7 @@ async fn main() { let mut app = App::new("urlshortener1") .author("Sebastian Thiel ") - .version("4.0.1+20150519") + .version("5.0.2+20150519") .about("Lets you create, inspect, and manage goo.gl short URLs") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_urlshortener1_cli") .arg(Arg::with_name("url") diff --git a/gen/urlshortener1/Cargo.toml b/gen/urlshortener1/Cargo.toml index 4007034f62..7a0aae2493 100644 --- a/gen/urlshortener1/Cargo.toml +++ b/gen/urlshortener1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-urlshortener1" -version = "5.0.2-beta-1+20150519" +version = "5.0.2+20150519" authors = ["Sebastian Thiel "] description = "A complete library to interact with urlshortener (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/urlshortener1" homepage = "https://developers.google.com/url-shortener/v1/getting_started" -documentation = "https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519" +documentation = 
"https://docs.rs/google-urlshortener1/5.0.2+20150519" license = "MIT" keywords = ["urlshortener", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/urlshortener1/README.md b/gen/urlshortener1/README.md index 90b7a01ce5..bd4f41d75d 100644 --- a/gen/urlshortener1/README.md +++ b/gen/urlshortener1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-urlshortener1` library allows access to all features of the *Google urlshortener* service. -This documentation was generated from *urlshortener* crate version *5.0.2-beta-1+20150519*, where *20150519* is the exact revision of the *urlshortener:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *urlshortener* crate version *5.0.2+20150519*, where *20150519* is the exact revision of the *urlshortener:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *urlshortener* *v1* API can be found at the [official documentation site](https://developers.google.com/url-shortener/v1/getting_started). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/Urlshortener) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/Urlshortener) ... 
-* [url](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/api::Url) - * [*get*](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/api::UrlGetCall), [*insert*](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/api::UrlInsertCall) and [*list*](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/api::UrlListCall) +* [url](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/api::Url) + * [*get*](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/api::UrlGetCall), [*insert*](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/api::UrlInsertCall) and [*list*](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/api::UrlListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/Urlshortener)** +* **[Hub](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/Urlshortener)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::CallBuilder) -* **[Resources](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::CallBuilder) +* 
**[Resources](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::Part)** + * **[Parts](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -120,17 +120,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -140,29 +140,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::Delegate) to the -[Method Builder](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::Delegate) to the +[Method Builder](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::RequestValue) and -[decodable](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::RequestValue) and +[decodable](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-urlshortener1/5.0.2-beta-1+20150519/google_urlshortener1/client::RequestValue) are moved +* [request values](https://docs.rs/google-urlshortener1/5.0.2+20150519/google_urlshortener1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/urlshortener1/src/api.rs b/gen/urlshortener1/src/api.rs index 764e7f6cb9..8dba41321a 100644 --- a/gen/urlshortener1/src/api.rs +++ b/gen/urlshortener1/src/api.rs @@ -120,7 +120,7 @@ impl<'a, S> Urlshortener { Urlshortener { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/urlshortener/v1/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -131,7 +131,7 @@ impl<'a, S> Urlshortener { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/urlshortener1/src/client.rs b/gen/urlshortener1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/urlshortener1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/urlshortener1/src/lib.rs b/gen/urlshortener1/src/lib.rs index 5077bf0abb..736c961cff 100644 --- a/gen/urlshortener1/src/lib.rs +++ b/gen/urlshortener1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *urlshortener* crate version *5.0.2-beta-1+20150519*, where *20150519* is the exact revision of the *urlshortener:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *urlshortener* crate version *5.0.2+20150519*, where *20150519* is the exact revision of the *urlshortener:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *urlshortener* *v1* API can be found at the //! [official documentation site](https://developers.google.com/url-shortener/v1/getting_started). diff --git a/gen/vault1-cli/Cargo.toml b/gen/vault1-cli/Cargo.toml index c3c41da9cf..c9b989690d 100644 --- a/gen/vault1-cli/Cargo.toml +++ b/gen/vault1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-vault1-cli" -version = "4.0.1+20220222" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Vault (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/vault1-cli" @@ -20,13 +20,13 @@ name = "vault1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-vault1] path = "../vault1" -version = "4.0.1+20220222" +version = "5.0.2+20230123" + diff --git a/gen/vault1-cli/README.md b/gen/vault1-cli/README.md index 2ca5e8777d..1c772ef622 100644 --- a/gen/vault1-cli/README.md +++ b/gen/vault1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *Vault* API at revision *20220222*. The CLI is at version *4.0.1*. +This documentation was generated from the *Vault* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash vault1 [options] diff --git a/gen/vault1-cli/mkdocs.yml b/gen/vault1-cli/mkdocs.yml index 5e9c7b6fe5..4788c9a531 100644 --- a/gen/vault1-cli/mkdocs.yml +++ b/gen/vault1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Vault v4.0.1+20220222 +site_name: Vault v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-vault1-cli site_description: A complete library to interact with Vault (protocol v1) @@ -7,41 +7,43 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/vault1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['matters_add-permissions.md', 'Matters', 'Add Permissions'] -- ['matters_close.md', 'Matters', 'Close'] -- ['matters_count.md', 'Matters', 'Count'] -- ['matters_create.md', 'Matters', 'Create'] -- ['matters_delete.md', 'Matters', 'Delete'] -- ['matters_exports-create.md', 'Matters', 'Exports Create'] -- ['matters_exports-delete.md', 'Matters', 'Exports Delete'] -- ['matters_exports-get.md', 'Matters', 'Exports Get'] -- ['matters_exports-list.md', 'Matters', 'Exports List'] -- ['matters_get.md', 'Matters', 'Get'] -- ['matters_holds-accounts-create.md', 'Matters', 'Holds Accounts Create'] -- ['matters_holds-accounts-delete.md', 'Matters', 'Holds Accounts Delete'] -- ['matters_holds-accounts-list.md', 'Matters', 'Holds Accounts List'] -- ['matters_holds-add-held-accounts.md', 'Matters', 'Holds Add Held Accounts'] -- ['matters_holds-create.md', 'Matters', 'Holds Create'] -- ['matters_holds-delete.md', 'Matters', 'Holds Delete'] -- ['matters_holds-get.md', 'Matters', 'Holds Get'] -- ['matters_holds-list.md', 'Matters', 'Holds List'] -- ['matters_holds-remove-held-accounts.md', 'Matters', 'Holds Remove Held Accounts'] -- ['matters_holds-update.md', 'Matters', 'Holds Update'] -- ['matters_list.md', 'Matters', 
'List'] -- ['matters_remove-permissions.md', 'Matters', 'Remove Permissions'] -- ['matters_reopen.md', 'Matters', 'Reopen'] -- ['matters_saved-queries-create.md', 'Matters', 'Saved Queries Create'] -- ['matters_saved-queries-delete.md', 'Matters', 'Saved Queries Delete'] -- ['matters_saved-queries-get.md', 'Matters', 'Saved Queries Get'] -- ['matters_saved-queries-list.md', 'Matters', 'Saved Queries List'] -- ['matters_undelete.md', 'Matters', 'Undelete'] -- ['matters_update.md', 'Matters', 'Update'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] +nav: +- Home: 'index.md' +- 'Matters': + - 'Add Permissions': 'matters_add-permissions.md' + - 'Close': 'matters_close.md' + - 'Count': 'matters_count.md' + - 'Create': 'matters_create.md' + - 'Delete': 'matters_delete.md' + - 'Exports Create': 'matters_exports-create.md' + - 'Exports Delete': 'matters_exports-delete.md' + - 'Exports Get': 'matters_exports-get.md' + - 'Exports List': 'matters_exports-list.md' + - 'Get': 'matters_get.md' + - 'Holds Accounts Create': 'matters_holds-accounts-create.md' + - 'Holds Accounts Delete': 'matters_holds-accounts-delete.md' + - 'Holds Accounts List': 'matters_holds-accounts-list.md' + - 'Holds Add Held Accounts': 'matters_holds-add-held-accounts.md' + - 'Holds Create': 'matters_holds-create.md' + - 'Holds Delete': 'matters_holds-delete.md' + - 'Holds Get': 'matters_holds-get.md' + - 'Holds List': 'matters_holds-list.md' + - 'Holds Remove Held Accounts': 'matters_holds-remove-held-accounts.md' + - 'Holds Update': 'matters_holds-update.md' + - 'List': 'matters_list.md' + - 'Remove Permissions': 'matters_remove-permissions.md' + - 'Reopen': 'matters_reopen.md' + - 'Saved Queries Create': 'matters_saved-queries-create.md' + - 'Saved Queries Delete': 'matters_saved-queries-delete.md' + - 'Saved Queries Get': 'matters_saved-queries-get.md' + 
- 'Saved Queries List': 'matters_saved-queries-list.md' + - 'Undelete': 'matters_undelete.md' + - 'Update': 'matters_update.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' theme: readthedocs diff --git a/gen/vault1-cli/src/client.rs b/gen/vault1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/vault1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/vault1-cli/src/main.rs b/gen/vault1-cli/src/main.rs index 7baf8ebf30..a2dff3adb9 100644 --- a/gen/vault1-cli/src/main.rs +++ b/gen/vault1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_vault1::{api, Error, oauth2}; +use google_vault1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -249,6 +248,7 @@ where "query.account-info.emails" => Some(("query.accountInfo.emails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "query.corpus" => Some(("query.corpus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.data-scope" => Some(("query.dataScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "query.drive-options.client-side-encrypted-option" => Some(("query.driveOptions.clientSideEncryptedOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.drive-options.include-shared-drives" => Some(("query.driveOptions.includeSharedDrives", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query.drive-options.include-team-drives" => Some(("query.driveOptions.includeTeamDrives", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query.drive-options.version-date" => Some(("query.driveOptions.versionDate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -260,6 +260,7 @@ where "query.org-unit-info.org-unit-id" => 
Some(("query.orgUnitInfo.orgUnitId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.search-method" => Some(("query.searchMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.shared-drive-info.shared-drive-ids" => Some(("query.sharedDriveInfo.sharedDriveIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "query.sites-url-info.urls" => Some(("query.sitesUrlInfo.urls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "query.start-time" => Some(("query.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.team-drive-info.team-drive-ids" => Some(("query.teamDriveInfo.teamDriveIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "query.terms" => Some(("query.terms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -267,7 +268,7 @@ where "query.voice-options.covered-data" => Some(("query.voiceOptions.coveredData", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "view" => Some(("view", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-info", "corpus", "covered-data", "data-scope", "drive-options", "emails", "end-time", "exclude-drafts", "hangouts-chat-info", "hangouts-chat-options", "include-rooms", "include-shared-drives", "include-team-drives", "mail-options", "method", "org-unit-id", "org-unit-info", "query", "room-id", "search-method", "shared-drive-ids", "shared-drive-info", "start-time", "team-drive-ids", "team-drive-info", "terms", "time-zone", "version-date", "view", "voice-options"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-info", "client-side-encrypted-option", "corpus", "covered-data", "data-scope", "drive-options", "emails", "end-time", "exclude-drafts", "hangouts-chat-info", "hangouts-chat-options", "include-rooms", "include-shared-drives", 
"include-team-drives", "mail-options", "method", "org-unit-id", "org-unit-info", "query", "room-id", "search-method", "shared-drive-ids", "shared-drive-info", "sites-url-info", "start-time", "team-drive-ids", "team-drive-info", "terms", "time-zone", "urls", "version-date", "view", "voice-options"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -505,6 +506,7 @@ where "query.account-info.emails" => Some(("query.accountInfo.emails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "query.corpus" => Some(("query.corpus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.data-scope" => Some(("query.dataScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "query.drive-options.client-side-encrypted-option" => Some(("query.driveOptions.clientSideEncryptedOption", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.drive-options.include-shared-drives" => Some(("query.driveOptions.includeSharedDrives", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query.drive-options.include-team-drives" => Some(("query.driveOptions.includeTeamDrives", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query.drive-options.version-date" => Some(("query.driveOptions.versionDate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -516,6 +518,7 @@ where "query.org-unit-info.org-unit-id" => Some(("query.orgUnitInfo.orgUnitId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.search-method" => Some(("query.searchMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.shared-drive-info.shared-drive-ids" => Some(("query.sharedDriveInfo.sharedDriveIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "query.sites-url-info.urls" => Some(("query.sitesUrlInfo.urls", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Vec })), "query.start-time" => Some(("query.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.team-drive-info.team-drive-ids" => Some(("query.teamDriveInfo.teamDriveIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "query.terms" => Some(("query.terms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -528,7 +531,7 @@ where "stats.total-artifact-count" => Some(("stats.totalArtifactCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-info", "corpus", "covered-data", "create-time", "data-scope", "display-name", "drive-options", "email", "emails", "end-time", "exclude-drafts", "export-format", "export-options", "exported-artifact-count", "groups-options", "hangouts-chat-info", "hangouts-chat-options", "id", "include-access-info", "include-rooms", "include-shared-drives", "include-team-drives", "mail-options", "matter-id", "method", "name", "org-unit-id", "org-unit-info", "query", "region", "requester", "room-id", "search-method", "shared-drive-ids", "shared-drive-info", "show-confidential-mode-content", "size-in-bytes", "start-time", "stats", "status", "team-drive-ids", "team-drive-info", "terms", "time-zone", "total-artifact-count", "use-new-export", "version-date", "voice-options"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-info", "client-side-encrypted-option", "corpus", "covered-data", "create-time", "data-scope", "display-name", "drive-options", "email", "emails", "end-time", "exclude-drafts", "export-format", "export-options", "exported-artifact-count", "groups-options", "hangouts-chat-info", "hangouts-chat-options", "id", "include-access-info", "include-rooms", "include-shared-drives", "include-team-drives", "mail-options", 
"matter-id", "method", "name", "org-unit-id", "org-unit-info", "query", "region", "requester", "room-id", "search-method", "shared-drive-ids", "shared-drive-info", "show-confidential-mode-content", "sites-url-info", "size-in-bytes", "start-time", "stats", "status", "team-drive-ids", "team-drive-info", "terms", "time-zone", "total-artifact-count", "urls", "use-new-export", "version-date", "voice-options"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -702,7 +705,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1307,7 +1310,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1557,7 +1560,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1804,6 +1807,7 @@ where "query.account-info.emails" => Some(("query.accountInfo.emails", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "query.corpus" => Some(("query.corpus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.data-scope" => Some(("query.dataScope", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "query.drive-options.client-side-encrypted-option" => Some(("query.driveOptions.clientSideEncryptedOption", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "query.drive-options.include-shared-drives" => Some(("query.driveOptions.includeSharedDrives", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query.drive-options.include-team-drives" => Some(("query.driveOptions.includeTeamDrives", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "query.drive-options.version-date" => Some(("query.driveOptions.versionDate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1815,6 +1819,7 @@ where "query.org-unit-info.org-unit-id" => Some(("query.orgUnitInfo.orgUnitId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.search-method" => Some(("query.searchMethod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.shared-drive-info.shared-drive-ids" => Some(("query.sharedDriveInfo.sharedDriveIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "query.sites-url-info.urls" => Some(("query.sitesUrlInfo.urls", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "query.start-time" => Some(("query.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "query.team-drive-info.team-drive-ids" => Some(("query.teamDriveInfo.teamDriveIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "query.terms" => Some(("query.terms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1822,7 +1827,7 @@ where "query.voice-options.covered-data" => Some(("query.voiceOptions.coveredData", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "saved-query-id" => Some(("savedQueryId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-info", "corpus", "covered-data", "create-time", "data-scope", "display-name", "drive-options", "emails", "end-time", "exclude-drafts", 
"hangouts-chat-info", "hangouts-chat-options", "include-rooms", "include-shared-drives", "include-team-drives", "mail-options", "matter-id", "method", "org-unit-id", "org-unit-info", "query", "room-id", "saved-query-id", "search-method", "shared-drive-ids", "shared-drive-info", "start-time", "team-drive-ids", "team-drive-info", "terms", "time-zone", "version-date", "voice-options"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-info", "client-side-encrypted-option", "corpus", "covered-data", "create-time", "data-scope", "display-name", "drive-options", "emails", "end-time", "exclude-drafts", "hangouts-chat-info", "hangouts-chat-options", "include-rooms", "include-shared-drives", "include-team-drives", "mail-options", "matter-id", "method", "org-unit-id", "org-unit-info", "query", "room-id", "saved-query-id", "search-method", "shared-drive-ids", "shared-drive-info", "sites-url-info", "start-time", "team-drive-ids", "team-drive-info", "terms", "time-zone", "urls", "version-date", "voice-options"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1996,7 +2001,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2409,7 +2414,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -3549,7 +3554,7 @@ async fn main() { let mut app = App::new("vault1") .author("Sebastian Thiel ") - .version("4.0.1+20220222") + .version("5.0.2+20230123") .about("Retention and 
eDiscovery for Google Workspace. To work with Vault resources, the account must have the [required Vault privileges](https://support.google.com/vault/answer/2799699) and access to the matter. To access a matter, the account must have created the matter, have the matter shared with them, or have the **View All Matters** privilege. For example, to download an export, an account needs the **Manage Exports** privilege and the matter shared with them. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_vault1_cli") .arg(Arg::with_name("url") diff --git a/gen/vault1/Cargo.toml b/gen/vault1/Cargo.toml index 491348fe1d..a3a58598bc 100644 --- a/gen/vault1/Cargo.toml +++ b/gen/vault1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-vault1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Vault (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/vault1" homepage = "https://developers.google.com/vault" -documentation = "https://docs.rs/google-vault1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-vault1/5.0.2+20230123" license = "MIT" keywords = ["vault", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/vault1/README.md b/gen/vault1/README.md index 7725aff0b0..aa7dfb316b 100644 --- a/gen/vault1/README.md +++ b/gen/vault1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-vault1` library allows access to all features of the *Google Vault* service. -This documentation was generated from *Vault* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *vault:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Vault* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *vault:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Vault* *v1* API can be found at the [official documentation site](https://developers.google.com/vault). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/Vault) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/Vault) ... -* [matters](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::Matter) - * [*add permissions*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterAddPermissionCall), [*close*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterCloseCall), [*count*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterCountCall), [*create*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterCreateCall), [*delete*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterDeleteCall), [*exports create*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterExportCreateCall), [*exports delete*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterExportDeleteCall), [*exports get*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterExportGetCall), [*exports list*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterExportListCall), [*get*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterGetCall), [*holds accounts create*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldAccountCreateCall), [*holds accounts 
delete*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldAccountDeleteCall), [*holds accounts list*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldAccountListCall), [*holds add held accounts*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldAddHeldAccountCall), [*holds create*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldCreateCall), [*holds delete*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldDeleteCall), [*holds get*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldGetCall), [*holds list*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldListCall), [*holds remove held accounts*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldRemoveHeldAccountCall), [*holds update*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterHoldUpdateCall), [*list*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterListCall), [*remove permissions*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterRemovePermissionCall), [*reopen*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterReopenCall), [*saved queries create*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterSavedQueryCreateCall), [*saved queries delete*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterSavedQueryDeleteCall), [*saved queries get*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterSavedQueryGetCall), [*saved queries list*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterSavedQueryListCall), [*undelete*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterUndeleteCall) and 
[*update*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::MatterUpdateCall) -* [operations](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::Operation) - * [*cancel*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::OperationCancelCall), [*delete*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::OperationDeleteCall), [*get*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::OperationGetCall) and [*list*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/api::OperationListCall) +* [matters](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::Matter) + * [*add permissions*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterAddPermissionCall), [*close*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterCloseCall), [*count*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterCountCall), [*create*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterCreateCall), [*delete*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterDeleteCall), [*exports create*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterExportCreateCall), [*exports delete*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterExportDeleteCall), [*exports get*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterExportGetCall), [*exports list*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterExportListCall), [*get*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterGetCall), [*holds accounts create*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldAccountCreateCall), [*holds accounts delete*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldAccountDeleteCall), [*holds accounts 
list*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldAccountListCall), [*holds add held accounts*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldAddHeldAccountCall), [*holds create*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldCreateCall), [*holds delete*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldDeleteCall), [*holds get*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldGetCall), [*holds list*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldListCall), [*holds remove held accounts*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldRemoveHeldAccountCall), [*holds update*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterHoldUpdateCall), [*list*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterListCall), [*remove permissions*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterRemovePermissionCall), [*reopen*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterReopenCall), [*saved queries create*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterSavedQueryCreateCall), [*saved queries delete*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterSavedQueryDeleteCall), [*saved queries get*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterSavedQueryGetCall), [*saved queries list*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterSavedQueryListCall), [*undelete*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterUndeleteCall) and [*update*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::MatterUpdateCall) +* [operations](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::Operation) + * 
[*cancel*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::OperationCancelCall), [*delete*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::OperationDeleteCall), [*get*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::OperationGetCall) and [*list*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/api::OperationListCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/Vault)** +* **[Hub](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/Vault)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::CallBuilder) -* **[Resources](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::CallBuilder) +* **[Resources](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::Part)** + * **[Parts](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -151,17 +151,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -171,29 +171,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::Delegate) to the -[Method Builder](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::Delegate) to the +[Method Builder](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::RequestValue) and -[decodable](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::RequestValue) and +[decodable](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-vault1/5.0.2-beta-1+20230123/google_vault1/client::RequestValue) are moved +* [request values](https://docs.rs/google-vault1/5.0.2+20230123/google_vault1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/vault1/src/api.rs b/gen/vault1/src/api.rs index 77a4244e99..ca740276f3 100644 --- a/gen/vault1/src/api.rs +++ b/gen/vault1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Vault { Vault { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://vault.googleapis.com/".to_string(), _root_url: "https://vault.googleapis.com/".to_string(), } @@ -140,7 +140,7 @@ impl<'a, S> Vault { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/vault1/src/client.rs b/gen/vault1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/vault1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/vault1/src/lib.rs b/gen/vault1/src/lib.rs index 1331f689cd..bd7e043f4b 100644 --- a/gen/vault1/src/lib.rs +++ b/gen/vault1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Vault* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *vault:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Vault* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *vault:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Vault* *v1* API can be found at the //! [official documentation site](https://developers.google.com/vault). diff --git a/gen/vectortile1-cli/Cargo.toml b/gen/vectortile1-cli/Cargo.toml index fbda3014e6..bbd2a6e54f 100644 --- a/gen/vectortile1-cli/Cargo.toml +++ b/gen/vectortile1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-vectortile1-cli" -version = "4.0.1+20210331" +version = "5.0.2+20210331" authors = ["Sebastian Thiel "] description = "A complete library to interact with Semantic Tile (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/vectortile1-cli" @@ -20,13 +20,13 @@ name = "vectortile1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-vectortile1] path = "../vectortile1" -version = "4.0.1+20210331" +version = "5.0.2+20210331" + diff --git a/gen/vectortile1-cli/README.md b/gen/vectortile1-cli/README.md index 02d6a2639e..ff73826943 100644 --- a/gen/vectortile1-cli/README.md +++ b/gen/vectortile1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This 
documentation was generated from the *Semantic Tile* API at revision *20210331*. The CLI is at version *4.0.1*. +This documentation was generated from the *Semantic Tile* API at revision *20210331*. The CLI is at version *5.0.2*. ```bash vectortile1 [options] diff --git a/gen/vectortile1-cli/mkdocs.yml b/gen/vectortile1-cli/mkdocs.yml index 0b69035faa..0e98159c13 100644 --- a/gen/vectortile1-cli/mkdocs.yml +++ b/gen/vectortile1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Semantic Tile v4.0.1+20210331 +site_name: Semantic Tile v5.0.2+20210331 site_url: http://byron.github.io/google-apis-rs/google-vectortile1-cli site_description: A complete library to interact with Semantic Tile (protocol v1) @@ -7,10 +7,12 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/vectortile1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['featuretiles_get.md', 'Featuretiles', 'Get'] -- ['terraintiles_get.md', 'Terraintiles', 'Get'] +nav: +- Home: 'index.md' +- 'Featuretiles': + - 'Get': 'featuretiles_get.md' +- 'Terraintiles': + - 'Get': 'terraintiles_get.md' theme: readthedocs diff --git a/gen/vectortile1-cli/src/client.rs b/gen/vectortile1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/vectortile1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - 
Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/vectortile1-cli/src/main.rs b/gen/vectortile1-cli/src/main.rs index cc1383d6eb..af42c91921 100644 --- a/gen/vectortile1-cli/src/main.rs +++ b/gen/vectortile1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_vectortile1::{api, Error, oauth2}; +use google_vectortile1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -64,22 +63,22 @@ where call = call.language_code(value.unwrap_or("")); }, "enable-unclipped-buildings" => { - call = call.enable_unclipped_buildings(arg_from_str(value.unwrap_or("false"), err, "enable-unclipped-buildings", "boolean")); + call = call.enable_unclipped_buildings( value.map(|v| arg_from_str(v, err, "enable-unclipped-buildings", "boolean")).unwrap_or(false)); }, "enable-private-roads" => { - call = call.enable_private_roads(arg_from_str(value.unwrap_or("false"), err, "enable-private-roads", "boolean")); + call = call.enable_private_roads( value.map(|v| arg_from_str(v, err, "enable-private-roads", "boolean")).unwrap_or(false)); }, "enable-political-features" => { - call = call.enable_political_features(arg_from_str(value.unwrap_or("false"), err, "enable-political-features", "boolean")); + call = call.enable_political_features( value.map(|v| arg_from_str(v, err, "enable-political-features", "boolean")).unwrap_or(false)); }, "enable-modeled-volumes" => { - call = call.enable_modeled_volumes(arg_from_str(value.unwrap_or("false"), err, "enable-modeled-volumes", "boolean")); + 
call = call.enable_modeled_volumes( value.map(|v| arg_from_str(v, err, "enable-modeled-volumes", "boolean")).unwrap_or(false)); }, "enable-feature-names" => { - call = call.enable_feature_names(arg_from_str(value.unwrap_or("false"), err, "enable-feature-names", "boolean")); + call = call.enable_feature_names( value.map(|v| arg_from_str(v, err, "enable-feature-names", "boolean")).unwrap_or(false)); }, "enable-detailed-highway-types" => { - call = call.enable_detailed_highway_types(arg_from_str(value.unwrap_or("false"), err, "enable-detailed-highway-types", "boolean")); + call = call.enable_detailed_highway_types( value.map(|v| arg_from_str(v, err, "enable-detailed-highway-types", "boolean")).unwrap_or(false)); }, "client-tile-version-id" => { call = call.client_tile_version_id(value.unwrap_or("")); @@ -106,7 +105,7 @@ where call = call.client_info_api_client(value.unwrap_or("")); }, "always-include-building-footprints" => { - call = call.always_include_building_footprints(arg_from_str(value.unwrap_or("false"), err, "always-include-building-footprints", "boolean")); + call = call.always_include_building_footprints( value.map(|v| arg_from_str(v, err, "always-include-building-footprints", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -162,10 +161,10 @@ where call = call.add_terrain_formats(value.unwrap_or("")); }, "min-elevation-resolution-cells" => { - call = call.min_elevation_resolution_cells(arg_from_str(value.unwrap_or("-0"), err, "min-elevation-resolution-cells", "integer")); + call = call.min_elevation_resolution_cells( value.map(|v| arg_from_str(v, err, "min-elevation-resolution-cells", "int32")).unwrap_or(-0)); }, "max-elevation-resolution-cells" => { - call = call.max_elevation_resolution_cells(arg_from_str(value.unwrap_or("-0"), err, "max-elevation-resolution-cells", "integer")); + call = call.max_elevation_resolution_cells( value.map(|v| arg_from_str(v, err, "max-elevation-resolution-cells", "int32")).unwrap_or(-0)); }, 
"client-info-user-id" => { call = call.client_info_user_id(value.unwrap_or("")); @@ -189,7 +188,7 @@ where call = call.client_info_api_client(value.unwrap_or("")); }, "altitude-precision-centimeters" => { - call = call.altitude_precision_centimeters(arg_from_str(value.unwrap_or("-0"), err, "altitude-precision-centimeters", "integer")); + call = call.altitude_precision_centimeters( value.map(|v| arg_from_str(v, err, "altitude-precision-centimeters", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -389,7 +388,7 @@ async fn main() { let mut app = App::new("vectortile1") .author("Sebastian Thiel ") - .version("4.0.1+20210331") + .version("5.0.2+20210331") .about("Serves vector tiles containing geospatial data. ") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_vectortile1_cli") .arg(Arg::with_name("folder") diff --git a/gen/vectortile1/Cargo.toml b/gen/vectortile1/Cargo.toml index 592ebdae91..4c6fb671d3 100644 --- a/gen/vectortile1/Cargo.toml +++ b/gen/vectortile1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-vectortile1" -version = "5.0.2-beta-1+20210331" +version = "5.0.2+20210331" authors = ["Sebastian Thiel "] description = "A complete library to interact with Semantic Tile (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/vectortile1" homepage = "https://developers.google.com/maps/contact-sales/" -documentation = "https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331" +documentation = "https://docs.rs/google-vectortile1/5.0.2+20210331" license = "MIT" keywords = ["vectortile", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/vectortile1/README.md b/gen/vectortile1/README.md index cc8d2ae2cd..9888748242 100644 --- a/gen/vectortile1/README.md +++ b/gen/vectortile1/README.md @@ -5,18 +5,18 @@ DO NOT EDIT ! --> The `google-vectortile1` library allows access to all features of the *Google Semantic Tile* service. 
-This documentation was generated from *Semantic Tile* crate version *5.0.2-beta-1+20210331*, where *20210331* is the exact revision of the *vectortile:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Semantic Tile* crate version *5.0.2+20210331*, where *20210331* is the exact revision of the *vectortile:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Semantic Tile* *v1* API can be found at the [official documentation site](https://developers.google.com/maps/contact-sales/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/SemanticTile) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/SemanticTile) ... * featuretiles - * [*get*](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/api::FeaturetileGetCall) + * [*get*](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/api::FeaturetileGetCall) * terraintiles - * [*get*](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/api::TerraintileGetCall) + * [*get*](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/api::TerraintileGetCall) @@ -25,17 +25,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/SemanticTile)** +* **[Hub](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/SemanticTile)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::MethodsBuilder) which in 
turn - allow access to individual [*Call Builders*](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::CallBuilder) -* **[Resources](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::CallBuilder) +* **[Resources](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::Part)** + * **[Parts](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -137,17 +137,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -157,29 +157,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::Delegate) to the -[Method Builder](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::Delegate) to the +[Method Builder](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::RequestValue) and -[decodable](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::RequestValue) and +[decodable](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-vectortile1/5.0.2-beta-1+20210331/google_vectortile1/client::RequestValue) are moved +* [request values](https://docs.rs/google-vectortile1/5.0.2+20210331/google_vectortile1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/vectortile1/src/api.rs b/gen/vectortile1/src/api.rs index 7c5fe12513..b13ec75713 100644 --- a/gen/vectortile1/src/api.rs +++ b/gen/vectortile1/src/api.rs @@ -114,7 +114,7 @@ impl<'a, S> SemanticTile { SemanticTile { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://vectortile.googleapis.com/".to_string(), _root_url: "https://vectortile.googleapis.com/".to_string(), } @@ -128,7 +128,7 @@ impl<'a, S> SemanticTile { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/vectortile1/src/client.rs b/gen/vectortile1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/vectortile1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/vectortile1/src/lib.rs b/gen/vectortile1/src/lib.rs index a550a4a09d..589e0969d6 100644 --- a/gen/vectortile1/src/lib.rs +++ b/gen/vectortile1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Semantic Tile* crate version *5.0.2-beta-1+20210331*, where *20210331* is the exact revision of the *vectortile:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Semantic Tile* crate version *5.0.2+20210331*, where *20210331* is the exact revision of the *vectortile:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Semantic Tile* *v1* API can be found at the //! [official documentation site](https://developers.google.com/maps/contact-sales/). diff --git a/gen/verifiedaccess1-cli/Cargo.toml b/gen/verifiedaccess1-cli/Cargo.toml index 3c32911362..601d7c2f58 100644 --- a/gen/verifiedaccess1-cli/Cargo.toml +++ b/gen/verifiedaccess1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-verifiedaccess1-cli" -version = "4.0.1+20220215" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with verifiedaccess (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/verifiedaccess1-cli" @@ -20,13 +20,13 @@ name = "verifiedaccess1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-verifiedaccess1] path = "../verifiedaccess1" -version = "4.0.1+20220215" +version = "5.0.2+20230117" + diff --git a/gen/verifiedaccess1-cli/README.md b/gen/verifiedaccess1-cli/README.md index 88c0e2644a..94a62fd91c 100644 --- a/gen/verifiedaccess1-cli/README.md +++ b/gen/verifiedaccess1-cli/README.md @@ -25,7 +25,7 @@ Find 
the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *verifiedaccess* API at revision *20220215*. The CLI is at version *4.0.1*. +This documentation was generated from the *verifiedaccess* API at revision *20230117*. The CLI is at version *5.0.2*. ```bash verifiedaccess1 [options] diff --git a/gen/verifiedaccess1-cli/mkdocs.yml b/gen/verifiedaccess1-cli/mkdocs.yml index 7ff2a756bf..2a57d50b8e 100644 --- a/gen/verifiedaccess1-cli/mkdocs.yml +++ b/gen/verifiedaccess1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: verifiedaccess v4.0.1+20220215 +site_name: verifiedaccess v5.0.2+20230117 site_url: http://byron.github.io/google-apis-rs/google-verifiedaccess1-cli site_description: A complete library to interact with verifiedaccess (protocol v1) @@ -7,10 +7,11 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/verifiedaccess1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['challenge_create.md', 'Challenge', 'Create'] -- ['challenge_verify.md', 'Challenge', 'Verify'] +nav: +- Home: 'index.md' +- 'Challenge': + - 'Create': 'challenge_create.md' + - 'Verify': 'challenge_verify.md' theme: readthedocs diff --git a/gen/verifiedaccess1-cli/src/client.rs b/gen/verifiedaccess1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/verifiedaccess1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// 
Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/verifiedaccess1-cli/src/main.rs b/gen/verifiedaccess1-cli/src/main.rs index b836a677a9..1e60e34d45 100644 --- a/gen/verifiedaccess1-cli/src/main.rs +++ b/gen/verifiedaccess1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_verifiedaccess1::{api, Error, oauth2}; +use google_verifiedaccess1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -365,7 +364,7 @@ async fn main() { let mut app = App::new("verifiedaccess1") .author("Sebastian Thiel ") - .version("4.0.1+20220215") + .version("5.0.2+20230117") .about("API for Verified Access chrome extension to provide credential verification for chrome devices connecting to an enterprise network") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_verifiedaccess1_cli") .arg(Arg::with_name("url") diff --git a/gen/verifiedaccess1/Cargo.toml b/gen/verifiedaccess1/Cargo.toml index a55828f84d..26d46a460f 100644 --- a/gen/verifiedaccess1/Cargo.toml +++ b/gen/verifiedaccess1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-verifiedaccess1" -version = "5.0.2-beta-1+20230117" +version = "5.0.2+20230117" authors = ["Sebastian Thiel "] description = "A complete library to interact with verifiedaccess (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/verifiedaccess1" homepage = "https://developers.google.com/chrome/verified-access" -documentation = 
"https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117" +documentation = "https://docs.rs/google-verifiedaccess1/5.0.2+20230117" license = "MIT" keywords = ["verifiedaccess", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/verifiedaccess1/README.md b/gen/verifiedaccess1/README.md index 535eb3365e..76d36c5be1 100644 --- a/gen/verifiedaccess1/README.md +++ b/gen/verifiedaccess1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-verifiedaccess1` library allows access to all features of the *Google verifiedaccess* service. -This documentation was generated from *verifiedaccess* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *verifiedaccess:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *verifiedaccess* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *verifiedaccess:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *verifiedaccess* *v1* API can be found at the [official documentation site](https://developers.google.com/chrome/verified-access). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/Verifiedaccess) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/Verifiedaccess) ... 
-* [challenge](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/api::Challenge) - * [*create*](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/api::ChallengeCreateCall) and [*verify*](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/api::ChallengeVerifyCall) +* [challenge](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/api::Challenge) + * [*create*](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/api::ChallengeCreateCall) and [*verify*](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/api::ChallengeVerifyCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/Verifiedaccess)** +* **[Hub](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/Verifiedaccess)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::CallBuilder) -* **[Resources](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::CallBuilder) +* **[Resources](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::Resource)** * 
primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::Part)** + * **[Parts](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::Delegate) to the -[Method Builder](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::Delegate) to the +[Method Builder](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. 
-The [delegate trait](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::RequestValue) and -[decodable](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::RequestValue) and +[decodable](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. 
+Using [method builders](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-verifiedaccess1/5.0.2-beta-1+20230117/google_verifiedaccess1/client::RequestValue) are moved +* [request values](https://docs.rs/google-verifiedaccess1/5.0.2+20230117/google_verifiedaccess1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/verifiedaccess1/src/api.rs b/gen/verifiedaccess1/src/api.rs index b76ff4baf9..d87663064f 100644 --- a/gen/verifiedaccess1/src/api.rs +++ b/gen/verifiedaccess1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Verifiedaccess { Verifiedaccess { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://verifiedaccess.googleapis.com/".to_string(), _root_url: "https://verifiedaccess.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> Verifiedaccess { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/verifiedaccess1/src/client.rs b/gen/verifiedaccess1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/verifiedaccess1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/verifiedaccess1/src/lib.rs b/gen/verifiedaccess1/src/lib.rs index 520836c057..dd50b07eb9 100644 --- a/gen/verifiedaccess1/src/lib.rs +++ b/gen/verifiedaccess1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *verifiedaccess* crate version *5.0.2-beta-1+20230117*, where *20230117* is the exact revision of the *verifiedaccess:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *verifiedaccess* crate version *5.0.2+20230117*, where *20230117* is the exact revision of the *verifiedaccess:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *verifiedaccess* *v1* API can be found at the //! [official documentation site](https://developers.google.com/chrome/verified-access). diff --git a/gen/versionhistory1-cli/Cargo.toml b/gen/versionhistory1-cli/Cargo.toml index 8e0100a14d..cbeb893289 100644 --- a/gen/versionhistory1-cli/Cargo.toml +++ b/gen/versionhistory1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-versionhistory1-cli" -version = "4.0.1+20220307" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Version History (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/versionhistory1-cli" @@ -20,13 +20,13 @@ name = "versionhistory1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-versionhistory1] path = "../versionhistory1" -version = "4.0.1+20220307" +version = "5.0.2+20230123" + diff --git a/gen/versionhistory1-cli/README.md b/gen/versionhistory1-cli/README.md index 3e5fdfcfc5..cc78ef6804 100644 --- a/gen/versionhistory1-cli/README.md +++ b/gen/versionhistory1-cli/README.md @@ -25,7 
+25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Version History* API at revision *20220307*. The CLI is at version *4.0.1*. +This documentation was generated from the *Version History* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash versionhistory1 [options] diff --git a/gen/versionhistory1-cli/mkdocs.yml b/gen/versionhistory1-cli/mkdocs.yml index 1541623238..bb5aa4be85 100644 --- a/gen/versionhistory1-cli/mkdocs.yml +++ b/gen/versionhistory1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Version History v4.0.1+20220307 +site_name: Version History v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-versionhistory1-cli site_description: A complete library to interact with Version History (protocol v1) @@ -7,12 +7,13 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/versionhistory1- docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['platforms_channels-list.md', 'Platforms', 'Channels List'] -- ['platforms_channels-versions-list.md', 'Platforms', 'Channels Versions List'] -- ['platforms_channels-versions-releases-list.md', 'Platforms', 'Channels Versions Releases List'] -- ['platforms_list.md', 'Platforms', 'List'] +nav: +- Home: 'index.md' +- 'Platforms': + - 'Channels List': 'platforms_channels-list.md' + - 'Channels Versions List': 'platforms_channels-versions-list.md' + - 'Channels Versions Releases List': 'platforms_channels-versions-releases-list.md' + - 'List': 'platforms_list.md' theme: readthedocs diff --git a/gen/versionhistory1-cli/src/client.rs b/gen/versionhistory1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/versionhistory1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json 
as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/versionhistory1-cli/src/main.rs b/gen/versionhistory1-cli/src/main.rs index e126098de5..92fffcb97e 100644 --- a/gen/versionhistory1-cli/src/main.rs +++ b/gen/versionhistory1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_versionhistory1::{api, Error, oauth2}; +use google_versionhistory1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -117,7 +116,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -179,7 +178,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -241,7 +240,7 @@ where call = call.page_token(value.unwrap_or("")); }, 
"page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -480,7 +479,7 @@ async fn main() { let mut app = App::new("versionhistory1") .author("Sebastian Thiel ") - .version("4.0.1+20220307") + .version("5.0.2+20230123") .about("Version History API - Prod") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_versionhistory1_cli") .arg(Arg::with_name("folder") diff --git a/gen/versionhistory1/Cargo.toml b/gen/versionhistory1/Cargo.toml index c0bc4d9106..517fb90abd 100644 --- a/gen/versionhistory1/Cargo.toml +++ b/gen/versionhistory1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-versionhistory1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with Version History (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/versionhistory1" homepage = "https://developers.chrome.com/versionhistory" -documentation = "https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-versionhistory1/5.0.2+20230123" license = "MIT" keywords = ["versionhistory", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/versionhistory1/README.md b/gen/versionhistory1/README.md index edafb12948..0a69b30239 100644 --- a/gen/versionhistory1/README.md +++ b/gen/versionhistory1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-versionhistory1` library allows access to all features of the *Google Version History* service. -This documentation was generated from *Version History* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *versionhistory:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Version History* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *versionhistory:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Version History* *v1* API can be found at the [official documentation site](https://developers.chrome.com/versionhistory). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/VersionHistory) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/VersionHistory) ... -* [platforms](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/api::Platform) - * [*channels list*](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/api::PlatformChannelListCall), [*channels versions list*](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/api::PlatformChannelVersionListCall), [*channels versions releases list*](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/api::PlatformChannelVersionReleaseListCall) and [*list*](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/api::PlatformListCall) +* [platforms](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/api::Platform) + * [*channels list*](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/api::PlatformChannelListCall), [*channels versions list*](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/api::PlatformChannelVersionListCall), [*channels versions releases list*](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/api::PlatformChannelVersionReleaseListCall) and 
[*list*](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/api::PlatformListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/VersionHistory)** +* **[Hub](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/VersionHistory)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::CallBuilder) -* **[Resources](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::CallBuilder) +* **[Resources](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::Part)** + * **[Parts](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::CallBuilder)** +* 
**[Activities](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -125,17 +125,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -145,29 +145,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::Delegate) to the -[Method Builder](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::Delegate) to the +[Method Builder](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::RequestValue) and -[decodable](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::RequestValue) and +[decodable](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-versionhistory1/5.0.2-beta-1+20230123/google_versionhistory1/client::RequestValue) are moved +* [request values](https://docs.rs/google-versionhistory1/5.0.2+20230123/google_versionhistory1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/versionhistory1/src/api.rs b/gen/versionhistory1/src/api.rs index 7787c88e0d..cc24fde2e3 100644 --- a/gen/versionhistory1/src/api.rs +++ b/gen/versionhistory1/src/api.rs @@ -101,7 +101,7 @@ impl<'a, S> VersionHistory { VersionHistory { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://versionhistory.googleapis.com/".to_string(), _root_url: "https://versionhistory.googleapis.com/".to_string(), } @@ -112,7 +112,7 @@ impl<'a, S> VersionHistory { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/versionhistory1/src/client.rs b/gen/versionhistory1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/versionhistory1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/versionhistory1/src/lib.rs b/gen/versionhistory1/src/lib.rs index ece19a7bd0..ffbb8671c3 100644 --- a/gen/versionhistory1/src/lib.rs +++ b/gen/versionhistory1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Version History* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *versionhistory:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Version History* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *versionhistory:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Version History* *v1* API can be found at the //! [official documentation site](https://developers.chrome.com/versionhistory). diff --git a/gen/videointelligence1-cli/Cargo.toml b/gen/videointelligence1-cli/Cargo.toml index 719cc86ce9..2d0fdf33d0 100644 --- a/gen/videointelligence1-cli/Cargo.toml +++ b/gen/videointelligence1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-videointelligence1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Video Intelligence (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/videointelligence1-cli" @@ -20,13 +20,13 @@ name = "videointelligence1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-videointelligence1] path = "../videointelligence1" -version = "4.0.1+20220225" +version = "5.0.2+20230119" + diff --git a/gen/videointelligence1-cli/README.md b/gen/videointelligence1-cli/README.md index d0a1ac8f44..62f6121aee 100644 --- a/gen/videointelligence1-cli/README.md +++ 
b/gen/videointelligence1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Video Intelligence* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Video Intelligence* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash videointelligence1 [options] diff --git a/gen/videointelligence1-cli/mkdocs.yml b/gen/videointelligence1-cli/mkdocs.yml index 36dee4738a..6784245301 100644 --- a/gen/videointelligence1-cli/mkdocs.yml +++ b/gen/videointelligence1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Video Intelligence v4.0.1+20220225 +site_name: Cloud Video Intelligence v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-videointelligence1-cli site_description: A complete library to interact with Cloud Video Intelligence (protocol v1) @@ -7,16 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/videointelligenc docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['operations_projects-locations-operations-cancel.md', 'Operations', 'Projects Locations Operations Cancel'] -- ['operations_projects-locations-operations-delete.md', 'Operations', 'Projects Locations Operations Delete'] -- ['operations_projects-locations-operations-get.md', 'Operations', 'Projects Locations Operations Get'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['videos_annotate.md', 'Videos', 'Annotate'] +nav: +- Home: 'index.md' +- 'Operations': + - 'Projects Locations Operations Cancel': 'operations_projects-locations-operations-cancel.md' + - 'Projects 
Locations Operations Delete': 'operations_projects-locations-operations-delete.md' + - 'Projects Locations Operations Get': 'operations_projects-locations-operations-get.md' +- 'Projects': + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' +- 'Videos': + - 'Annotate': 'videos_annotate.md' theme: readthedocs diff --git a/gen/videointelligence1-cli/src/client.rs b/gen/videointelligence1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/videointelligence1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/videointelligence1-cli/src/main.rs b/gen/videointelligence1-cli/src/main.rs index 8bdbc8c9d2..e878fb6a92 100644 --- a/gen/videointelligence1-cli/src/main.rs +++ b/gen/videointelligence1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_videointelligence1::{api, Error, oauth2}; +use google_videointelligence1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -405,7 +404,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -891,7 +890,7 @@ async fn main() { let mut app = App::new("videointelligence1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230119") .about("Detects objects, explicit content, and scene changes in videos. It also specifies the region for annotation and transcribes speech to text. 
Supports both asynchronous API and streaming API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_videointelligence1_cli") .arg(Arg::with_name("url") diff --git a/gen/videointelligence1/Cargo.toml b/gen/videointelligence1/Cargo.toml index 2f942f149d..2a492176ab 100644 --- a/gen/videointelligence1/Cargo.toml +++ b/gen/videointelligence1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-videointelligence1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Video Intelligence (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/videointelligence1" homepage = "https://cloud.google.com/video-intelligence/docs/" -documentation = "https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-videointelligence1/5.0.2+20230119" license = "MIT" keywords = ["videointelligence", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/videointelligence1/README.md b/gen/videointelligence1/README.md index 91b875dff4..98002e2da2 100644 --- a/gen/videointelligence1/README.md +++ b/gen/videointelligence1/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-videointelligence1` library allows access to all features of the *Google Cloud Video Intelligence* service. -This documentation was generated from *Cloud Video Intelligence* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *videointelligence:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Video Intelligence* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *videointelligence:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. 
Everything else about the *Cloud Video Intelligence* *v1* API can be found at the [official documentation site](https://cloud.google.com/video-intelligence/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/CloudVideoIntelligence) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/CloudVideoIntelligence) ... * operations - * [*projects locations operations cancel*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/api::OperationProjectLocationOperationCancelCall), [*projects locations operations delete*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/api::OperationProjectLocationOperationDeleteCall) and [*projects locations operations get*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/api::OperationProjectLocationOperationGetCall) + * [*projects locations operations cancel*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/api::OperationProjectLocationOperationCancelCall), [*projects locations operations delete*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/api::OperationProjectLocationOperationDeleteCall) and [*projects locations operations get*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/api::OperationProjectLocationOperationGetCall) * projects - * [*locations operations cancel*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/api::ProjectLocationOperationDeleteCall), [*locations operations 
get*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/api::ProjectLocationOperationListCall) + * [*locations operations cancel*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/api::ProjectLocationOperationGetCall) and [*locations operations list*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/api::ProjectLocationOperationListCall) * videos - * [*annotate*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/api::VideoAnnotateCall) + * [*annotate*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/api::VideoAnnotateCall) @@ -27,17 +27,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/CloudVideoIntelligence)** +* **[Hub](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/CloudVideoIntelligence)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::CallBuilder) -* 
**[Resources](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::CallBuilder) +* **[Resources](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::Part)** + * **[Parts](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::Delegate) to the -[Method Builder](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::Delegate) to the +[Method Builder](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::RequestValue) and -[decodable](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::RequestValue) and +[decodable](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-videointelligence1/5.0.2-beta-1+20230119/google_videointelligence1/client::RequestValue) are moved +* [request values](https://docs.rs/google-videointelligence1/5.0.2+20230119/google_videointelligence1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/videointelligence1/src/api.rs b/gen/videointelligence1/src/api.rs index a80cbdf50a..63bcaa6f06 100644 --- a/gen/videointelligence1/src/api.rs +++ b/gen/videointelligence1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudVideoIntelligence { CloudVideoIntelligence { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://videointelligence.googleapis.com/".to_string(), _root_url: "https://videointelligence.googleapis.com/".to_string(), } @@ -142,7 +142,7 @@ impl<'a, S> CloudVideoIntelligence { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/videointelligence1/src/client.rs b/gen/videointelligence1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/videointelligence1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/videointelligence1/src/lib.rs b/gen/videointelligence1/src/lib.rs index 519ab9c928..1fe59105cf 100644 --- a/gen/videointelligence1/src/lib.rs +++ b/gen/videointelligence1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Video Intelligence* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *videointelligence:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Video Intelligence* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *videointelligence:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Video Intelligence* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/video-intelligence/docs/). diff --git a/gen/videointelligence1_beta1-cli/Cargo.toml b/gen/videointelligence1_beta1-cli/Cargo.toml index 3f0005bdbe..1fd0388fbb 100644 --- a/gen/videointelligence1_beta1-cli/Cargo.toml +++ b/gen/videointelligence1_beta1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-videointelligence1_beta1-cli" -version = "4.0.1+20171122" +version = "5.0.2+20171122" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Video Intelligence (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/videointelligence1_beta1-cli" @@ -20,13 +20,13 @@ name = "videointelligence1-beta1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-videointelligence1_beta1] path = "../videointelligence1_beta1" -version = "4.0.1+20171122" +version = "5.0.2+20171122" + diff --git a/gen/videointelligence1_beta1-cli/README.md 
b/gen/videointelligence1_beta1-cli/README.md index bdd13f5576..fc6be2662f 100644 --- a/gen/videointelligence1_beta1-cli/README.md +++ b/gen/videointelligence1_beta1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Cloud Video Intelligence* API at revision *20171122*. The CLI is at version *4.0.1*. +This documentation was generated from the *Cloud Video Intelligence* API at revision *20171122*. The CLI is at version *5.0.2*. ```bash videointelligence1-beta1 [options] diff --git a/gen/videointelligence1_beta1-cli/mkdocs.yml b/gen/videointelligence1_beta1-cli/mkdocs.yml index 632ec03a75..d39aecb7cf 100644 --- a/gen/videointelligence1_beta1-cli/mkdocs.yml +++ b/gen/videointelligence1_beta1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Cloud Video Intelligence v4.0.1+20171122 +site_name: Cloud Video Intelligence v5.0.2+20171122 site_url: http://byron.github.io/google-apis-rs/google-videointelligence1_beta1-cli site_description: A complete library to interact with Cloud Video Intelligence (protocol v1beta1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/videointelligenc docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['videos_annotate.md', 'Videos', 'Annotate'] +nav: +- Home: 'index.md' +- 'Videos': + - 'Annotate': 'videos_annotate.md' theme: readthedocs diff --git a/gen/videointelligence1_beta1-cli/src/client.rs b/gen/videointelligence1_beta1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/videointelligence1_beta1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use 
std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/videointelligence1_beta1-cli/src/main.rs b/gen/videointelligence1_beta1-cli/src/main.rs index 2ab2faca6c..cc9bb5f980 100644 --- a/gen/videointelligence1_beta1-cli/src/main.rs +++ b/gen/videointelligence1_beta1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_videointelligence1_beta1::{api, Error, oauth2}; +use google_videointelligence1_beta1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -267,7 +266,7 @@ async fn main() { let mut app = App::new("videointelligence1-beta1") .author("Sebastian Thiel ") - .version("4.0.1+20171122") + .version("5.0.2+20171122") .about("Cloud Video Intelligence API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_videointelligence1_beta1_cli") .arg(Arg::with_name("url") diff --git a/gen/videointelligence1_beta1/Cargo.toml b/gen/videointelligence1_beta1/Cargo.toml index b17b2c55a5..3d53bb209e 100644 --- a/gen/videointelligence1_beta1/Cargo.toml +++ b/gen/videointelligence1_beta1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-videointelligence1_beta1" -version = "5.0.2-beta-1+20171122" +version = "5.0.2+20171122" authors = ["Sebastian Thiel "] description = "A complete library to interact with Cloud Video Intelligence (protocol v1beta1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/videointelligence1_beta1" homepage = "https://cloud.google.com/video-intelligence/docs/" 
-documentation = "https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122" +documentation = "https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122" license = "MIT" keywords = ["videointelligence", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/videointelligence1_beta1/README.md b/gen/videointelligence1_beta1/README.md index cfcb10ebd5..163837de03 100644 --- a/gen/videointelligence1_beta1/README.md +++ b/gen/videointelligence1_beta1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-videointelligence1_beta1` library allows access to all features of the *Google Cloud Video Intelligence* service. -This documentation was generated from *Cloud Video Intelligence* crate version *5.0.2-beta-1+20171122*, where *20171122* is the exact revision of the *videointelligence:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Cloud Video Intelligence* crate version *5.0.2+20171122*, where *20171122* is the exact revision of the *videointelligence:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Cloud Video Intelligence* *v1_beta1* API can be found at the [official documentation site](https://cloud.google.com/video-intelligence/docs/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/CloudVideoIntelligence) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/CloudVideoIntelligence) ... 
* videos - * [*annotate*](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/api::VideoAnnotateCall) + * [*annotate*](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/api::VideoAnnotateCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/CloudVideoIntelligence)** +* **[Hub](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/CloudVideoIntelligence)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::CallBuilder) -* **[Resources](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::CallBuilder) +* **[Resources](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::Part)** + * 
**[Parts](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -124,17 +124,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -144,29 +144,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::Delegate) to the -[Method Builder](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::Delegate) to the +[Method Builder](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::CallBuilder) before making the final `doit()` call. 
Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::RequestValue) and -[decodable](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::RequestValue) and +[decodable](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. 
## Builder Arguments -Using [method builders](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-videointelligence1_beta1/5.0.2-beta-1+20171122/google_videointelligence1_beta1/client::RequestValue) are moved +* [request values](https://docs.rs/google-videointelligence1_beta1/5.0.2+20171122/google_videointelligence1_beta1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/videointelligence1_beta1/src/api.rs b/gen/videointelligence1_beta1/src/api.rs index b0a4d9bd41..75054ef206 100644 --- a/gen/videointelligence1_beta1/src/api.rs +++ b/gen/videointelligence1_beta1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> CloudVideoIntelligence { CloudVideoIntelligence { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://videointelligence.googleapis.com/".to_string(), _root_url: "https://videointelligence.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> CloudVideoIntelligence { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/videointelligence1_beta1/src/client.rs b/gen/videointelligence1_beta1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/videointelligence1_beta1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/videointelligence1_beta1/src/lib.rs b/gen/videointelligence1_beta1/src/lib.rs index e5990ac0fe..0dc5ef16a4 100644 --- a/gen/videointelligence1_beta1/src/lib.rs +++ b/gen/videointelligence1_beta1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Cloud Video Intelligence* crate version *5.0.2-beta-1+20171122*, where *20171122* is the exact revision of the *videointelligence:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Cloud Video Intelligence* crate version *5.0.2+20171122*, where *20171122* is the exact revision of the *videointelligence:v1beta1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Cloud Video Intelligence* *v1_beta1* API can be found at the //! [official documentation site](https://cloud.google.com/video-intelligence/docs/). diff --git a/gen/vision1-cli/Cargo.toml b/gen/vision1-cli/Cargo.toml index 60b002ab2f..3016ecadfe 100644 --- a/gen/vision1-cli/Cargo.toml +++ b/gen/vision1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-vision1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete library to interact with Vision (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/vision1-cli" @@ -20,13 +20,13 @@ name = "vision1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-vision1] path = "../vision1" -version = "4.0.1+20220225" +version = "5.0.2+20230113" + diff --git a/gen/vision1-cli/README.md b/gen/vision1-cli/README.md index 25e75d6309..cdaa4daf22 100644 --- a/gen/vision1-cli/README.md +++ b/gen/vision1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Vision* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *Vision* API at revision *20230113*. The CLI is at version *5.0.2*. ```bash vision1 [options] diff --git a/gen/vision1-cli/mkdocs.yml b/gen/vision1-cli/mkdocs.yml index 8b90d67170..e28520bda8 100644 --- a/gen/vision1-cli/mkdocs.yml +++ b/gen/vision1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Vision v4.0.1+20220225 +site_name: Vision v5.0.2+20230113 site_url: http://byron.github.io/google-apis-rs/google-vision1-cli site_description: A complete library to interact with Vision (protocol v1) @@ -7,46 +7,51 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/vision1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['files_annotate.md', 'Files', 'Annotate'] -- ['files_async-batch-annotate.md', 'Files', 'Async Batch Annotate'] -- ['images_annotate.md', 'Images', 'Annotate'] -- ['images_async-batch-annotate.md', 'Images', 'Async Batch Annotate'] -- ['locations_operations-get.md', 'Locations', 'Operations Get'] -- ['operations_cancel.md', 'Operations', 'Cancel'] -- ['operations_delete.md', 'Operations', 'Delete'] -- ['operations_get.md', 'Operations', 'Get'] -- ['operations_list.md', 'Operations', 'List'] -- ['projects_files-annotate.md', 'Projects', 'Files Annotate'] -- ['projects_files-async-batch-annotate.md', 'Projects', 'Files Async Batch Annotate'] -- ['projects_images-annotate.md', 'Projects', 'Images Annotate'] -- ['projects_images-async-batch-annotate.md', 'Projects', 'Images Async Batch Annotate'] -- ['projects_locations-files-annotate.md', 'Projects', 'Locations Files Annotate'] -- ['projects_locations-files-async-batch-annotate.md', 'Projects', 'Locations Files Async Batch Annotate'] -- ['projects_locations-images-annotate.md', 'Projects', 'Locations Images Annotate'] -- 
['projects_locations-images-async-batch-annotate.md', 'Projects', 'Locations Images Async Batch Annotate'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-product-sets-add-product.md', 'Projects', 'Locations Product Sets Add Product'] -- ['projects_locations-product-sets-create.md', 'Projects', 'Locations Product Sets Create'] -- ['projects_locations-product-sets-delete.md', 'Projects', 'Locations Product Sets Delete'] -- ['projects_locations-product-sets-get.md', 'Projects', 'Locations Product Sets Get'] -- ['projects_locations-product-sets-import.md', 'Projects', 'Locations Product Sets Import'] -- ['projects_locations-product-sets-list.md', 'Projects', 'Locations Product Sets List'] -- ['projects_locations-product-sets-patch.md', 'Projects', 'Locations Product Sets Patch'] -- ['projects_locations-product-sets-products-list.md', 'Projects', 'Locations Product Sets Products List'] -- ['projects_locations-product-sets-remove-product.md', 'Projects', 'Locations Product Sets Remove Product'] -- ['projects_locations-products-create.md', 'Projects', 'Locations Products Create'] -- ['projects_locations-products-delete.md', 'Projects', 'Locations Products Delete'] -- ['projects_locations-products-get.md', 'Projects', 'Locations Products Get'] -- ['projects_locations-products-list.md', 'Projects', 'Locations Products List'] -- ['projects_locations-products-patch.md', 'Projects', 'Locations Products Patch'] -- ['projects_locations-products-purge.md', 'Projects', 'Locations Products Purge'] -- ['projects_locations-products-reference-images-create.md', 'Projects', 'Locations Products Reference Images Create'] -- ['projects_locations-products-reference-images-delete.md', 'Projects', 'Locations Products Reference Images Delete'] -- ['projects_locations-products-reference-images-get.md', 'Projects', 'Locations Products Reference Images Get'] -- ['projects_locations-products-reference-images-list.md', 'Projects', 
'Locations Products Reference Images List'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] +nav: +- Home: 'index.md' +- 'Files': + - 'Annotate': 'files_annotate.md' + - 'Async Batch Annotate': 'files_async-batch-annotate.md' +- 'Images': + - 'Annotate': 'images_annotate.md' + - 'Async Batch Annotate': 'images_async-batch-annotate.md' +- 'Locations': + - 'Operations Get': 'locations_operations-get.md' +- 'Operations': + - 'Cancel': 'operations_cancel.md' + - 'Delete': 'operations_delete.md' + - 'Get': 'operations_get.md' + - 'List': 'operations_list.md' +- 'Projects': + - 'Files Annotate': 'projects_files-annotate.md' + - 'Files Async Batch Annotate': 'projects_files-async-batch-annotate.md' + - 'Images Annotate': 'projects_images-annotate.md' + - 'Images Async Batch Annotate': 'projects_images-async-batch-annotate.md' + - 'Locations Files Annotate': 'projects_locations-files-annotate.md' + - 'Locations Files Async Batch Annotate': 'projects_locations-files-async-batch-annotate.md' + - 'Locations Images Annotate': 'projects_locations-images-annotate.md' + - 'Locations Images Async Batch Annotate': 'projects_locations-images-async-batch-annotate.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Product Sets Add Product': 'projects_locations-product-sets-add-product.md' + - 'Locations Product Sets Create': 'projects_locations-product-sets-create.md' + - 'Locations Product Sets Delete': 'projects_locations-product-sets-delete.md' + - 'Locations Product Sets Get': 'projects_locations-product-sets-get.md' + - 'Locations Product Sets Import': 'projects_locations-product-sets-import.md' + - 'Locations Product Sets List': 'projects_locations-product-sets-list.md' + - 'Locations Product Sets Patch': 'projects_locations-product-sets-patch.md' + - 'Locations Product Sets Products List': 'projects_locations-product-sets-products-list.md' + - 'Locations Product Sets Remove Product': 
'projects_locations-product-sets-remove-product.md' + - 'Locations Products Create': 'projects_locations-products-create.md' + - 'Locations Products Delete': 'projects_locations-products-delete.md' + - 'Locations Products Get': 'projects_locations-products-get.md' + - 'Locations Products List': 'projects_locations-products-list.md' + - 'Locations Products Patch': 'projects_locations-products-patch.md' + - 'Locations Products Purge': 'projects_locations-products-purge.md' + - 'Locations Products Reference Images Create': 'projects_locations-products-reference-images-create.md' + - 'Locations Products Reference Images Delete': 'projects_locations-products-reference-images-delete.md' + - 'Locations Products Reference Images Get': 'projects_locations-products-reference-images-get.md' + - 'Locations Products Reference Images List': 'projects_locations-products-reference-images-list.md' + - 'Operations Get': 'projects_operations-get.md' theme: readthedocs diff --git a/gen/vision1-cli/src/client.rs b/gen/vision1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/vision1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. 
Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/vision1-cli/src/main.rs b/gen/vision1-cli/src/main.rs index 64132063c5..cba5fc2bac 100644 --- a/gen/vision1-cli/src/main.rs +++ b/gen/vision1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_vision1::{api, Error, oauth2}; +use google_vision1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -643,7 +642,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -1808,7 +1807,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1901,7 +1900,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -1960,7 +1959,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = 
call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2300,7 +2299,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -2392,7 +2391,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -2732,7 +2731,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -4028,7 +4027,7 @@ async fn main() { let mut app = App::new("vision1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") + .version("5.0.2+20230113") .about("Integrates Google Vision features, including image labeling, face, logo, and landmark detection, optical character recognition (OCR), and detection of explicit content, into applications.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_vision1_cli") .arg(Arg::with_name("url") diff --git a/gen/vision1/Cargo.toml b/gen/vision1/Cargo.toml index 242a6298d0..68f8c62785 100644 --- a/gen/vision1/Cargo.toml +++ b/gen/vision1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-vision1" -version = "5.0.2-beta-1+20230113" +version = "5.0.2+20230113" authors = ["Sebastian Thiel "] description = "A complete 
library to interact with Vision (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/vision1" homepage = "https://cloud.google.com/vision/" -documentation = "https://docs.rs/google-vision1/5.0.2-beta-1+20230113" +documentation = "https://docs.rs/google-vision1/5.0.2+20230113" license = "MIT" keywords = ["vision", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/vision1/README.md b/gen/vision1/README.md index c2045e2840..deac045f66 100644 --- a/gen/vision1/README.md +++ b/gen/vision1/README.md @@ -5,24 +5,24 @@ DO NOT EDIT ! --> The `google-vision1` library allows access to all features of the *Google Vision* service. -This documentation was generated from *Vision* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *vision:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Vision* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *vision:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Vision* *v1* API can be found at the [official documentation site](https://cloud.google.com/vision/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/Vision) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/Vision) ... 
* files - * [*annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::FileAnnotateCall) and [*async batch annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::FileAsyncBatchAnnotateCall) -* [images](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::Image) - * [*annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ImageAnnotateCall) and [*async batch annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ImageAsyncBatchAnnotateCall) + * [*annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::FileAnnotateCall) and [*async batch annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::FileAsyncBatchAnnotateCall) +* [images](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::Image) + * [*annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ImageAnnotateCall) and [*async batch annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ImageAsyncBatchAnnotateCall) * locations - * [*operations get*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::LocationOperationGetCall) -* [operations](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::Operation) - * [*cancel*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::OperationCancelCall), [*delete*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::OperationDeleteCall), [*get*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::OperationGetCall) and [*list*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::OperationListCall) + * [*operations get*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::LocationOperationGetCall) +* [operations](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::Operation) + * 
[*cancel*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::OperationCancelCall), [*delete*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::OperationDeleteCall), [*get*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::OperationGetCall) and [*list*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::OperationListCall) * projects - * [*files annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectFileAnnotateCall), [*files async batch annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectFileAsyncBatchAnnotateCall), [*images annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectImageAnnotateCall), [*images async batch annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectImageAsyncBatchAnnotateCall), [*locations files annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationFileAnnotateCall), [*locations files async batch annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationFileAsyncBatchAnnotateCall), [*locations images annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationImageAnnotateCall), [*locations images async batch annotate*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationImageAsyncBatchAnnotateCall), [*locations operations get*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationOperationGetCall), [*locations product sets add product*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetAddProductCall), [*locations product sets create*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetCreateCall), [*locations product sets 
delete*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetDeleteCall), [*locations product sets get*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetGetCall), [*locations product sets import*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetImportCall), [*locations product sets list*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetListCall), [*locations product sets patch*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetPatchCall), [*locations product sets products list*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetProductListCall), [*locations product sets remove product*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductSetRemoveProductCall), [*locations products create*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductCreateCall), [*locations products delete*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductDeleteCall), [*locations products get*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductGetCall), [*locations products list*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductListCall), [*locations products patch*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductPatchCall), [*locations products purge*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductPurgeCall), [*locations products reference images create*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductReferenceImageCreateCall), [*locations 
products reference images delete*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductReferenceImageDeleteCall), [*locations products reference images get*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductReferenceImageGetCall), [*locations products reference images list*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectLocationProductReferenceImageListCall) and [*operations get*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/api::ProjectOperationGetCall) + * [*files annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectFileAnnotateCall), [*files async batch annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectFileAsyncBatchAnnotateCall), [*images annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectImageAnnotateCall), [*images async batch annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectImageAsyncBatchAnnotateCall), [*locations files annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationFileAnnotateCall), [*locations files async batch annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationFileAsyncBatchAnnotateCall), [*locations images annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationImageAnnotateCall), [*locations images async batch annotate*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationImageAsyncBatchAnnotateCall), [*locations operations get*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationOperationGetCall), [*locations product sets add product*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetAddProductCall), [*locations product sets 
create*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetCreateCall), [*locations product sets delete*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetDeleteCall), [*locations product sets get*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetGetCall), [*locations product sets import*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetImportCall), [*locations product sets list*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetListCall), [*locations product sets patch*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetPatchCall), [*locations product sets products list*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetProductListCall), [*locations product sets remove product*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductSetRemoveProductCall), [*locations products create*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductCreateCall), [*locations products delete*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductDeleteCall), [*locations products get*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductGetCall), [*locations products list*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductListCall), [*locations products patch*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductPatchCall), [*locations products purge*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductPurgeCall), [*locations products reference images 
create*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductReferenceImageCreateCall), [*locations products reference images delete*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductReferenceImageDeleteCall), [*locations products reference images get*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductReferenceImageGetCall), [*locations products reference images list*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectLocationProductReferenceImageListCall) and [*operations get*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/api::ProjectOperationGetCall) @@ -31,17 +31,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/Vision)** +* **[Hub](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/Vision)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::CallBuilder) -* **[Resources](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::CallBuilder) +* **[Resources](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * 
**[Parts](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::Part)** + * **[Parts](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -143,17 +143,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -163,29 +163,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::Delegate) to the -[Method Builder](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::Delegate) to the +[Method Builder](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::RequestValue) and -[decodable](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::RequestValue) and +[decodable](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-vision1/5.0.2-beta-1+20230113/google_vision1/client::RequestValue) are moved +* [request values](https://docs.rs/google-vision1/5.0.2+20230113/google_vision1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/vision1/src/api.rs b/gen/vision1/src/api.rs index e5d46adcb4..35bf0d6769 100644 --- a/gen/vision1/src/api.rs +++ b/gen/vision1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Vision { Vision { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://vision.googleapis.com/".to_string(), _root_url: "https://vision.googleapis.com/".to_string(), } @@ -149,7 +149,7 @@ impl<'a, S> Vision { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/vision1/src/client.rs b/gen/vision1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/vision1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/vision1/src/lib.rs b/gen/vision1/src/lib.rs index 6ed6fb0817..43d21064e8 100644 --- a/gen/vision1/src/lib.rs +++ b/gen/vision1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Vision* crate version *5.0.2-beta-1+20230113*, where *20230113* is the exact revision of the *vision:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Vision* crate version *5.0.2+20230113*, where *20230113* is the exact revision of the *vision:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Vision* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/vision/). diff --git a/gen/vmmigration1-cli/Cargo.toml b/gen/vmmigration1-cli/Cargo.toml index 96a3afbe1f..dfef0fd18c 100644 --- a/gen/vmmigration1-cli/Cargo.toml +++ b/gen/vmmigration1-cli/Cargo.toml @@ -4,11 +4,11 @@ [package] name = "google-vmmigration1-cli" -version = "4.0.1+20220225" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with VM Migration Service (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/vmmigration1-cli" -homepage = "https://cloud.google.com/migrate/compute-engine" +homepage = "https://cloud.google.com/migrate/virtual-machines" documentation = "http://byron.github.io/google-apis-rs/google_vmmigration1_cli" license = "MIT" keywords = ["vmmigration", "google", "cli"] @@ -20,13 +20,13 @@ name = "vmmigration1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-vmmigration1] path = "../vmmigration1" -version = "4.0.1+20220225" +version = "5.0.2+20230119" + diff --git 
a/gen/vmmigration1-cli/README.md b/gen/vmmigration1-cli/README.md index c212d075d5..4cef01b09a 100644 --- a/gen/vmmigration1-cli/README.md +++ b/gen/vmmigration1-cli/README.md @@ -11,7 +11,7 @@ capabilities. Errors will be printed to standard error, and cause the program's If data-structures are requested, these will be returned as pretty-printed JSON, to be useful as input to other tools. Everything else about the *VM Migration Service* API can be found at the -[official documentation site](https://cloud.google.com/migrate/compute-engine). +[official documentation site](https://cloud.google.com/migrate/virtual-machines). # Installation and Source Code @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *VM Migration Service* API at revision *20220225*. The CLI is at version *4.0.1*. +This documentation was generated from the *VM Migration Service* API at revision *20230119*. The CLI is at version *5.0.2*. ```bash vmmigration1 [options] @@ -68,6 +68,8 @@ vmmigration1 [options] locations-sources-migrating-vms-list [-p ]... [-o ] locations-sources-migrating-vms-patch (-r )... [-p ]... [-o ] locations-sources-migrating-vms-pause-migration (-r )... [-p ]... [-o ] + locations-sources-migrating-vms-replication-cycles-get [-p ]... [-o ] + locations-sources-migrating-vms-replication-cycles-list [-p ]... [-o ] locations-sources-migrating-vms-resume-migration (-r )... [-p ]... [-o ] locations-sources-migrating-vms-start-migration (-r )... [-p ]... [-o ] locations-sources-patch (-r )... [-p ]... 
[-o ] diff --git a/gen/vmmigration1-cli/mkdocs.yml b/gen/vmmigration1-cli/mkdocs.yml index 86328b51df..b437a170f9 100644 --- a/gen/vmmigration1-cli/mkdocs.yml +++ b/gen/vmmigration1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: VM Migration Service v4.0.1+20220225 +site_name: VM Migration Service v5.0.2+20230119 site_url: http://byron.github.io/google-apis-rs/google-vmmigration1-cli site_description: A complete library to interact with VM Migration Service (protocol v1) @@ -7,58 +7,61 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/vmmigration1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-groups-add-group-migration.md', 'Projects', 'Locations Groups Add Group Migration'] -- ['projects_locations-groups-create.md', 'Projects', 'Locations Groups Create'] -- ['projects_locations-groups-delete.md', 'Projects', 'Locations Groups Delete'] -- ['projects_locations-groups-get.md', 'Projects', 'Locations Groups Get'] -- ['projects_locations-groups-list.md', 'Projects', 'Locations Groups List'] -- ['projects_locations-groups-patch.md', 'Projects', 'Locations Groups Patch'] -- ['projects_locations-groups-remove-group-migration.md', 'Projects', 'Locations Groups Remove Group Migration'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-cancel.md', 'Projects', 'Locations Operations Cancel'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-sources-create.md', 'Projects', 'Locations Sources Create'] -- ['projects_locations-sources-datacenter-connectors-create.md', 'Projects', 'Locations Sources Datacenter Connectors Create'] -- 
['projects_locations-sources-datacenter-connectors-delete.md', 'Projects', 'Locations Sources Datacenter Connectors Delete'] -- ['projects_locations-sources-datacenter-connectors-get.md', 'Projects', 'Locations Sources Datacenter Connectors Get'] -- ['projects_locations-sources-datacenter-connectors-list.md', 'Projects', 'Locations Sources Datacenter Connectors List'] -- ['projects_locations-sources-datacenter-connectors-upgrade-appliance.md', 'Projects', 'Locations Sources Datacenter Connectors Upgrade Appliance'] -- ['projects_locations-sources-delete.md', 'Projects', 'Locations Sources Delete'] -- ['projects_locations-sources-fetch-inventory.md', 'Projects', 'Locations Sources Fetch Inventory'] -- ['projects_locations-sources-get.md', 'Projects', 'Locations Sources Get'] -- ['projects_locations-sources-list.md', 'Projects', 'Locations Sources List'] -- ['projects_locations-sources-migrating-vms-clone-jobs-cancel.md', 'Projects', 'Locations Sources Migrating Vms Clone Jobs Cancel'] -- ['projects_locations-sources-migrating-vms-clone-jobs-create.md', 'Projects', 'Locations Sources Migrating Vms Clone Jobs Create'] -- ['projects_locations-sources-migrating-vms-clone-jobs-get.md', 'Projects', 'Locations Sources Migrating Vms Clone Jobs Get'] -- ['projects_locations-sources-migrating-vms-clone-jobs-list.md', 'Projects', 'Locations Sources Migrating Vms Clone Jobs List'] -- ['projects_locations-sources-migrating-vms-create.md', 'Projects', 'Locations Sources Migrating Vms Create'] -- ['projects_locations-sources-migrating-vms-cutover-jobs-cancel.md', 'Projects', 'Locations Sources Migrating Vms Cutover Jobs Cancel'] -- ['projects_locations-sources-migrating-vms-cutover-jobs-create.md', 'Projects', 'Locations Sources Migrating Vms Cutover Jobs Create'] -- ['projects_locations-sources-migrating-vms-cutover-jobs-get.md', 'Projects', 'Locations Sources Migrating Vms Cutover Jobs Get'] -- ['projects_locations-sources-migrating-vms-cutover-jobs-list.md', 'Projects', 
'Locations Sources Migrating Vms Cutover Jobs List'] -- ['projects_locations-sources-migrating-vms-delete.md', 'Projects', 'Locations Sources Migrating Vms Delete'] -- ['projects_locations-sources-migrating-vms-finalize-migration.md', 'Projects', 'Locations Sources Migrating Vms Finalize Migration'] -- ['projects_locations-sources-migrating-vms-get.md', 'Projects', 'Locations Sources Migrating Vms Get'] -- ['projects_locations-sources-migrating-vms-list.md', 'Projects', 'Locations Sources Migrating Vms List'] -- ['projects_locations-sources-migrating-vms-patch.md', 'Projects', 'Locations Sources Migrating Vms Patch'] -- ['projects_locations-sources-migrating-vms-pause-migration.md', 'Projects', 'Locations Sources Migrating Vms Pause Migration'] -- ['projects_locations-sources-migrating-vms-resume-migration.md', 'Projects', 'Locations Sources Migrating Vms Resume Migration'] -- ['projects_locations-sources-migrating-vms-start-migration.md', 'Projects', 'Locations Sources Migrating Vms Start Migration'] -- ['projects_locations-sources-patch.md', 'Projects', 'Locations Sources Patch'] -- ['projects_locations-sources-utilization-reports-create.md', 'Projects', 'Locations Sources Utilization Reports Create'] -- ['projects_locations-sources-utilization-reports-delete.md', 'Projects', 'Locations Sources Utilization Reports Delete'] -- ['projects_locations-sources-utilization-reports-get.md', 'Projects', 'Locations Sources Utilization Reports Get'] -- ['projects_locations-sources-utilization-reports-list.md', 'Projects', 'Locations Sources Utilization Reports List'] -- ['projects_locations-target-projects-create.md', 'Projects', 'Locations Target Projects Create'] -- ['projects_locations-target-projects-delete.md', 'Projects', 'Locations Target Projects Delete'] -- ['projects_locations-target-projects-get.md', 'Projects', 'Locations Target Projects Get'] -- ['projects_locations-target-projects-list.md', 'Projects', 'Locations Target Projects List'] -- 
['projects_locations-target-projects-patch.md', 'Projects', 'Locations Target Projects Patch'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations Groups Add Group Migration': 'projects_locations-groups-add-group-migration.md' + - 'Locations Groups Create': 'projects_locations-groups-create.md' + - 'Locations Groups Delete': 'projects_locations-groups-delete.md' + - 'Locations Groups Get': 'projects_locations-groups-get.md' + - 'Locations Groups List': 'projects_locations-groups-list.md' + - 'Locations Groups Patch': 'projects_locations-groups-patch.md' + - 'Locations Groups Remove Group Migration': 'projects_locations-groups-remove-group-migration.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Cancel': 'projects_locations-operations-cancel.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Sources Create': 'projects_locations-sources-create.md' + - 'Locations Sources Datacenter Connectors Create': 'projects_locations-sources-datacenter-connectors-create.md' + - 'Locations Sources Datacenter Connectors Delete': 'projects_locations-sources-datacenter-connectors-delete.md' + - 'Locations Sources Datacenter Connectors Get': 'projects_locations-sources-datacenter-connectors-get.md' + - 'Locations Sources Datacenter Connectors List': 'projects_locations-sources-datacenter-connectors-list.md' + - 'Locations Sources Datacenter Connectors Upgrade Appliance': 'projects_locations-sources-datacenter-connectors-upgrade-appliance.md' + - 'Locations Sources Delete': 'projects_locations-sources-delete.md' + - 'Locations Sources Fetch Inventory': 'projects_locations-sources-fetch-inventory.md' + - 'Locations Sources Get': 'projects_locations-sources-get.md' + - 'Locations Sources List': 
'projects_locations-sources-list.md' + - 'Locations Sources Migrating Vms Clone Jobs Cancel': 'projects_locations-sources-migrating-vms-clone-jobs-cancel.md' + - 'Locations Sources Migrating Vms Clone Jobs Create': 'projects_locations-sources-migrating-vms-clone-jobs-create.md' + - 'Locations Sources Migrating Vms Clone Jobs Get': 'projects_locations-sources-migrating-vms-clone-jobs-get.md' + - 'Locations Sources Migrating Vms Clone Jobs List': 'projects_locations-sources-migrating-vms-clone-jobs-list.md' + - 'Locations Sources Migrating Vms Create': 'projects_locations-sources-migrating-vms-create.md' + - 'Locations Sources Migrating Vms Cutover Jobs Cancel': 'projects_locations-sources-migrating-vms-cutover-jobs-cancel.md' + - 'Locations Sources Migrating Vms Cutover Jobs Create': 'projects_locations-sources-migrating-vms-cutover-jobs-create.md' + - 'Locations Sources Migrating Vms Cutover Jobs Get': 'projects_locations-sources-migrating-vms-cutover-jobs-get.md' + - 'Locations Sources Migrating Vms Cutover Jobs List': 'projects_locations-sources-migrating-vms-cutover-jobs-list.md' + - 'Locations Sources Migrating Vms Delete': 'projects_locations-sources-migrating-vms-delete.md' + - 'Locations Sources Migrating Vms Finalize Migration': 'projects_locations-sources-migrating-vms-finalize-migration.md' + - 'Locations Sources Migrating Vms Get': 'projects_locations-sources-migrating-vms-get.md' + - 'Locations Sources Migrating Vms List': 'projects_locations-sources-migrating-vms-list.md' + - 'Locations Sources Migrating Vms Patch': 'projects_locations-sources-migrating-vms-patch.md' + - 'Locations Sources Migrating Vms Pause Migration': 'projects_locations-sources-migrating-vms-pause-migration.md' + - 'Locations Sources Migrating Vms Replication Cycles Get': 'projects_locations-sources-migrating-vms-replication-cycles-get.md' + - 'Locations Sources Migrating Vms Replication Cycles List': 'projects_locations-sources-migrating-vms-replication-cycles-list.md' + - 
'Locations Sources Migrating Vms Resume Migration': 'projects_locations-sources-migrating-vms-resume-migration.md' + - 'Locations Sources Migrating Vms Start Migration': 'projects_locations-sources-migrating-vms-start-migration.md' + - 'Locations Sources Patch': 'projects_locations-sources-patch.md' + - 'Locations Sources Utilization Reports Create': 'projects_locations-sources-utilization-reports-create.md' + - 'Locations Sources Utilization Reports Delete': 'projects_locations-sources-utilization-reports-delete.md' + - 'Locations Sources Utilization Reports Get': 'projects_locations-sources-utilization-reports-get.md' + - 'Locations Sources Utilization Reports List': 'projects_locations-sources-utilization-reports-list.md' + - 'Locations Target Projects Create': 'projects_locations-target-projects-create.md' + - 'Locations Target Projects Delete': 'projects_locations-target-projects-delete.md' + - 'Locations Target Projects Get': 'projects_locations-target-projects-get.md' + - 'Locations Target Projects List': 'projects_locations-target-projects-list.md' + - 'Locations Target Projects Patch': 'projects_locations-target-projects-patch.md' theme: readthedocs diff --git a/gen/vmmigration1-cli/src/client.rs b/gen/vmmigration1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/vmmigration1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), 
-// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/vmmigration1-cli/src/main.rs b/gen/vmmigration1-cli/src/main.rs index 475f238dee..7784dc51d6 100644 --- a/gen/vmmigration1-cli/src/main.rs +++ b/gen/vmmigration1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_vmmigration1::{api, Error, oauth2}; +use google_vmmigration1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -402,7 +401,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -501,7 +500,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -648,7 +647,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -898,7 +897,7 @@ where call = 
call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -973,6 +972,15 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "aws.access-key-creds.access-key-id" => Some(("aws.accessKeyCreds.accessKeyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.access-key-creds.secret-access-key" => Some(("aws.accessKeyCreds.secretAccessKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.aws-region" => Some(("aws.awsRegion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.error.code" => Some(("aws.error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "aws.error.message" => Some(("aws.error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.inventory-security-group-names" => Some(("aws.inventorySecurityGroupNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "aws.migration-resources-user-tags" => Some(("aws.migrationResourcesUserTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "aws.public-ip" => Some(("aws.publicIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.state" => Some(("aws.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -983,7 +991,7 @@ where "vmware.username" => Some(("vmware.username", JsonTypeInfo { 
jtype: JsonType::String, ctype: ComplexType::Pod })), "vmware.vcenter-ip" => Some(("vmware.vcenterIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "labels", "name", "password", "thumbprint", "update-time", "username", "vcenter-ip", "vmware"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["access-key-creds", "access-key-id", "aws", "aws-region", "code", "create-time", "description", "error", "inventory-security-group-names", "labels", "message", "migration-resources-user-tags", "name", "password", "public-ip", "secret-access-key", "state", "thumbprint", "update-time", "username", "vcenter-ip", "vmware"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1286,7 +1294,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1488,8 +1496,14 @@ where for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, "force-refresh" => { - call = call.force_refresh(arg_from_str(value.unwrap_or("false"), err, "force-refresh", "boolean")); + call = call.force_refresh( value.map(|v| arg_from_str(v, err, "force-refresh", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -1504,7 +1518,7 @@ where err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); 
v.extend(self.gp.iter().map(|v|*v)); - v.extend(["force-refresh"].iter().map(|v|*v)); + v.extend(["force-refresh", "page-size", "page-token"].iter().map(|v|*v)); v } )); } } @@ -1600,7 +1614,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1770,6 +1784,7 @@ where "compute-engine-target-details.compute-scheduling.on-host-maintenance" => Some(("computeEngineTargetDetails.computeScheduling.onHostMaintenance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.compute-scheduling.restart-type" => Some(("computeEngineTargetDetails.computeScheduling.restartType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.disk-type" => Some(("computeEngineTargetDetails.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "compute-engine-target-details.hostname" => Some(("computeEngineTargetDetails.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.labels" => Some(("computeEngineTargetDetails.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "compute-engine-target-details.license-type" => Some(("computeEngineTargetDetails.licenseType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.machine-type" => Some(("computeEngineTargetDetails.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -1782,13 +1797,14 @@ where "compute-engine-target-details.vm-name" => Some(("computeEngineTargetDetails.vmName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.zone" => 
Some(("computeEngineTargetDetails.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "end-time" => Some(("endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "error.message" => Some(("error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state-time" => Some(("stateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-licenses", "applied-license", "boot-option", "code", "compute-engine-target-details", "compute-scheduling", "create-time", "disk-type", "error", "labels", "license-type", "machine-type", "machine-type-series", "message", "metadata", "min-node-cpus", "name", "network-tags", "on-host-maintenance", "os-license", "project", "restart-type", "secure-boot", "service-account", "state", "state-time", "type", "vm-name", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-licenses", "applied-license", "boot-option", "code", "compute-engine-target-details", "compute-scheduling", "create-time", "disk-type", "end-time", "error", "hostname", "labels", "license-type", "machine-type", "machine-type-series", "message", "metadata", "min-node-cpus", "name", "network-tags", "on-host-maintenance", "os-license", "project", "restart-type", "secure-boot", "service-account", "state", "state-time", "type", "vm-name", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -1917,7 +1933,7 @@ where call 
= call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -1995,6 +2011,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "aws-source-vm-details.committed-storage-bytes" => Some(("awsSourceVmDetails.committedStorageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws-source-vm-details.firmware" => Some(("awsSourceVmDetails.firmware", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.additional-licenses" => Some(("computeEngineTargetDefaults.additionalLicenses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "compute-engine-target-defaults.applied-license.os-license" => Some(("computeEngineTargetDefaults.appliedLicense.osLicense", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.applied-license.type" => Some(("computeEngineTargetDefaults.appliedLicense.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2003,6 +2021,7 @@ where "compute-engine-target-defaults.compute-scheduling.on-host-maintenance" => Some(("computeEngineTargetDefaults.computeScheduling.onHostMaintenance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.compute-scheduling.restart-type" => Some(("computeEngineTargetDefaults.computeScheduling.restartType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.disk-type" => Some(("computeEngineTargetDefaults.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "compute-engine-target-defaults.hostname" => 
Some(("computeEngineTargetDefaults.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.labels" => Some(("computeEngineTargetDefaults.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "compute-engine-target-defaults.license-type" => Some(("computeEngineTargetDefaults.licenseType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.machine-type" => Some(("computeEngineTargetDefaults.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2015,8 +2034,15 @@ where "compute-engine-target-defaults.vm-name" => Some(("computeEngineTargetDefaults.vmName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.zone" => Some(("computeEngineTargetDefaults.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.cycle-number" => Some(("currentSyncInfo.cycleNumber", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "current-sync-info.end-time" => Some(("currentSyncInfo.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.error.code" => Some(("currentSyncInfo.error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "current-sync-info.error.message" => Some(("currentSyncInfo.error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.name" => Some(("currentSyncInfo.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "current-sync-info.progress-percent" => Some(("currentSyncInfo.progressPercent", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "current-sync-info.start-time" => Some(("currentSyncInfo.startTime", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), + "current-sync-info.state" => Some(("currentSyncInfo.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.total-pause-duration" => Some(("currentSyncInfo.totalPauseDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -2032,7 +2058,7 @@ where "state-time" => Some(("stateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-licenses", "applied-license", "boot-option", "code", "compute-engine-target-defaults", "compute-scheduling", "create-time", "current-sync-info", "description", "disk-type", "display-name", "error", "group", "idle-duration", "labels", "last-sync", "last-sync-time", "license-type", "machine-type", "machine-type-series", "message", "metadata", "min-node-cpus", "name", "network-tags", "on-host-maintenance", "os-license", "policy", "progress-percent", "restart-type", "secure-boot", "service-account", "skip-os-adaptation", "source-vm-id", "start-time", "state", "state-time", "target-project", "type", "update-time", "vm-name", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-licenses", "applied-license", "aws-source-vm-details", "boot-option", "code", "committed-storage-bytes", "compute-engine-target-defaults", "compute-scheduling", "create-time", "current-sync-info", "cycle-number", "description", "disk-type", "display-name", "end-time", "error", "firmware", "group", "hostname", "idle-duration", "labels", 
"last-sync", "last-sync-time", "license-type", "machine-type", "machine-type-series", "message", "metadata", "min-node-cpus", "name", "network-tags", "on-host-maintenance", "os-license", "policy", "progress-percent", "restart-type", "secure-boot", "service-account", "skip-os-adaptation", "source-vm-id", "start-time", "state", "state-time", "target-project", "total-pause-duration", "type", "update-time", "vm-name", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2214,6 +2240,7 @@ where "compute-engine-target-details.compute-scheduling.on-host-maintenance" => Some(("computeEngineTargetDetails.computeScheduling.onHostMaintenance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.compute-scheduling.restart-type" => Some(("computeEngineTargetDetails.computeScheduling.restartType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.disk-type" => Some(("computeEngineTargetDetails.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "compute-engine-target-details.hostname" => Some(("computeEngineTargetDetails.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.labels" => Some(("computeEngineTargetDetails.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "compute-engine-target-details.license-type" => Some(("computeEngineTargetDetails.licenseType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-details.machine-type" => Some(("computeEngineTargetDetails.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2226,6 +2253,7 @@ where "compute-engine-target-details.vm-name" => Some(("computeEngineTargetDetails.vmName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"compute-engine-target-details.zone" => Some(("computeEngineTargetDetails.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "end-time" => Some(("endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "error.message" => Some(("error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2234,7 +2262,7 @@ where "state-message" => Some(("stateMessage", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "state-time" => Some(("stateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-licenses", "applied-license", "boot-option", "code", "compute-engine-target-details", "compute-scheduling", "create-time", "disk-type", "error", "labels", "license-type", "machine-type", "machine-type-series", "message", "metadata", "min-node-cpus", "name", "network-tags", "on-host-maintenance", "os-license", "progress-percent", "project", "restart-type", "secure-boot", "service-account", "state", "state-message", "state-time", "type", "vm-name", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-licenses", "applied-license", "boot-option", "code", "compute-engine-target-details", "compute-scheduling", "create-time", "disk-type", "end-time", "error", "hostname", "labels", "license-type", "machine-type", "machine-type-series", "message", "metadata", "min-node-cpus", "name", "network-tags", "on-host-maintenance", "os-license", "progress-percent", "project", "restart-type", "secure-boot", "service-account", "state", "state-message", "state-time", "type", "vm-name", "zone"]); 
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2363,7 +2391,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2623,7 +2651,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -2701,6 +2729,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "aws-source-vm-details.committed-storage-bytes" => Some(("awsSourceVmDetails.committedStorageBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws-source-vm-details.firmware" => Some(("awsSourceVmDetails.firmware", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.additional-licenses" => Some(("computeEngineTargetDefaults.additionalLicenses", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "compute-engine-target-defaults.applied-license.os-license" => Some(("computeEngineTargetDefaults.appliedLicense.osLicense", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.applied-license.type" => Some(("computeEngineTargetDefaults.appliedLicense.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2709,6 +2739,7 @@ where "compute-engine-target-defaults.compute-scheduling.on-host-maintenance" => Some(("computeEngineTargetDefaults.computeScheduling.onHostMaintenance", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.compute-scheduling.restart-type" => Some(("computeEngineTargetDefaults.computeScheduling.restartType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.disk-type" => Some(("computeEngineTargetDefaults.diskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "compute-engine-target-defaults.hostname" => Some(("computeEngineTargetDefaults.hostname", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.labels" => Some(("computeEngineTargetDefaults.labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), "compute-engine-target-defaults.license-type" => Some(("computeEngineTargetDefaults.licenseType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.machine-type" => Some(("computeEngineTargetDefaults.machineType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2721,8 +2752,15 @@ where "compute-engine-target-defaults.vm-name" => Some(("computeEngineTargetDefaults.vmName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "compute-engine-target-defaults.zone" => Some(("computeEngineTargetDefaults.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.cycle-number" => Some(("currentSyncInfo.cycleNumber", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "current-sync-info.end-time" => Some(("currentSyncInfo.endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.error.code" => Some(("currentSyncInfo.error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "current-sync-info.error.message" => 
Some(("currentSyncInfo.error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.name" => Some(("currentSyncInfo.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "current-sync-info.progress-percent" => Some(("currentSyncInfo.progressPercent", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "current-sync-info.start-time" => Some(("currentSyncInfo.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.state" => Some(("currentSyncInfo.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "current-sync-info.total-pause-duration" => Some(("currentSyncInfo.totalPauseDuration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -2738,7 +2776,7 @@ where "state-time" => Some(("stateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-time" => Some(("updateTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-licenses", "applied-license", "boot-option", "code", "compute-engine-target-defaults", "compute-scheduling", "create-time", "current-sync-info", "description", "disk-type", "display-name", "error", "group", "idle-duration", "labels", "last-sync", "last-sync-time", "license-type", "machine-type", "machine-type-series", "message", "metadata", "min-node-cpus", "name", "network-tags", "on-host-maintenance", "os-license", "policy", "progress-percent", "restart-type", "secure-boot", "service-account", "skip-os-adaptation", "source-vm-id", "start-time", 
"state", "state-time", "target-project", "type", "update-time", "vm-name", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["additional-licenses", "applied-license", "aws-source-vm-details", "boot-option", "code", "committed-storage-bytes", "compute-engine-target-defaults", "compute-scheduling", "create-time", "current-sync-info", "cycle-number", "description", "disk-type", "display-name", "end-time", "error", "firmware", "group", "hostname", "idle-duration", "labels", "last-sync", "last-sync-time", "license-type", "machine-type", "machine-type-series", "message", "metadata", "min-node-cpus", "name", "network-tags", "on-host-maintenance", "os-license", "policy", "progress-percent", "restart-type", "secure-boot", "service-account", "skip-os-adaptation", "source-vm-id", "start-time", "state", "state-time", "target-project", "total-pause-duration", "type", "update-time", "vm-name", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2753,7 +2791,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -2889,6 +2927,123 @@ where } } + async fn _projects_locations_sources_migrating_vms_replication_cycles_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_sources_migrating_vms_replication_cycles_get(opt.value_of("name").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; 
+ call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + + async fn _projects_locations_sources_migrating_vms_replication_cycles_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + let mut call = self.hub.projects().locations_sources_migrating_vms_replication_cycles_list(opt.value_of("parent").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "page-token" => { + call = call.page_token(value.unwrap_or("")); + }, + "page-size" => { + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); + }, + "order-by" => { + call = call.order_by(value.unwrap_or("")); + }, + "filter" => { + call = call.filter(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + 
if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["filter", "order-by", "page-size", "page-token"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _projects_locations_sources_migrating_vms_resume_migration(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -3080,6 +3235,15 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "aws.access-key-creds.access-key-id" => Some(("aws.accessKeyCreds.accessKeyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.access-key-creds.secret-access-key" => Some(("aws.accessKeyCreds.secretAccessKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.aws-region" => Some(("aws.awsRegion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.error.code" => Some(("aws.error.code", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "aws.error.message" => Some(("aws.error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.inventory-security-group-names" => Some(("aws.inventorySecurityGroupNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), + "aws.migration-resources-user-tags" => Some(("aws.migrationResourcesUserTags", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "aws.public-ip" => Some(("aws.publicIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "aws.state" => Some(("aws.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "labels" => Some(("labels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), @@ -3090,7 +3254,7 @@ where "vmware.username" => Some(("vmware.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vmware.vcenter-ip" => Some(("vmware.vcenterIp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "description", "labels", "name", "password", "thumbprint", "update-time", "username", "vcenter-ip", "vmware"]); + let suggestion = FieldCursor::did_you_mean(key, 
&vec!["access-key-creds", "access-key-id", "aws", "aws-region", "code", "create-time", "description", "error", "inventory-security-group-names", "labels", "message", "migration-resources-user-tags", "name", "password", "public-ip", "secret-access-key", "state", "thumbprint", "update-time", "username", "vcenter-ip", "vmware"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -3105,7 +3269,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -3383,7 +3547,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3652,7 +3816,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -3751,7 +3915,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, "request-id" => { call = call.request_id(value.unwrap_or("")); @@ -3924,6 +4088,12 @@ where ("locations-sources-migrating-vms-pause-migration", Some(opt)) => { call_result = 
self._projects_locations_sources_migrating_vms_pause_migration(opt, dry_run, &mut err).await; }, + ("locations-sources-migrating-vms-replication-cycles-get", Some(opt)) => { + call_result = self._projects_locations_sources_migrating_vms_replication_cycles_get(opt, dry_run, &mut err).await; + }, + ("locations-sources-migrating-vms-replication-cycles-list", Some(opt)) => { + call_result = self._projects_locations_sources_migrating_vms_replication_cycles_list(opt, dry_run, &mut err).await; + }, ("locations-sources-migrating-vms-resume-migration", Some(opt)) => { call_result = self._projects_locations_sources_migrating_vms_resume_migration(opt, dry_run, &mut err).await; }, @@ -4039,7 +4209,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-get', 'locations-groups-add-group-migration', 'locations-groups-create', 'locations-groups-delete', 'locations-groups-get', 'locations-groups-list', 'locations-groups-patch', 'locations-groups-remove-group-migration', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-sources-create', 'locations-sources-datacenter-connectors-create', 'locations-sources-datacenter-connectors-delete', 'locations-sources-datacenter-connectors-get', 'locations-sources-datacenter-connectors-list', 'locations-sources-datacenter-connectors-upgrade-appliance', 'locations-sources-delete', 'locations-sources-fetch-inventory', 'locations-sources-get', 'locations-sources-list', 'locations-sources-migrating-vms-clone-jobs-cancel', 'locations-sources-migrating-vms-clone-jobs-create', 'locations-sources-migrating-vms-clone-jobs-get', 'locations-sources-migrating-vms-clone-jobs-list', 'locations-sources-migrating-vms-create', 'locations-sources-migrating-vms-cutover-jobs-cancel', 'locations-sources-migrating-vms-cutover-jobs-create', 'locations-sources-migrating-vms-cutover-jobs-get', 
'locations-sources-migrating-vms-cutover-jobs-list', 'locations-sources-migrating-vms-delete', 'locations-sources-migrating-vms-finalize-migration', 'locations-sources-migrating-vms-get', 'locations-sources-migrating-vms-list', 'locations-sources-migrating-vms-patch', 'locations-sources-migrating-vms-pause-migration', 'locations-sources-migrating-vms-resume-migration', 'locations-sources-migrating-vms-start-migration', 'locations-sources-patch', 'locations-sources-utilization-reports-create', 'locations-sources-utilization-reports-delete', 'locations-sources-utilization-reports-get', 'locations-sources-utilization-reports-list', 'locations-target-projects-create', 'locations-target-projects-delete', 'locations-target-projects-get', 'locations-target-projects-list' and 'locations-target-projects-patch'", vec![ + ("projects", "methods: 'locations-get', 'locations-groups-add-group-migration', 'locations-groups-create', 'locations-groups-delete', 'locations-groups-get', 'locations-groups-list', 'locations-groups-patch', 'locations-groups-remove-group-migration', 'locations-list', 'locations-operations-cancel', 'locations-operations-delete', 'locations-operations-get', 'locations-operations-list', 'locations-sources-create', 'locations-sources-datacenter-connectors-create', 'locations-sources-datacenter-connectors-delete', 'locations-sources-datacenter-connectors-get', 'locations-sources-datacenter-connectors-list', 'locations-sources-datacenter-connectors-upgrade-appliance', 'locations-sources-delete', 'locations-sources-fetch-inventory', 'locations-sources-get', 'locations-sources-list', 'locations-sources-migrating-vms-clone-jobs-cancel', 'locations-sources-migrating-vms-clone-jobs-create', 'locations-sources-migrating-vms-clone-jobs-get', 'locations-sources-migrating-vms-clone-jobs-list', 'locations-sources-migrating-vms-create', 'locations-sources-migrating-vms-cutover-jobs-cancel', 'locations-sources-migrating-vms-cutover-jobs-create', 
'locations-sources-migrating-vms-cutover-jobs-get', 'locations-sources-migrating-vms-cutover-jobs-list', 'locations-sources-migrating-vms-delete', 'locations-sources-migrating-vms-finalize-migration', 'locations-sources-migrating-vms-get', 'locations-sources-migrating-vms-list', 'locations-sources-migrating-vms-patch', 'locations-sources-migrating-vms-pause-migration', 'locations-sources-migrating-vms-replication-cycles-get', 'locations-sources-migrating-vms-replication-cycles-list', 'locations-sources-migrating-vms-resume-migration', 'locations-sources-migrating-vms-start-migration', 'locations-sources-patch', 'locations-sources-utilization-reports-create', 'locations-sources-utilization-reports-delete', 'locations-sources-utilization-reports-get', 'locations-sources-utilization-reports-list', 'locations-target-projects-create', 'locations-target-projects-delete', 'locations-target-projects-get', 'locations-target-projects-list' and 'locations-target-projects-patch'", vec![ ("locations-get", Some(r##"Gets information about a location."##), "Details at http://byron.github.io/google-apis-rs/google_vmmigration1_cli/projects_locations-get", @@ -4966,6 +5136,50 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-sources-migrating-vms-replication-cycles-get", + Some(r##"Gets details of a single ReplicationCycle."##), + "Details at http://byron.github.io/google-apis-rs/google_vmmigration1_cli/projects_locations-sources-migrating-vms-replication-cycles-get", + vec![ + (Some(r##"name"##), + None, + Some(r##"Required. 
The name of the ReplicationCycle."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-sources-migrating-vms-replication-cycles-list", + Some(r##"Lists ReplicationCycles in a given MigratingVM."##), + "Details at http://byron.github.io/google-apis-rs/google_vmmigration1_cli/projects_locations-sources-migrating-vms-replication-cycles-list", + vec![ + (Some(r##"parent"##), + None, + Some(r##"Required. The parent, which owns this collection of ReplicationCycles."##), + Some(true), + Some(false)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -5278,8 +5492,8 @@ async fn main() { let mut app = App::new("vmmigration1") .author("Sebastian Thiel ") - .version("4.0.1+20220225") - .about("Use the Migrate for Compute Engine API to programmatically migrate workloads. ") + .version("5.0.2+20230119") + .about("Use the Migrate to Virtual Machines API to programmatically migrate workloads. 
") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_vmmigration1_cli") .arg(Arg::with_name("url") .long("scope") diff --git a/gen/vmmigration1/Cargo.toml b/gen/vmmigration1/Cargo.toml index 0d421b1d03..972b579e74 100644 --- a/gen/vmmigration1/Cargo.toml +++ b/gen/vmmigration1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-vmmigration1" -version = "5.0.2-beta-1+20230119" +version = "5.0.2+20230119" authors = ["Sebastian Thiel "] description = "A complete library to interact with VM Migration Service (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/vmmigration1" homepage = "https://cloud.google.com/migrate/virtual-machines" -documentation = "https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119" +documentation = "https://docs.rs/google-vmmigration1/5.0.2+20230119" license = "MIT" keywords = ["vmmigration", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/vmmigration1/README.md b/gen/vmmigration1/README.md index 08a4fc2025..ba49719cf6 100644 --- a/gen/vmmigration1/README.md +++ b/gen/vmmigration1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-vmmigration1` library allows access to all features of the *Google VM Migration Service* service. -This documentation was generated from *VM Migration Service* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *vmmigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *VM Migration Service* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *vmmigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *VM Migration Service* *v1* API can be found at the [official documentation site](https://cloud.google.com/migrate/virtual-machines). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/VMMigrationService) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/VMMigrationService) ... * projects - * [*locations get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationGetCall), [*locations groups add group migration*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationGroupAddGroupMigrationCall), [*locations groups create*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationGroupCreateCall), [*locations groups delete*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationGroupDeleteCall), [*locations groups get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationGroupGetCall), [*locations groups list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationGroupListCall), [*locations groups patch*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationGroupPatchCall), [*locations groups remove group migration*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationGroupRemoveGroupMigrationCall), [*locations list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationOperationDeleteCall), [*locations operations 
get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationOperationListCall), [*locations sources create*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceCreateCall), [*locations sources datacenter connectors create*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorCreateCall), [*locations sources datacenter connectors delete*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorDeleteCall), [*locations sources datacenter connectors get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorGetCall), [*locations sources datacenter connectors list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorListCall), [*locations sources datacenter connectors upgrade appliance*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorUpgradeApplianceCall), [*locations sources delete*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceDeleteCall), [*locations sources fetch inventory*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceFetchInventoryCall), [*locations sources get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceGetCall), [*locations sources list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceListCall), [*locations sources migrating vms clone jobs 
cancel*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCloneJobCancelCall), [*locations sources migrating vms clone jobs create*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCloneJobCreateCall), [*locations sources migrating vms clone jobs get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCloneJobGetCall), [*locations sources migrating vms clone jobs list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCloneJobListCall), [*locations sources migrating vms create*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCreateCall), [*locations sources migrating vms cutover jobs cancel*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCutoverJobCancelCall), [*locations sources migrating vms cutover jobs create*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCutoverJobCreateCall), [*locations sources migrating vms cutover jobs get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCutoverJobGetCall), [*locations sources migrating vms cutover jobs list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCutoverJobListCall), [*locations sources migrating vms delete*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmDeleteCall), [*locations sources migrating vms finalize 
migration*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmFinalizeMigrationCall), [*locations sources migrating vms get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmGetCall), [*locations sources migrating vms list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmListCall), [*locations sources migrating vms patch*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmPatchCall), [*locations sources migrating vms pause migration*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmPauseMigrationCall), [*locations sources migrating vms replication cycles get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmReplicationCycleGetCall), [*locations sources migrating vms replication cycles list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmReplicationCycleListCall), [*locations sources migrating vms resume migration*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmResumeMigrationCall), [*locations sources migrating vms start migration*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmStartMigrationCall), [*locations sources patch*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourcePatchCall), [*locations sources utilization reports create*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceUtilizationReportCreateCall), [*locations sources utilization reports 
delete*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceUtilizationReportDeleteCall), [*locations sources utilization reports get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceUtilizationReportGetCall), [*locations sources utilization reports list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationSourceUtilizationReportListCall), [*locations target projects create*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationTargetProjectCreateCall), [*locations target projects delete*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationTargetProjectDeleteCall), [*locations target projects get*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationTargetProjectGetCall), [*locations target projects list*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationTargetProjectListCall) and [*locations target projects patch*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/api::ProjectLocationTargetProjectPatchCall) + * [*locations get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationGetCall), [*locations groups add group migration*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationGroupAddGroupMigrationCall), [*locations groups create*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationGroupCreateCall), [*locations groups delete*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationGroupDeleteCall), [*locations groups get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationGroupGetCall), [*locations 
groups list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationGroupListCall), [*locations groups patch*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationGroupPatchCall), [*locations groups remove group migration*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationGroupRemoveGroupMigrationCall), [*locations list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationListCall), [*locations operations cancel*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationOperationCancelCall), [*locations operations delete*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationOperationListCall), [*locations sources create*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceCreateCall), [*locations sources datacenter connectors create*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorCreateCall), [*locations sources datacenter connectors delete*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorDeleteCall), [*locations sources datacenter connectors get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorGetCall), [*locations sources datacenter connectors list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorListCall), [*locations sources datacenter 
connectors upgrade appliance*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceDatacenterConnectorUpgradeApplianceCall), [*locations sources delete*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceDeleteCall), [*locations sources fetch inventory*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceFetchInventoryCall), [*locations sources get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceGetCall), [*locations sources list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceListCall), [*locations sources migrating vms clone jobs cancel*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCloneJobCancelCall), [*locations sources migrating vms clone jobs create*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCloneJobCreateCall), [*locations sources migrating vms clone jobs get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCloneJobGetCall), [*locations sources migrating vms clone jobs list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCloneJobListCall), [*locations sources migrating vms create*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCreateCall), [*locations sources migrating vms cutover jobs cancel*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCutoverJobCancelCall), [*locations sources migrating vms cutover jobs create*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCutoverJobCreateCall), 
[*locations sources migrating vms cutover jobs get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCutoverJobGetCall), [*locations sources migrating vms cutover jobs list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmCutoverJobListCall), [*locations sources migrating vms delete*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmDeleteCall), [*locations sources migrating vms finalize migration*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmFinalizeMigrationCall), [*locations sources migrating vms get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmGetCall), [*locations sources migrating vms list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmListCall), [*locations sources migrating vms patch*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmPatchCall), [*locations sources migrating vms pause migration*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmPauseMigrationCall), [*locations sources migrating vms replication cycles get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmReplicationCycleGetCall), [*locations sources migrating vms replication cycles list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmReplicationCycleListCall), [*locations sources migrating vms resume migration*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmResumeMigrationCall), [*locations sources migrating vms start 
migration*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceMigratingVmStartMigrationCall), [*locations sources patch*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourcePatchCall), [*locations sources utilization reports create*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceUtilizationReportCreateCall), [*locations sources utilization reports delete*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceUtilizationReportDeleteCall), [*locations sources utilization reports get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceUtilizationReportGetCall), [*locations sources utilization reports list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationSourceUtilizationReportListCall), [*locations target projects create*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationTargetProjectCreateCall), [*locations target projects delete*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationTargetProjectDeleteCall), [*locations target projects get*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationTargetProjectGetCall), [*locations target projects list*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationTargetProjectListCall) and [*locations target projects patch*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/api::ProjectLocationTargetProjectPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/VMMigrationService)** 
+* **[Hub](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/VMMigrationService)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::CallBuilder) -* **[Resources](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::CallBuilder) +* **[Resources](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::Part)** + * **[Parts](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -153,17 +153,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -173,29 +173,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::Delegate) to the -[Method Builder](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::Delegate) to the +[Method Builder](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::RequestValue) and -[decodable](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::RequestValue) and +[decodable](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-vmmigration1/5.0.2-beta-1+20230119/google_vmmigration1/client::RequestValue) are moved +* [request values](https://docs.rs/google-vmmigration1/5.0.2+20230119/google_vmmigration1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/vmmigration1/src/api.rs b/gen/vmmigration1/src/api.rs index fc2c4162e1..34c8dc16ed 100644 --- a/gen/vmmigration1/src/api.rs +++ b/gen/vmmigration1/src/api.rs @@ -127,7 +127,7 @@ impl<'a, S> VMMigrationService { VMMigrationService { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://vmmigration.googleapis.com/".to_string(), _root_url: "https://vmmigration.googleapis.com/".to_string(), } @@ -138,7 +138,7 @@ impl<'a, S> VMMigrationService { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/vmmigration1/src/client.rs b/gen/vmmigration1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/vmmigration1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/vmmigration1/src/lib.rs b/gen/vmmigration1/src/lib.rs index 2c72124186..9b2e52b09b 100644 --- a/gen/vmmigration1/src/lib.rs +++ b/gen/vmmigration1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *VM Migration Service* crate version *5.0.2-beta-1+20230119*, where *20230119* is the exact revision of the *vmmigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *VM Migration Service* crate version *5.0.2+20230119*, where *20230119* is the exact revision of the *vmmigration:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *VM Migration Service* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/migrate/virtual-machines). diff --git a/gen/walletobjects1-cli/Cargo.toml b/gen/walletobjects1-cli/Cargo.toml index f086a93782..ed3ff56566 100644 --- a/gen/walletobjects1-cli/Cargo.toml +++ b/gen/walletobjects1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-walletobjects1-cli" -version = "4.0.4+20220928" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Walletobjects (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/walletobjects1-cli" @@ -22,11 +22,11 @@ path = "src/main.rs" [dependencies] anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" +mime = "^ 0.3.0" serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" itertools = "^ 0.10" -google-clis-common = { path = "../../google-clis-common", version = "4.0" } +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -38,7 +38,5 @@ tower-service = "^0.3.1" [dependencies.google-walletobjects1] path = "../walletobjects1" -version = "4.0.4+20220928" +version = "5.0.2+20230124" -# [features] -# default = ["yup-oauth2"] diff --git a/gen/walletobjects1-cli/README.md b/gen/walletobjects1-cli/README.md index 697a5fc48a..7bfd54a077 100644 --- a/gen/walletobjects1-cli/README.md +++ 
b/gen/walletobjects1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Walletobjects* API at revision *20220928*. The CLI is at version *4.0.4*. +This documentation was generated from the *Walletobjects* API at revision *20230124*. The CLI is at version *5.0.2*. ```bash walletobjects1 [options] @@ -107,6 +107,8 @@ walletobjects1 [options] modifylinkedofferobjects (-r )... [-p ]... [-o ] patch (-r )... [-p ]... [-o ] update (-r )... [-p ]... [-o ] + media + upload (-r )... (-u simple -f [-m ]) [-p ]... [-o ] offerclass addmessage (-r )... [-p ]... [-o ] get [-p ]... [-o ] @@ -140,6 +142,8 @@ walletobjects1 [options] list [-p ]... [-o ] patch (-r )... [-p ]... [-o ] update (-r )... [-p ]... [-o ] + walletobjects + v1-private-content-upload-private-data (-r )... [-p ]... [-o ] walletobjects1 --help Configuration: diff --git a/gen/walletobjects1-cli/mkdocs.yml b/gen/walletobjects1-cli/mkdocs.yml index f0eef9f915..77deb03f65 100644 --- a/gen/walletobjects1-cli/mkdocs.yml +++ b/gen/walletobjects1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Walletobjects v4.0.4+20220928 +site_name: Walletobjects v5.0.2+20230124 site_url: http://byron.github.io/google-apis-rs/google-walletobjects1-cli site_description: A complete library to interact with Walletobjects (protocol v1) @@ -87,6 +87,8 @@ nav: - 'Modifylinkedofferobjects': 'loyaltyobject_modifylinkedofferobjects.md' - 'Patch': 'loyaltyobject_patch.md' - 'Update': 'loyaltyobject_update.md' +- 'Media': + - 'Upload': 'media_upload.md' - 'Offerclass': - 'Addmessage': 'offerclass_addmessage.md' - 'Get': 'offerclass_get.md' @@ -120,6 +122,8 @@ nav: - 'List': 'transitobject_list.md' - 'Patch': 'transitobject_patch.md' - 'Update': 'transitobject_update.md' +- 'Walletobjects': + - 'V1 Private Content Upload Private Data': 'walletobjects_v1-private-content-upload-private-data.md' theme: readthedocs diff --git 
a/gen/walletobjects1-cli/src/main.rs b/gen/walletobjects1-cli/src/main.rs index 94beb5d3a6..1c80f88e24 100644 --- a/gen/walletobjects1-cli/src/main.rs +++ b/gen/walletobjects1-cli/src/main.rs @@ -10,7 +10,8 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_walletobjects1::{api, Error, oauth2}; +use google_walletobjects1::{api, Error, oauth2, client::chrono, FieldMask}; + use google_clis_common as client; @@ -410,10 +411,10 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "issuer-id" => { - call = call.issuer_id(value.unwrap_or("")); + call = call.issuer_id( value.map(|v| arg_from_str(v, err, "issuer-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1377,7 +1378,7 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "class-id" => { call = call.class_id(value.unwrap_or("")); @@ -2612,10 +2613,10 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "issuer-id" => { - call = call.issuer_id(value.unwrap_or("")); + call = call.issuer_id( value.map(|v| arg_from_str(v, err, "issuer-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -3671,7 +3672,7 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| 
arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "class-id" => { call = call.class_id(value.unwrap_or("")); @@ -4668,10 +4669,10 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "issuer-id" => { - call = call.issuer_id(value.unwrap_or("")); + call = call.issuer_id( value.map(|v| arg_from_str(v, err, "issuer-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -5200,7 +5201,7 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "class-id" => { call = call.class_id(value.unwrap_or("")); @@ -6046,10 +6047,10 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "issuer-id" => { - call = call.issuer_id(value.unwrap_or("")); + call = call.issuer_id( value.map(|v| arg_from_str(v, err, "issuer-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -6897,7 +6898,7 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "class-id" => { call = call.class_id(value.unwrap_or("")); @@ -8378,10 +8379,10 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = 
call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "issuer-id" => { - call = call.issuer_id(value.unwrap_or("")); + call = call.issuer_id( value.map(|v| arg_from_str(v, err, "issuer-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -9343,7 +9344,7 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "class-id" => { call = call.class_id(value.unwrap_or("")); @@ -10183,6 +10184,245 @@ where } } + async fn _media_upload(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "blob.algorithm" => Some(("blob.algorithm", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.bigstore-object-ref" => Some(("blob.bigstoreObjectRef", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.blob-ref" => Some(("blob.blobRef", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.blobstore2-info.blob-generation" => Some(("blob.blobstore2Info.blobGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.blobstore2-info.blob-id" => Some(("blob.blobstore2Info.blobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.blobstore2-info.download-read-handle" => Some(("blob.blobstore2Info.downloadReadHandle", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.blobstore2-info.read-token" => Some(("blob.blobstore2Info.readToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.blobstore2-info.upload-metadata-container" => Some(("blob.blobstore2Info.uploadMetadataContainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.content-type" => Some(("blob.contentType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.content-type-info.best-guess" => Some(("blob.contentTypeInfo.bestGuess", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.content-type-info.from-bytes" => Some(("blob.contentTypeInfo.fromBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.content-type-info.from-file-name" => Some(("blob.contentTypeInfo.fromFileName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.content-type-info.from-header" => Some(("blob.contentTypeInfo.fromHeader", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.content-type-info.from-url-path" => Some(("blob.contentTypeInfo.fromUrlPath", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "blob.cosmo-binary-reference" => Some(("blob.cosmoBinaryReference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.crc32c-hash" => Some(("blob.crc32cHash", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.blob-ref" => Some(("blob.diffChecksumsResponse.checksumsLocation.blobRef", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.blobstore2-info.blob-generation" => Some(("blob.diffChecksumsResponse.checksumsLocation.blobstore2Info.blobGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.blobstore2-info.blob-id" => Some(("blob.diffChecksumsResponse.checksumsLocation.blobstore2Info.blobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.blobstore2-info.download-read-handle" => Some(("blob.diffChecksumsResponse.checksumsLocation.blobstore2Info.downloadReadHandle", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.blobstore2-info.read-token" => Some(("blob.diffChecksumsResponse.checksumsLocation.blobstore2Info.readToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.blobstore2-info.upload-metadata-container" => Some(("blob.diffChecksumsResponse.checksumsLocation.blobstore2Info.uploadMetadataContainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.cosmo-binary-reference" => Some(("blob.diffChecksumsResponse.checksumsLocation.cosmoBinaryReference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.crc32c-hash" => 
Some(("blob.diffChecksumsResponse.checksumsLocation.crc32cHash", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.inline" => Some(("blob.diffChecksumsResponse.checksumsLocation.inline", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.length" => Some(("blob.diffChecksumsResponse.checksumsLocation.length", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.md5-hash" => Some(("blob.diffChecksumsResponse.checksumsLocation.md5Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.object-id.bucket-name" => Some(("blob.diffChecksumsResponse.checksumsLocation.objectId.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.object-id.generation" => Some(("blob.diffChecksumsResponse.checksumsLocation.objectId.generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.object-id.object-name" => Some(("blob.diffChecksumsResponse.checksumsLocation.objectId.objectName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.path" => Some(("blob.diffChecksumsResponse.checksumsLocation.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.reference-type" => Some(("blob.diffChecksumsResponse.checksumsLocation.referenceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.checksums-location.sha1-hash" => Some(("blob.diffChecksumsResponse.checksumsLocation.sha1Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"blob.diff-checksums-response.chunk-size-bytes" => Some(("blob.diffChecksumsResponse.chunkSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.blob-ref" => Some(("blob.diffChecksumsResponse.objectLocation.blobRef", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.blobstore2-info.blob-generation" => Some(("blob.diffChecksumsResponse.objectLocation.blobstore2Info.blobGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.blobstore2-info.blob-id" => Some(("blob.diffChecksumsResponse.objectLocation.blobstore2Info.blobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.blobstore2-info.download-read-handle" => Some(("blob.diffChecksumsResponse.objectLocation.blobstore2Info.downloadReadHandle", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.blobstore2-info.read-token" => Some(("blob.diffChecksumsResponse.objectLocation.blobstore2Info.readToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.blobstore2-info.upload-metadata-container" => Some(("blob.diffChecksumsResponse.objectLocation.blobstore2Info.uploadMetadataContainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.cosmo-binary-reference" => Some(("blob.diffChecksumsResponse.objectLocation.cosmoBinaryReference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.crc32c-hash" => Some(("blob.diffChecksumsResponse.objectLocation.crc32cHash", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.inline" => 
Some(("blob.diffChecksumsResponse.objectLocation.inline", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.length" => Some(("blob.diffChecksumsResponse.objectLocation.length", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.md5-hash" => Some(("blob.diffChecksumsResponse.objectLocation.md5Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.object-id.bucket-name" => Some(("blob.diffChecksumsResponse.objectLocation.objectId.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.object-id.generation" => Some(("blob.diffChecksumsResponse.objectLocation.objectId.generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.object-id.object-name" => Some(("blob.diffChecksumsResponse.objectLocation.objectId.objectName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.path" => Some(("blob.diffChecksumsResponse.objectLocation.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.reference-type" => Some(("blob.diffChecksumsResponse.objectLocation.referenceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-location.sha1-hash" => Some(("blob.diffChecksumsResponse.objectLocation.sha1Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-size-bytes" => Some(("blob.diffChecksumsResponse.objectSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-checksums-response.object-version" => Some(("blob.diffChecksumsResponse.objectVersion", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.blob-ref" => Some(("blob.diffDownloadResponse.objectLocation.blobRef", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.blobstore2-info.blob-generation" => Some(("blob.diffDownloadResponse.objectLocation.blobstore2Info.blobGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.blobstore2-info.blob-id" => Some(("blob.diffDownloadResponse.objectLocation.blobstore2Info.blobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.blobstore2-info.download-read-handle" => Some(("blob.diffDownloadResponse.objectLocation.blobstore2Info.downloadReadHandle", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.blobstore2-info.read-token" => Some(("blob.diffDownloadResponse.objectLocation.blobstore2Info.readToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.blobstore2-info.upload-metadata-container" => Some(("blob.diffDownloadResponse.objectLocation.blobstore2Info.uploadMetadataContainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.cosmo-binary-reference" => Some(("blob.diffDownloadResponse.objectLocation.cosmoBinaryReference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.crc32c-hash" => Some(("blob.diffDownloadResponse.objectLocation.crc32cHash", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.inline" => Some(("blob.diffDownloadResponse.objectLocation.inline", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "blob.diff-download-response.object-location.length" => Some(("blob.diffDownloadResponse.objectLocation.length", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.md5-hash" => Some(("blob.diffDownloadResponse.objectLocation.md5Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.object-id.bucket-name" => Some(("blob.diffDownloadResponse.objectLocation.objectId.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.object-id.generation" => Some(("blob.diffDownloadResponse.objectLocation.objectId.generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.object-id.object-name" => Some(("blob.diffDownloadResponse.objectLocation.objectId.objectName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.path" => Some(("blob.diffDownloadResponse.objectLocation.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.reference-type" => Some(("blob.diffDownloadResponse.objectLocation.referenceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-download-response.object-location.sha1-hash" => Some(("blob.diffDownloadResponse.objectLocation.sha1Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.blob-ref" => Some(("blob.diffUploadRequest.checksumsInfo.blobRef", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.blobstore2-info.blob-generation" => Some(("blob.diffUploadRequest.checksumsInfo.blobstore2Info.blobGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
+ "blob.diff-upload-request.checksums-info.blobstore2-info.blob-id" => Some(("blob.diffUploadRequest.checksumsInfo.blobstore2Info.blobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.blobstore2-info.download-read-handle" => Some(("blob.diffUploadRequest.checksumsInfo.blobstore2Info.downloadReadHandle", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.blobstore2-info.read-token" => Some(("blob.diffUploadRequest.checksumsInfo.blobstore2Info.readToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.blobstore2-info.upload-metadata-container" => Some(("blob.diffUploadRequest.checksumsInfo.blobstore2Info.uploadMetadataContainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.cosmo-binary-reference" => Some(("blob.diffUploadRequest.checksumsInfo.cosmoBinaryReference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.crc32c-hash" => Some(("blob.diffUploadRequest.checksumsInfo.crc32cHash", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.inline" => Some(("blob.diffUploadRequest.checksumsInfo.inline", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.length" => Some(("blob.diffUploadRequest.checksumsInfo.length", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.md5-hash" => Some(("blob.diffUploadRequest.checksumsInfo.md5Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.object-id.bucket-name" => Some(("blob.diffUploadRequest.checksumsInfo.objectId.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: 
ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.object-id.generation" => Some(("blob.diffUploadRequest.checksumsInfo.objectId.generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.object-id.object-name" => Some(("blob.diffUploadRequest.checksumsInfo.objectId.objectName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.path" => Some(("blob.diffUploadRequest.checksumsInfo.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.reference-type" => Some(("blob.diffUploadRequest.checksumsInfo.referenceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.checksums-info.sha1-hash" => Some(("blob.diffUploadRequest.checksumsInfo.sha1Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.blob-ref" => Some(("blob.diffUploadRequest.objectInfo.blobRef", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.blobstore2-info.blob-generation" => Some(("blob.diffUploadRequest.objectInfo.blobstore2Info.blobGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.blobstore2-info.blob-id" => Some(("blob.diffUploadRequest.objectInfo.blobstore2Info.blobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.blobstore2-info.download-read-handle" => Some(("blob.diffUploadRequest.objectInfo.blobstore2Info.downloadReadHandle", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.blobstore2-info.read-token" => Some(("blob.diffUploadRequest.objectInfo.blobstore2Info.readToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), + "blob.diff-upload-request.object-info.blobstore2-info.upload-metadata-container" => Some(("blob.diffUploadRequest.objectInfo.blobstore2Info.uploadMetadataContainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.cosmo-binary-reference" => Some(("blob.diffUploadRequest.objectInfo.cosmoBinaryReference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.crc32c-hash" => Some(("blob.diffUploadRequest.objectInfo.crc32cHash", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.inline" => Some(("blob.diffUploadRequest.objectInfo.inline", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.length" => Some(("blob.diffUploadRequest.objectInfo.length", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.md5-hash" => Some(("blob.diffUploadRequest.objectInfo.md5Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.object-id.bucket-name" => Some(("blob.diffUploadRequest.objectInfo.objectId.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.object-id.generation" => Some(("blob.diffUploadRequest.objectInfo.objectId.generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.object-id.object-name" => Some(("blob.diffUploadRequest.objectInfo.objectId.objectName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.path" => Some(("blob.diffUploadRequest.objectInfo.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.reference-type" => 
Some(("blob.diffUploadRequest.objectInfo.referenceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-info.sha1-hash" => Some(("blob.diffUploadRequest.objectInfo.sha1Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-request.object-version" => Some(("blob.diffUploadRequest.objectVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.object-version" => Some(("blob.diffUploadResponse.objectVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.blob-ref" => Some(("blob.diffUploadResponse.originalObject.blobRef", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.blobstore2-info.blob-generation" => Some(("blob.diffUploadResponse.originalObject.blobstore2Info.blobGeneration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.blobstore2-info.blob-id" => Some(("blob.diffUploadResponse.originalObject.blobstore2Info.blobId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.blobstore2-info.download-read-handle" => Some(("blob.diffUploadResponse.originalObject.blobstore2Info.downloadReadHandle", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.blobstore2-info.read-token" => Some(("blob.diffUploadResponse.originalObject.blobstore2Info.readToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.blobstore2-info.upload-metadata-container" => Some(("blob.diffUploadResponse.originalObject.blobstore2Info.uploadMetadataContainer", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"blob.diff-upload-response.original-object.cosmo-binary-reference" => Some(("blob.diffUploadResponse.originalObject.cosmoBinaryReference", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.crc32c-hash" => Some(("blob.diffUploadResponse.originalObject.crc32cHash", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.inline" => Some(("blob.diffUploadResponse.originalObject.inline", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.length" => Some(("blob.diffUploadResponse.originalObject.length", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.md5-hash" => Some(("blob.diffUploadResponse.originalObject.md5Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.object-id.bucket-name" => Some(("blob.diffUploadResponse.originalObject.objectId.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.object-id.generation" => Some(("blob.diffUploadResponse.originalObject.objectId.generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.object-id.object-name" => Some(("blob.diffUploadResponse.originalObject.objectId.objectName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.path" => Some(("blob.diffUploadResponse.originalObject.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.reference-type" => Some(("blob.diffUploadResponse.originalObject.referenceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-upload-response.original-object.sha1-hash" => 
Some(("blob.diffUploadResponse.originalObject.sha1Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-version-response.object-size-bytes" => Some(("blob.diffVersionResponse.objectSizeBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.diff-version-response.object-version" => Some(("blob.diffVersionResponse.objectVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.download-parameters.allow-gzip-compression" => Some(("blob.downloadParameters.allowGzipCompression", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "blob.download-parameters.ignore-range" => Some(("blob.downloadParameters.ignoreRange", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "blob.filename" => Some(("blob.filename", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.hash" => Some(("blob.hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.hash-verified" => Some(("blob.hashVerified", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "blob.inline" => Some(("blob.inline", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.is-potential-retry" => Some(("blob.isPotentialRetry", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), + "blob.length" => Some(("blob.length", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.md5-hash" => Some(("blob.md5Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.media-id" => Some(("blob.mediaId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.object-id.bucket-name" => Some(("blob.objectId.bucketName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.object-id.generation" => Some(("blob.objectId.generation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"blob.object-id.object-name" => Some(("blob.objectId.objectName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.path" => Some(("blob.path", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.reference-type" => Some(("blob.referenceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.sha1-hash" => Some(("blob.sha1Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.sha256-hash" => Some(("blob.sha256Hash", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.timestamp" => Some(("blob.timestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "blob.token" => Some(("blob.token", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "media-request-info.current-bytes" => Some(("mediaRequestInfo.currentBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "media-request-info.custom-data" => Some(("mediaRequestInfo.customData", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "media-request-info.diff-object-version" => Some(("mediaRequestInfo.diffObjectVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "media-request-info.final-status" => Some(("mediaRequestInfo.finalStatus", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "media-request-info.notification-type" => Some(("mediaRequestInfo.notificationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "media-request-info.request-id" => Some(("mediaRequestInfo.requestId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "media-request-info.total-bytes" => Some(("mediaRequestInfo.totalBytes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "media-request-info.total-bytes-is-estimated" => Some(("mediaRequestInfo.totalBytesIsEstimated", JsonTypeInfo { jtype: JsonType::Boolean, 
ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["algorithm", "allow-gzip-compression", "best-guess", "bigstore-object-ref", "blob", "blob-generation", "blob-id", "blob-ref", "blobstore2-info", "bucket-name", "checksums-info", "checksums-location", "chunk-size-bytes", "content-type", "content-type-info", "cosmo-binary-reference", "crc32c-hash", "current-bytes", "custom-data", "diff-checksums-response", "diff-download-response", "diff-object-version", "diff-upload-request", "diff-upload-response", "diff-version-response", "download-parameters", "download-read-handle", "filename", "final-status", "from-bytes", "from-file-name", "from-header", "from-url-path", "generation", "hash", "hash-verified", "ignore-range", "inline", "is-potential-retry", "length", "md5-hash", "media-id", "media-request-info", "notification-type", "object-id", "object-info", "object-location", "object-name", "object-size-bytes", "object-version", "original-object", "path", "read-token", "reference-type", "request-id", "sha1-hash", "sha256-hash", "timestamp", "token", "total-bytes", "total-bytes-is-estimated", "upload-metadata-container"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::UploadPrivateImageRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.media().upload(request, opt.value_of("issuer-id").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let vals = opt.values_of("mode").unwrap().collect::>(); + let protocol = calltype_from_str(vals[0], ["simple"].iter().map(|&v| v.to_string()).collect(), err); + let mut input_file = input_file_from_opts(vals[1], err); + let mime_type = input_mime_from_opts(opt.value_of("mime").unwrap_or("application/octet-stream"), err); + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Upload(UploadProtocol::Simple) => call.upload(input_file.unwrap(), mime_type.unwrap()).await, + CallType::Standard => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _offerclass_addmessage(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { @@ -10526,10 +10766,10 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "issuer-id" => { - call = call.issuer_id(value.unwrap_or("")); + call = call.issuer_id( value.map(|v| 
arg_from_str(v, err, "issuer-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -11409,7 +11649,7 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "class-id" => { call = call.class_id(value.unwrap_or("")); @@ -12474,6 +12714,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "activation-options.activation-url" => Some(("activationOptions.activationUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "activation-options.allow-reactivation" => Some(("activationOptions.allowReactivation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "allow-multiple-users-per-object" => Some(("allowMultipleUsersPerObject", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "callback-options.update-request-url" => Some(("callbackOptions.updateRequestUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "callback-options.url" => Some(("callbackOptions.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -12631,7 +12873,7 @@ where "word-mark.source-uri.localized-description.kind" => Some(("wordMark.sourceUri.localizedDescription.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "word-mark.source-uri.uri" => Some(("wordMark.sourceUri.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-multiple-users-per-object", "animation-type", "callback-options", "class-template-info", "comments", "content-description", "country-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", 
"custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "default-value", "description", "enable-single-leg-itinerary", "enable-smart-tap", "first-row-option", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "multiple-devices-and-holders-allowed-status", "redemption-issuers", "review", "review-status", "security-animation", "show-last-update-time", "source-uri", "transit-operator-name", "transit-option", "transit-type", "update-request-url", "uri", "url", "value", "version", "view-unlock-requirement", "watermark", "word-mark"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-options", "activation-url", "allow-multiple-users-per-object", "allow-reactivation", "animation-type", "callback-options", "class-template-info", "comments", "content-description", "country-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "default-value", "description", 
"enable-single-leg-itinerary", "enable-smart-tap", "first-row-option", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "multiple-devices-and-holders-allowed-status", "redemption-issuers", "review", "review-status", "security-animation", "show-last-update-time", "source-uri", "transit-operator-name", "transit-option", "transit-type", "update-request-url", "uri", "url", "value", "version", "view-unlock-requirement", "watermark", "word-mark"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -12701,10 +12943,10 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "issuer-id" => { - call = call.issuer_id(value.unwrap_or("")); + call = call.issuer_id( value.map(|v| arg_from_str(v, err, "issuer-id", "int64")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -12776,6 +13018,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "activation-options.activation-url" => Some(("activationOptions.activationUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "activation-options.allow-reactivation" => Some(("activationOptions.allowReactivation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "allow-multiple-users-per-object" => Some(("allowMultipleUsersPerObject", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "callback-options.update-request-url" => Some(("callbackOptions.updateRequestUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "callback-options.url" => Some(("callbackOptions.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -12933,7 +13177,7 @@ where "word-mark.source-uri.localized-description.kind" => Some(("wordMark.sourceUri.localizedDescription.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "word-mark.source-uri.uri" => Some(("wordMark.sourceUri.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-multiple-users-per-object", "animation-type", "callback-options", "class-template-info", "comments", "content-description", "country-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "default-value", "description", "enable-single-leg-itinerary", "enable-smart-tap", "first-row-option", "hero-image", 
"hex-background-color", "homepage-uri", "id", "info-module-data", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "multiple-devices-and-holders-allowed-status", "redemption-issuers", "review", "review-status", "security-animation", "show-last-update-time", "source-uri", "transit-operator-name", "transit-option", "transit-type", "update-request-url", "uri", "url", "value", "version", "view-unlock-requirement", "watermark", "word-mark"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-options", "activation-url", "allow-multiple-users-per-object", "allow-reactivation", "animation-type", "callback-options", "class-template-info", "comments", "content-description", "country-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "default-value", "description", "enable-single-leg-itinerary", "enable-smart-tap", "first-row-option", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "multiple-devices-and-holders-allowed-status", "redemption-issuers", "review", "review-status", "security-animation", "show-last-update-time", "source-uri", "transit-operator-name", "transit-option", "transit-type", "update-request-url", "uri", "url", "value", "version", 
"view-unlock-requirement", "watermark", "word-mark"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -13016,6 +13260,8 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "activation-options.activation-url" => Some(("activationOptions.activationUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "activation-options.allow-reactivation" => Some(("activationOptions.allowReactivation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "allow-multiple-users-per-object" => Some(("allowMultipleUsersPerObject", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "callback-options.update-request-url" => Some(("callbackOptions.updateRequestUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "callback-options.url" => Some(("callbackOptions.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -13173,7 +13419,7 @@ where "word-mark.source-uri.localized-description.kind" => Some(("wordMark.sourceUri.localizedDescription.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "word-mark.source-uri.uri" => Some(("wordMark.sourceUri.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["allow-multiple-users-per-object", "animation-type", "callback-options", "class-template-info", "comments", "content-description", "country-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", 
"custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "default-value", "description", "enable-single-leg-itinerary", "enable-smart-tap", "first-row-option", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "multiple-devices-and-holders-allowed-status", "redemption-issuers", "review", "review-status", "security-animation", "show-last-update-time", "source-uri", "transit-operator-name", "transit-option", "transit-type", "update-request-url", "uri", "url", "value", "version", "view-unlock-requirement", "watermark", "word-mark"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-options", "activation-url", "allow-multiple-users-per-object", "allow-reactivation", "animation-type", "callback-options", "class-template-info", "comments", "content-description", "country-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "default-value", "description", "enable-single-leg-itinerary", "enable-smart-tap", "first-row-option", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", 
"localized-issuer-name", "logo", "multiple-devices-and-holders-allowed-status", "redemption-issuers", "review", "review-status", "security-animation", "show-last-update-time", "source-uri", "transit-operator-name", "transit-option", "transit-type", "update-request-url", "uri", "url", "value", "version", "view-unlock-requirement", "watermark", "word-mark"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -13408,6 +13654,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "activation-status.state" => Some(("activationStatus.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.kind" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.language" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.value" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -13499,6 +13746,8 @@ where "barcode.type" => Some(("barcode.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "barcode.value" => Some(("barcode.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "class-id" => Some(("classId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "class-reference.activation-options.activation-url" => Some(("classReference.activationOptions.activationUrl", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), + "class-reference.activation-options.allow-reactivation" => Some(("classReference.activationOptions.allowReactivation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "class-reference.allow-multiple-users-per-object" => Some(("classReference.allowMultipleUsersPerObject", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "class-reference.callback-options.update-request-url" => Some(("classReference.callbackOptions.updateRequestUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "class-reference.callback-options.url" => Some(("classReference.callbackOptions.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -13664,6 +13913,7 @@ where "custom-ticket-status.default-value.language" => Some(("customTicketStatus.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-ticket-status.default-value.value" => Some(("customTicketStatus.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-ticket-status.kind" => Some(("customTicketStatus.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-context.device-token" => Some(("deviceContext.deviceToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disable-expiration-notification" => Some(("disableExpirationNotification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "grouping-info.grouping-id" => Some(("groupingInfo.groupingId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "grouping-info.sort-index" => Some(("groupingInfo.sortIndex", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -13774,7 +14024,7 @@ where "valid-time-interval.start.date" => Some(("validTimeInterval.start.date", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "algorithm", "allow-multiple-users-per-object", "alternate-text", "android-app-link-info", "animation-type", "app-link-data", "app-logo-image", "app-target", "arrival-date-time", "barcode", "callback-options", "carriage", "class-id", "class-reference", "class-template-info", "coach", "comments", "concession-category", "confirmation-code", "content-description", "country-code", "currency-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-ticket-status", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "date", "default-value", "departure-date-time", "description", "destination-name", "destination-station-code", "disable-expiration-notification", "discount-message", "enable-single-leg-itinerary", "enable-smart-tap", "end", "face-value", "fare-class", "fare-name", "first-row-option", "grouping-id", "grouping-info", "has-linked-device", "has-users", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "ios-app-link-info", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "micros", "multiple-devices-and-holders-allowed-status", "origin-name", "origin-station-code", "other-restrictions", "passenger-names", "passenger-type", "period-millis", "platform", 
"purchase-date-time", "purchase-details", "purchase-price", "purchase-receipt-number", "redemption-issuers", "render-encoding", "review", "review-status", "rotating-barcode", "route-restrictions", "route-restrictions-details", "seat", "seat-assignment", "security-animation", "show-code-text", "show-last-update-time", "smart-tap-redemption-value", "sort-index", "source-uri", "start", "state", "target-uri", "ticket-cost", "ticket-leg", "ticket-number", "ticket-restrictions", "ticket-seat", "ticket-status", "time-restrictions", "title", "totp-details", "transit-operator-name", "transit-option", "transit-terminus-name", "transit-type", "trip-id", "trip-type", "type", "update-request-url", "uri", "url", "valid-time-interval", "value", "value-pattern", "version", "view-unlock-requirement", "watermark", "web-app-link-info", "word-mark", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "activation-options", "activation-status", "activation-url", "algorithm", "allow-multiple-users-per-object", "allow-reactivation", "alternate-text", "android-app-link-info", "animation-type", "app-link-data", "app-logo-image", "app-target", "arrival-date-time", "barcode", "callback-options", "carriage", "class-id", "class-reference", "class-template-info", "coach", "comments", "concession-category", "confirmation-code", "content-description", "country-code", "currency-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-ticket-status", 
"custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "date", "default-value", "departure-date-time", "description", "destination-name", "destination-station-code", "device-context", "device-token", "disable-expiration-notification", "discount-message", "enable-single-leg-itinerary", "enable-smart-tap", "end", "face-value", "fare-class", "fare-name", "first-row-option", "grouping-id", "grouping-info", "has-linked-device", "has-users", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "ios-app-link-info", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "micros", "multiple-devices-and-holders-allowed-status", "origin-name", "origin-station-code", "other-restrictions", "passenger-names", "passenger-type", "period-millis", "platform", "purchase-date-time", "purchase-details", "purchase-price", "purchase-receipt-number", "redemption-issuers", "render-encoding", "review", "review-status", "rotating-barcode", "route-restrictions", "route-restrictions-details", "seat", "seat-assignment", "security-animation", "show-code-text", "show-last-update-time", "smart-tap-redemption-value", "sort-index", "source-uri", "start", "state", "target-uri", "ticket-cost", "ticket-leg", "ticket-number", "ticket-restrictions", "ticket-seat", "ticket-status", "time-restrictions", "title", "totp-details", "transit-operator-name", "transit-option", "transit-terminus-name", "transit-type", "trip-id", "trip-type", "type", "update-request-url", "uri", "url", "valid-time-interval", "value", "value-pattern", "version", "view-unlock-requirement", "watermark", "web-app-link-info", "word-mark", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -13844,7 +14094,7 @@ where call = call.token(value.unwrap_or("")); }, "max-results" => { - call = 
call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "int32")).unwrap_or(-0)); }, "class-id" => { call = call.class_id(value.unwrap_or("")); @@ -13919,6 +14169,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { + "activation-status.state" => Some(("activationStatus.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.kind" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.language" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.value" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -14010,6 +14261,8 @@ where "barcode.type" => Some(("barcode.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "barcode.value" => Some(("barcode.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "class-id" => Some(("classId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "class-reference.activation-options.activation-url" => Some(("classReference.activationOptions.activationUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "class-reference.activation-options.allow-reactivation" => Some(("classReference.activationOptions.allowReactivation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
"class-reference.allow-multiple-users-per-object" => Some(("classReference.allowMultipleUsersPerObject", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "class-reference.callback-options.update-request-url" => Some(("classReference.callbackOptions.updateRequestUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "class-reference.callback-options.url" => Some(("classReference.callbackOptions.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -14175,6 +14428,7 @@ where "custom-ticket-status.default-value.language" => Some(("customTicketStatus.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-ticket-status.default-value.value" => Some(("customTicketStatus.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-ticket-status.kind" => Some(("customTicketStatus.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-context.device-token" => Some(("deviceContext.deviceToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disable-expiration-notification" => Some(("disableExpirationNotification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "grouping-info.grouping-id" => Some(("groupingInfo.groupingId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "grouping-info.sort-index" => Some(("groupingInfo.sortIndex", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -14285,7 +14539,7 @@ where "valid-time-interval.start.date" => Some(("validTimeInterval.start.date", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "algorithm", "allow-multiple-users-per-object", "alternate-text", "android-app-link-info", 
"animation-type", "app-link-data", "app-logo-image", "app-target", "arrival-date-time", "barcode", "callback-options", "carriage", "class-id", "class-reference", "class-template-info", "coach", "comments", "concession-category", "confirmation-code", "content-description", "country-code", "currency-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-ticket-status", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "date", "default-value", "departure-date-time", "description", "destination-name", "destination-station-code", "disable-expiration-notification", "discount-message", "enable-single-leg-itinerary", "enable-smart-tap", "end", "face-value", "fare-class", "fare-name", "first-row-option", "grouping-id", "grouping-info", "has-linked-device", "has-users", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "ios-app-link-info", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "micros", "multiple-devices-and-holders-allowed-status", "origin-name", "origin-station-code", "other-restrictions", "passenger-names", "passenger-type", "period-millis", "platform", "purchase-date-time", "purchase-details", "purchase-price", "purchase-receipt-number", "redemption-issuers", "render-encoding", "review", "review-status", "rotating-barcode", "route-restrictions", "route-restrictions-details", "seat", 
"seat-assignment", "security-animation", "show-code-text", "show-last-update-time", "smart-tap-redemption-value", "sort-index", "source-uri", "start", "state", "target-uri", "ticket-cost", "ticket-leg", "ticket-number", "ticket-restrictions", "ticket-seat", "ticket-status", "time-restrictions", "title", "totp-details", "transit-operator-name", "transit-option", "transit-terminus-name", "transit-type", "trip-id", "trip-type", "type", "update-request-url", "uri", "url", "valid-time-interval", "value", "value-pattern", "version", "view-unlock-requirement", "watermark", "web-app-link-info", "word-mark", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "activation-options", "activation-status", "activation-url", "algorithm", "allow-multiple-users-per-object", "allow-reactivation", "alternate-text", "android-app-link-info", "animation-type", "app-link-data", "app-logo-image", "app-target", "arrival-date-time", "barcode", "callback-options", "carriage", "class-id", "class-reference", "class-template-info", "coach", "comments", "concession-category", "confirmation-code", "content-description", "country-code", "currency-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-ticket-status", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "date", "default-value", "departure-date-time", "description", "destination-name", "destination-station-code", "device-context", "device-token", 
"disable-expiration-notification", "discount-message", "enable-single-leg-itinerary", "enable-smart-tap", "end", "face-value", "fare-class", "fare-name", "first-row-option", "grouping-id", "grouping-info", "has-linked-device", "has-users", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "ios-app-link-info", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "micros", "multiple-devices-and-holders-allowed-status", "origin-name", "origin-station-code", "other-restrictions", "passenger-names", "passenger-type", "period-millis", "platform", "purchase-date-time", "purchase-details", "purchase-price", "purchase-receipt-number", "redemption-issuers", "render-encoding", "review", "review-status", "rotating-barcode", "route-restrictions", "route-restrictions-details", "seat", "seat-assignment", "security-animation", "show-code-text", "show-last-update-time", "smart-tap-redemption-value", "sort-index", "source-uri", "start", "state", "target-uri", "ticket-cost", "ticket-leg", "ticket-number", "ticket-restrictions", "ticket-seat", "ticket-status", "time-restrictions", "title", "totp-details", "transit-operator-name", "transit-option", "transit-terminus-name", "transit-type", "trip-id", "trip-type", "type", "update-request-url", "uri", "url", "valid-time-interval", "value", "value-pattern", "version", "view-unlock-requirement", "watermark", "web-app-link-info", "word-mark", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -14368,6 +14622,7 @@ where let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ + "activation-status.state" => Some(("activationStatus.state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.kind" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.language" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "app-link-data.android-app-link-info.app-logo-image.content-description.default-value.value" => Some(("appLinkData.androidAppLinkInfo.appLogoImage.contentDescription.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -14459,6 +14714,8 @@ where "barcode.type" => Some(("barcode.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "barcode.value" => Some(("barcode.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "class-id" => Some(("classId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "class-reference.activation-options.activation-url" => Some(("classReference.activationOptions.activationUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "class-reference.activation-options.allow-reactivation" => Some(("classReference.activationOptions.allowReactivation", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "class-reference.allow-multiple-users-per-object" => Some(("classReference.allowMultipleUsersPerObject", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "class-reference.callback-options.update-request-url" => Some(("classReference.callbackOptions.updateRequestUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), 
"class-reference.callback-options.url" => Some(("classReference.callbackOptions.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -14624,6 +14881,7 @@ where "custom-ticket-status.default-value.language" => Some(("customTicketStatus.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-ticket-status.default-value.value" => Some(("customTicketStatus.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "custom-ticket-status.kind" => Some(("customTicketStatus.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "device-context.device-token" => Some(("deviceContext.deviceToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "disable-expiration-notification" => Some(("disableExpirationNotification", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "grouping-info.grouping-id" => Some(("groupingInfo.groupingId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "grouping-info.sort-index" => Some(("groupingInfo.sortIndex", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), @@ -14734,7 +14992,7 @@ where "valid-time-interval.start.date" => Some(("validTimeInterval.start.date", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "algorithm", "allow-multiple-users-per-object", "alternate-text", "android-app-link-info", "animation-type", "app-link-data", "app-logo-image", "app-target", "arrival-date-time", "barcode", "callback-options", "carriage", "class-id", "class-reference", "class-template-info", "coach", "comments", "concession-category", "confirmation-code", "content-description", "country-code", "currency-code", "custom-carriage-label", "custom-coach-label", 
"custom-concession-category", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-ticket-status", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "date", "default-value", "departure-date-time", "description", "destination-name", "destination-station-code", "disable-expiration-notification", "discount-message", "enable-single-leg-itinerary", "enable-smart-tap", "end", "face-value", "fare-class", "fare-name", "first-row-option", "grouping-id", "grouping-info", "has-linked-device", "has-users", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "ios-app-link-info", "issuer-name", "kind", "language", "language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "micros", "multiple-devices-and-holders-allowed-status", "origin-name", "origin-station-code", "other-restrictions", "passenger-names", "passenger-type", "period-millis", "platform", "purchase-date-time", "purchase-details", "purchase-price", "purchase-receipt-number", "redemption-issuers", "render-encoding", "review", "review-status", "rotating-barcode", "route-restrictions", "route-restrictions-details", "seat", "seat-assignment", "security-animation", "show-code-text", "show-last-update-time", "smart-tap-redemption-value", "sort-index", "source-uri", "start", "state", "target-uri", "ticket-cost", "ticket-leg", "ticket-number", "ticket-restrictions", "ticket-seat", "ticket-status", "time-restrictions", "title", "totp-details", "transit-operator-name", "transit-option", 
"transit-terminus-name", "transit-type", "trip-id", "trip-type", "type", "update-request-url", "uri", "url", "valid-time-interval", "value", "value-pattern", "version", "view-unlock-requirement", "watermark", "web-app-link-info", "word-mark", "zone"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "activation-options", "activation-status", "activation-url", "algorithm", "allow-multiple-users-per-object", "allow-reactivation", "alternate-text", "android-app-link-info", "animation-type", "app-link-data", "app-logo-image", "app-target", "arrival-date-time", "barcode", "callback-options", "carriage", "class-id", "class-reference", "class-template-info", "coach", "comments", "concession-category", "confirmation-code", "content-description", "country-code", "currency-code", "custom-carriage-label", "custom-coach-label", "custom-concession-category", "custom-concession-category-label", "custom-confirmation-code-label", "custom-discount-message-label", "custom-fare-class", "custom-fare-class-label", "custom-fare-name-label", "custom-other-restrictions-label", "custom-platform-label", "custom-purchase-face-value-label", "custom-purchase-price-label", "custom-purchase-receipt-number-label", "custom-route-restrictions-details-label", "custom-route-restrictions-label", "custom-seat-label", "custom-ticket-number-label", "custom-ticket-status", "custom-time-restrictions-label", "custom-transit-terminus-name-label", "custom-zone-label", "date", "default-value", "departure-date-time", "description", "destination-name", "destination-station-code", "device-context", "device-token", "disable-expiration-notification", "discount-message", "enable-single-leg-itinerary", "enable-smart-tap", "end", "face-value", "fare-class", "fare-name", "first-row-option", "grouping-id", "grouping-info", "has-linked-device", "has-users", "hero-image", "hex-background-color", "homepage-uri", "id", "info-module-data", "ios-app-link-info", "issuer-name", "kind", "language", 
"language-override", "list-template-override", "localized-description", "localized-issuer-name", "logo", "micros", "multiple-devices-and-holders-allowed-status", "origin-name", "origin-station-code", "other-restrictions", "passenger-names", "passenger-type", "period-millis", "platform", "purchase-date-time", "purchase-details", "purchase-price", "purchase-receipt-number", "redemption-issuers", "render-encoding", "review", "review-status", "rotating-barcode", "route-restrictions", "route-restrictions-details", "seat", "seat-assignment", "security-animation", "show-code-text", "show-last-update-time", "smart-tap-redemption-value", "sort-index", "source-uri", "start", "state", "target-uri", "ticket-cost", "ticket-leg", "ticket-number", "ticket-restrictions", "ticket-seat", "ticket-status", "time-restrictions", "title", "totp-details", "transit-operator-name", "transit-option", "transit-terminus-name", "transit-type", "trip-id", "trip-type", "type", "update-request-url", "uri", "url", "valid-time-interval", "value", "value-pattern", "version", "view-unlock-requirement", "watermark", "web-app-link-info", "word-mark", "zone"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -14794,6 +15052,104 @@ where } } + async fn _walletobjects_v1_private_content_upload_private_data(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { 
+ err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "issuer-id" => Some(("issuerId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "text.body.default-value.kind" => Some(("text.body.defaultValue.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "text.body.default-value.language" => Some(("text.body.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "text.body.default-value.value" => Some(("text.body.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "text.body.kind" => Some(("text.body.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "text.header.default-value.kind" => Some(("text.header.defaultValue.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "text.header.default-value.language" => Some(("text.header.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "text.header.default-value.value" => Some(("text.header.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "text.header.kind" => Some(("text.header.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uri.description.default-value.kind" => Some(("uri.description.defaultValue.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uri.description.default-value.language" => Some(("uri.description.defaultValue.language", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uri.description.default-value.value" => Some(("uri.description.defaultValue.value", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uri.description.kind" => Some(("uri.description.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "uri.uri" => 
Some(("uri.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["body", "default-value", "description", "header", "issuer-id", "kind", "language", "text", "uri", "value"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::UploadPrivateDataRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.walletobjects().v1_private_content_upload_private_data(request); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to 
work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -15093,6 +15449,17 @@ where } } }, + ("media", Some(opt)) => { + match opt.subcommand() { + ("upload", Some(opt)) => { + call_result = self._media_upload(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("media".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, ("offerclass", Some(opt)) => { match opt.subcommand() { ("addmessage", Some(opt)) => { @@ -15222,6 +15589,17 @@ where } } }, + ("walletobjects", Some(opt)) => { + match opt.subcommand() { + ("v1-private-content-upload-private-data", Some(opt)) => { + call_result = self._walletobjects_v1_private_content_upload_private_data(opt, dry_run, &mut err).await; + }, + _ => { + err.issues.push(CLIError::MissingMethodError("walletobjects".to_string())); + writeln!(io::stderr(), "{}\n", opt.usage()).ok(); + } + } + }, _ => { err.issues.push(CLIError::MissingCommandError); writeln!(io::stderr(), "{}\n", self.opt.usage()).ok(); @@ -15294,6 +15672,7 @@ where #[tokio::main] async fn main() { let mut exit_status = 0i32; + let upload_value_names = ["mode", "file"]; let arg_data = [ ("eventticketclass", "methods: 'addmessage', 'get', 'insert', 'list', 'patch' and 'update'", vec![ ("addmessage", @@ -16909,6 +17288,43 @@ async fn main() { ]), ]), + ("media", "methods: 'upload'", vec![ + ("upload", + Some(r##"Uploads a private image and returns an Id to be used in its place."##), + "Details at http://byron.github.io/google-apis-rs/google_walletobjects1_cli/media_upload", + vec![ + (Some(r##"issuer-id"##), + None, + Some(r##"The ID of the issuer sending the image."##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + 
Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"mode"##), + Some(r##"u"##), + Some(r##"Specify the upload protocol (simple) and the file to upload"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ("offerclass", "methods: 'addmessage', 'get', 'insert', 'list', 'patch' and 'update'", vec![ ("addmessage", Some(r##"Adds a message to the offer class referenced by the given class ID."##), @@ -17575,11 +17991,36 @@ async fn main() { ]), ]), + ("walletobjects", "methods: 'v1-private-content-upload-private-data'", vec![ + ("v1-private-content-upload-private-data", + Some(r##"Upload private data (text or URI) and returns an Id to be used in its place."##), + "Details at http://byron.github.io/google-apis-rs/google_walletobjects1_cli/walletobjects_v1-private-content-upload-private-data", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ]), + ]; let mut app = App::new("walletobjects1") .author("Sebastian Thiel ") - .version("4.0.4+20220928") + .version("5.0.2+20230124") .about("API for issuers to save and manage Google Wallet Objects.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_walletobjects1_cli") .arg(Arg::with_name("url") @@ -17632,6 +18073,17 
@@ async fn main() { if let &Some(multi) = multi { arg = arg.multiple(multi); } + if arg_name_str == "mode" { + arg = arg.number_of_values(2); + arg = arg.value_names(&upload_value_names); + + scmd = scmd.arg(Arg::with_name("mime") + .short("m") + .requires("mode") + .required(false) + .help("The file's mime time, like 'application/octet-stream'") + .takes_value(true)); + } scmd = scmd.arg(arg); } mcmd = mcmd.subcommand(scmd); diff --git a/gen/walletobjects1/Cargo.toml b/gen/walletobjects1/Cargo.toml index ce81eb0039..4d25ee5dd9 100644 --- a/gen/walletobjects1/Cargo.toml +++ b/gen/walletobjects1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-walletobjects1" -version = "5.0.2-beta-1+20230124" +version = "5.0.2+20230124" authors = ["Sebastian Thiel "] description = "A complete library to interact with Walletobjects (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/walletobjects1" homepage = "https://developers.google.com/pay/passes" -documentation = "https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124" +documentation = "https://docs.rs/google-walletobjects1/5.0.2+20230124" license = "MIT" keywords = ["walletobjects", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/walletobjects1/README.md b/gen/walletobjects1/README.md index 84f36faa00..52b21c7916 100644 --- a/gen/walletobjects1/README.md +++ b/gen/walletobjects1/README.md @@ -5,59 +5,59 @@ DO NOT EDIT ! --> The `google-walletobjects1` library allows access to all features of the *Google Walletobjects* service. -This documentation was generated from *Walletobjects* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *walletobjects:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. 
+This documentation was generated from *Walletobjects* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *walletobjects:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Walletobjects* *v1* API can be found at the [official documentation site](https://developers.google.com/pay/passes). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/Walletobjects) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/Walletobjects) ... * eventticketclass - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketclasUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketclasListCall), 
[*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketclasUpdateCall) * eventticketobject - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketobjectListCall), [*modifylinkedofferobjects*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketobjectModifylinkedofferobjectCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::EventticketobjectUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketobjectListCall), [*modifylinkedofferobjects*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketobjectModifylinkedofferobjectCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketobjectPatchCall) and 
[*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::EventticketobjectUpdateCall) * flightclass - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightclasUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightclasUpdateCall) * flightobject - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightobjectInsertCall), 
[*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightobjectListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::FlightobjectUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightobjectListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::FlightobjectUpdateCall) * genericclass - * [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericclasUpdateCall) + * [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericclasInsertCall), 
[*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericclasUpdateCall) * genericobject - * [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericobjectListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GenericobjectUpdateCall) + * [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericobjectListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GenericobjectUpdateCall) * giftcardclass - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardclasInsertCall), 
[*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardclasUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardclasUpdateCall) * giftcardobject - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardobjectListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::GiftcardobjectUpdateCall) -* [issuer](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::Issuer) - * 
[*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::IssuerGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::IssuerInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::IssuerListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::IssuerPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::IssuerUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardobjectListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::GiftcardobjectUpdateCall) +* [issuer](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::Issuer) + * [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::IssuerGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::IssuerInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::IssuerListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::IssuerPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::IssuerUpdateCall) * jwt - * 
[*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::JwtInsertCall) + * [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::JwtInsertCall) * loyaltyclass - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyclasUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyclasUpdateCall) * loyaltyobject - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyobjectGetCall), 
[*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyobjectListCall), [*modifylinkedofferobjects*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyobjectModifylinkedofferobjectCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::LoyaltyobjectUpdateCall) -* [media](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::Media) - * [*upload*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::MediaUploadCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyobjectListCall), [*modifylinkedofferobjects*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyobjectModifylinkedofferobjectCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::LoyaltyobjectUpdateCall) +* [media](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::Media) + * [*upload*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::MediaUploadCall) * offerclass - * 
[*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferclasUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferclasUpdateCall) * offerobject - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferobjectListCall), 
[*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::OfferobjectUpdateCall) -* [permissions](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::Permission) - * [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::PermissionGetCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::PermissionUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferobjectListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::OfferobjectUpdateCall) +* [permissions](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::Permission) + * [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::PermissionGetCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::PermissionUpdateCall) * smarttap - * [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::SmarttapInsertCall) + * [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::SmarttapInsertCall) * transitclass - * 
[*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitclasUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitclasAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitclasGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitclasInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitclasListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitclasPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitclasUpdateCall) * transitobject - * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitobjectListCall), 
[*patch*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::TransitobjectUpdateCall) + * [*addmessage*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitobjectAddmessageCall), [*get*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitobjectGetCall), [*insert*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitobjectInsertCall), [*list*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitobjectListCall), [*patch*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitobjectPatchCall) and [*update*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::TransitobjectUpdateCall) * walletobjects - * [*v1 private content upload private data*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::WalletobjectV1PrivateContentUploadPrivateDataCall) + * [*v1 private content upload private data*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::WalletobjectV1PrivateContentUploadPrivateDataCall) Upload supported by ... -* [*upload media*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/api::MediaUploadCall) +* [*upload media*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/api::MediaUploadCall) @@ -65,17 +65,17 @@ Upload supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/Walletobjects)** +* **[Hub](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/Walletobjects)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::CallBuilder) -* **[Resources](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::CallBuilder) +* **[Resources](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::Part)** + * **[Parts](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -177,17 +177,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -197,29 +197,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::Delegate) to the -[Method Builder](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::CallBuilder) before making the final `doit()` call. +You may alter the way a `doit()` method is called by providing a [delegate](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::Delegate) to the +[Method Builder](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::RequestValue) and -[decodable](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::RequestValue) and +[decodable](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::Part) which are identifiable by name, which will be sent to +Most optionals are considered [Parts](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-walletobjects1/5.0.2-beta-1+20230124/google_walletobjects1/client::RequestValue) are moved +* [request values](https://docs.rs/google-walletobjects1/5.0.2+20230124/google_walletobjects1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/walletobjects1/src/api.rs b/gen/walletobjects1/src/api.rs index eec8ea931e..ea127ccf5e 100644 --- a/gen/walletobjects1/src/api.rs +++ b/gen/walletobjects1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> Walletobjects { Walletobjects { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://walletobjects.googleapis.com/".to_string(), _root_url: "https://walletobjects.googleapis.com/".to_string(), } @@ -193,7 +193,7 @@ impl<'a, S> Walletobjects { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/walletobjects1/src/client.rs b/gen/walletobjects1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/walletobjects1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/walletobjects1/src/lib.rs b/gen/walletobjects1/src/lib.rs index 23ad8810dc..f7edf04ae3 100644 --- a/gen/walletobjects1/src/lib.rs +++ b/gen/walletobjects1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Walletobjects* crate version *5.0.2-beta-1+20230124*, where *20230124* is the exact revision of the *walletobjects:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Walletobjects* crate version *5.0.2+20230124*, where *20230124* is the exact revision of the *walletobjects:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Walletobjects* *v1* API can be found at the //! [official documentation site](https://developers.google.com/pay/passes). diff --git a/gen/webfonts1-cli/Cargo.toml b/gen/webfonts1-cli/Cargo.toml index c017150647..5947426725 100644 --- a/gen/webfonts1-cli/Cargo.toml +++ b/gen/webfonts1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-webfonts1-cli" -version = "4.0.1+20220215" +version = "5.0.2+20221206" authors = ["Sebastian Thiel "] description = "A complete library to interact with Webfonts (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/webfonts1-cli" @@ -20,13 +20,13 @@ name = "webfonts1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-webfonts1] path = "../webfonts1" -version = "4.0.1+20220215" +version = "5.0.2+20221206" + diff --git a/gen/webfonts1-cli/README.md b/gen/webfonts1-cli/README.md index 049078f2d9..5118cda013 100644 --- a/gen/webfonts1-cli/README.md +++ b/gen/webfonts1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage 
-This documentation was generated from the *Webfonts* API at revision *20220215*. The CLI is at version *4.0.1*. +This documentation was generated from the *Webfonts* API at revision *20221206*. The CLI is at version *5.0.2*. ```bash webfonts1 [options] diff --git a/gen/webfonts1-cli/mkdocs.yml b/gen/webfonts1-cli/mkdocs.yml index d98cefeb87..052add493a 100644 --- a/gen/webfonts1-cli/mkdocs.yml +++ b/gen/webfonts1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Webfonts v4.0.1+20220215 +site_name: Webfonts v5.0.2+20221206 site_url: http://byron.github.io/google-apis-rs/google-webfonts1-cli site_description: A complete library to interact with Webfonts (protocol v1) @@ -7,9 +7,10 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/webfonts1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['webfonts_list.md', 'Webfonts', 'List'] +nav: +- Home: 'index.md' +- 'Webfonts': + - 'List': 'webfonts_list.md' theme: readthedocs diff --git a/gen/webfonts1-cli/src/client.rs b/gen/webfonts1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/webfonts1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user 
comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/webfonts1-cli/src/main.rs b/gen/webfonts1-cli/src/main.rs index 85993ea884..842eb2f1ca 100644 --- a/gen/webfonts1-cli/src/main.rs +++ b/gen/webfonts1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_webfonts1::{api, Error, oauth2}; +use google_webfonts1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -216,7 +215,7 @@ async fn main() { let mut app = App::new("webfonts1") .author("Sebastian Thiel ") - .version("4.0.1+20220215") + .version("5.0.2+20221206") .about("The Google Web Fonts Developer API lets you retrieve information about web fonts served by Google.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_webfonts1_cli") .arg(Arg::with_name("folder") diff --git a/gen/webfonts1/Cargo.toml b/gen/webfonts1/Cargo.toml index 94881a9da0..05fcfcf9fd 100644 --- a/gen/webfonts1/Cargo.toml +++ b/gen/webfonts1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-webfonts1" -version = "5.0.2-beta-1+20221206" +version = "5.0.2+20221206" authors = ["Sebastian Thiel "] description = "A complete library to interact with Webfonts (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/webfonts1" homepage = "https://developers.google.com/fonts/docs/developer_api" -documentation = "https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206" +documentation = "https://docs.rs/google-webfonts1/5.0.2+20221206" license = "MIT" keywords 
= ["webfonts", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/webfonts1/README.md b/gen/webfonts1/README.md index 913b26bd04..e3e5d8bbaa 100644 --- a/gen/webfonts1/README.md +++ b/gen/webfonts1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-webfonts1` library allows access to all features of the *Google Webfonts* service. -This documentation was generated from *Webfonts* crate version *5.0.2-beta-1+20221206*, where *20221206* is the exact revision of the *webfonts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Webfonts* crate version *5.0.2+20221206*, where *20221206* is the exact revision of the *webfonts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Webfonts* *v1* API can be found at the [official documentation site](https://developers.google.com/fonts/docs/developer_api). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/Webfonts) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/Webfonts) ... 
-* [webfonts](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/api::Webfont) - * [*list*](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/api::WebfontListCall) +* [webfonts](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/api::Webfont) + * [*list*](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/api::WebfontListCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/Webfonts)** +* **[Hub](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/Webfonts)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::CallBuilder) -* **[Resources](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::CallBuilder) +* **[Resources](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::Part)** + * **[Parts](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::Part)** * a collection of properties * never directly used in *Activities* -* 
**[Activities](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -119,17 +119,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::Result), should be read by you to obtain the media. 
-If such a method also supports a [Response Result](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -139,29 +139,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::Delegate) to the -[Method Builder](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::Delegate) to the +[Method Builder](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::RequestValue) and -[decodable](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::RequestValue) and +[decodable](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-webfonts1/5.0.2-beta-1+20221206/google_webfonts1/client::RequestValue) are moved +* [request values](https://docs.rs/google-webfonts1/5.0.2+20221206/google_webfonts1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/webfonts1/src/api.rs b/gen/webfonts1/src/api.rs index 83d98b5d82..4e216e9441 100644 --- a/gen/webfonts1/src/api.rs +++ b/gen/webfonts1/src/api.rs @@ -98,7 +98,7 @@ impl<'a, S> Webfonts { Webfonts { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://webfonts.googleapis.com/".to_string(), _root_url: "https://webfonts.googleapis.com/".to_string(), } @@ -109,7 +109,7 @@ impl<'a, S> Webfonts { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/webfonts1/src/client.rs b/gen/webfonts1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/webfonts1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/webfonts1/src/lib.rs b/gen/webfonts1/src/lib.rs index 5dcefeeb9f..e1fbefd3e4 100644 --- a/gen/webfonts1/src/lib.rs +++ b/gen/webfonts1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Webfonts* crate version *5.0.2-beta-1+20221206*, where *20221206* is the exact revision of the *webfonts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Webfonts* crate version *5.0.2+20221206*, where *20221206* is the exact revision of the *webfonts:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Webfonts* *v1* API can be found at the //! [official documentation site](https://developers.google.com/fonts/docs/developer_api). diff --git a/gen/webmasters3-cli/Cargo.toml b/gen/webmasters3-cli/Cargo.toml index af716b63e9..fd35bb4ed7 100644 --- a/gen/webmasters3-cli/Cargo.toml +++ b/gen/webmasters3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-webmasters3-cli" -version = "4.0.1+20190428" +version = "5.0.2+20190428" authors = ["Sebastian Thiel "] description = "A complete library to interact with webmasters (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/webmasters3-cli" @@ -20,13 +20,13 @@ name = "webmasters3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-webmasters3] path = "../webmasters3" -version = "4.0.1+20190428" +version = "5.0.2+20190428" + diff --git a/gen/webmasters3-cli/README.md b/gen/webmasters3-cli/README.md index e3692364d7..738dad2958 100644 --- a/gen/webmasters3-cli/README.md +++ b/gen/webmasters3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *webmasters* API at revision *20190428*. The CLI is at version *4.0.1*. +This documentation was generated from the *webmasters* API at revision *20190428*. The CLI is at version *5.0.2*. ```bash webmasters3 [options] diff --git a/gen/webmasters3-cli/mkdocs.yml b/gen/webmasters3-cli/mkdocs.yml index 097a762fa4..89c0735ede 100644 --- a/gen/webmasters3-cli/mkdocs.yml +++ b/gen/webmasters3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: webmasters v4.0.1+20190428 +site_name: webmasters v5.0.2+20190428 site_url: http://byron.github.io/google-apis-rs/google-webmasters3-cli site_description: A complete library to interact with webmasters (protocol v3) @@ -7,17 +7,20 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/webmasters3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['searchanalytics_query.md', 'Searchanalytics', 'Query'] -- ['sitemaps_delete.md', 'Sitemaps', 'Delete'] -- ['sitemaps_get.md', 'Sitemaps', 'Get'] -- ['sitemaps_list.md', 'Sitemaps', 'List'] -- ['sitemaps_submit.md', 'Sitemaps', 'Submit'] -- ['sites_add.md', 'Sites', 'Add'] -- ['sites_delete.md', 'Sites', 'Delete'] -- ['sites_get.md', 'Sites', 'Get'] -- ['sites_list.md', 'Sites', 'List'] +nav: +- Home: 'index.md' +- 'Searchanalytics': + - 'Query': 'searchanalytics_query.md' +- 'Sitemaps': + - 'Delete': 'sitemaps_delete.md' + - 'Get': 'sitemaps_get.md' + - 'List': 'sitemaps_list.md' + - 'Submit': 'sitemaps_submit.md' +- 'Sites': + - 'Add': 'sites_add.md' + - 'Delete': 'sites_delete.md' + - 'Get': 'sites_get.md' + - 'List': 'sites_list.md' theme: readthedocs diff --git a/gen/webmasters3-cli/src/client.rs b/gen/webmasters3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/webmasters3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; 
-use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/webmasters3-cli/src/main.rs b/gen/webmasters3-cli/src/main.rs index 08805b727a..e803b24413 100644 --- a/gen/webmasters3-cli/src/main.rs +++ b/gen/webmasters3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_webmasters3::{api, Error, oauth2}; +use google_webmasters3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -864,7 +863,7 @@ async fn main() { let mut app = App::new("webmasters3") .author("Sebastian Thiel ") - .version("4.0.1+20190428") + .version("5.0.2+20190428") .about("View Google Search Console data for your verified sites.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_webmasters3_cli") .arg(Arg::with_name("url") diff --git a/gen/webmasters3/Cargo.toml b/gen/webmasters3/Cargo.toml index 9402db1ad3..bf5e569a06 100644 --- a/gen/webmasters3/Cargo.toml +++ b/gen/webmasters3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-webmasters3" -version = "5.0.2-beta-1+20190428" +version = "5.0.2+20190428" authors = ["Sebastian Thiel "] description = "A complete library to interact with webmasters (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/webmasters3" homepage = "https://developers.google.com/webmaster-tools/" -documentation = "https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428" +documentation = "https://docs.rs/google-webmasters3/5.0.2+20190428" license = "MIT" keywords = ["webmasters", 
"google", "protocol", "web", "api"] autobins = false diff --git a/gen/webmasters3/README.md b/gen/webmasters3/README.md index 12ce7d4198..c504a4d508 100644 --- a/gen/webmasters3/README.md +++ b/gen/webmasters3/README.md @@ -5,20 +5,20 @@ DO NOT EDIT ! --> The `google-webmasters3` library allows access to all features of the *Google webmasters* service. -This documentation was generated from *webmasters* crate version *5.0.2-beta-1+20190428*, where *20190428* is the exact revision of the *webmasters:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *webmasters* crate version *5.0.2+20190428*, where *20190428* is the exact revision of the *webmasters:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *webmasters* *v3* API can be found at the [official documentation site](https://developers.google.com/webmaster-tools/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/Webmasters) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/Webmasters) ... 
* searchanalytics - * [*query*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SearchanalyticQueryCall) + * [*query*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SearchanalyticQueryCall) * sitemaps - * [*delete*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SitemapDeleteCall), [*get*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SitemapGetCall), [*list*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SitemapListCall) and [*submit*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SitemapSubmitCall) + * [*delete*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SitemapDeleteCall), [*get*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SitemapGetCall), [*list*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SitemapListCall) and [*submit*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SitemapSubmitCall) * sites - * [*add*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SiteAddCall), [*delete*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SiteDeleteCall), [*get*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SiteGetCall) and [*list*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/api::SiteListCall) + * [*add*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SiteAddCall), [*delete*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SiteDeleteCall), [*get*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SiteGetCall) and [*list*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/api::SiteListCall) @@ -27,17 +27,17 @@ Handle the following 
*Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/Webmasters)** +* **[Hub](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/Webmasters)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::CallBuilder) -* **[Resources](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::CallBuilder) +* **[Resources](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::Part)** + * **[Parts](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::Delegate) to the -[Method Builder](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::Delegate) to the +[Method Builder](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::RequestValue) and -[decodable](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::RequestValue) and +[decodable](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-webmasters3/5.0.2-beta-1+20190428/google_webmasters3/client::RequestValue) are moved +* [request values](https://docs.rs/google-webmasters3/5.0.2+20190428/google_webmasters3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/webmasters3/src/api.rs b/gen/webmasters3/src/api.rs index a17305b2fc..bd15dc2905 100644 --- a/gen/webmasters3/src/api.rs +++ b/gen/webmasters3/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> Webmasters { Webmasters { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://www.googleapis.com/webmasters/v3/".to_string(), _root_url: "https://www.googleapis.com/".to_string(), } @@ -146,7 +146,7 @@ impl<'a, S> Webmasters { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/webmasters3/src/client.rs b/gen/webmasters3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/webmasters3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/webmasters3/src/lib.rs b/gen/webmasters3/src/lib.rs index bfc6263985..aafc1b033e 100644 --- a/gen/webmasters3/src/lib.rs +++ b/gen/webmasters3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *webmasters* crate version *5.0.2-beta-1+20190428*, where *20190428* is the exact revision of the *webmasters:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *webmasters* crate version *5.0.2+20190428*, where *20190428* is the exact revision of the *webmasters:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *webmasters* *v3* API can be found at the //! [official documentation site](https://developers.google.com/webmaster-tools/). diff --git a/gen/webrisk1-cli/Cargo.toml b/gen/webrisk1-cli/Cargo.toml index fd7023d8f6..a3ca61bec9 100644 --- a/gen/webrisk1-cli/Cargo.toml +++ b/gen/webrisk1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-webrisk1-cli" -version = "4.0.1+20220226" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Web Risk (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/webrisk1-cli" @@ -20,13 +20,13 @@ name = "webrisk1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-webrisk1] path = "../webrisk1" -version = "4.0.1+20220226" +version = "5.0.2+20230121" + diff --git a/gen/webrisk1-cli/README.md b/gen/webrisk1-cli/README.md index d4b74e3bc3..9cf04b726a 100644 --- a/gen/webrisk1-cli/README.md +++ b/gen/webrisk1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was 
generated from the *Web Risk* API at revision *20220226*. The CLI is at version *4.0.1*. +This documentation was generated from the *Web Risk* API at revision *20230121*. The CLI is at version *5.0.2*. ```bash webrisk1 [options] diff --git a/gen/webrisk1-cli/mkdocs.yml b/gen/webrisk1-cli/mkdocs.yml index ee3f3bed5f..02ac404fc8 100644 --- a/gen/webrisk1-cli/mkdocs.yml +++ b/gen/webrisk1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Web Risk v4.0.1+20220226 +site_name: Web Risk v5.0.2+20230121 site_url: http://byron.github.io/google-apis-rs/google-webrisk1-cli site_description: A complete library to interact with Web Risk (protocol v1) @@ -7,17 +7,21 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/webrisk1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['hashes_search.md', 'Hashes', 'Search'] -- ['projects_operations-cancel.md', 'Projects', 'Operations Cancel'] -- ['projects_operations-delete.md', 'Projects', 'Operations Delete'] -- ['projects_operations-get.md', 'Projects', 'Operations Get'] -- ['projects_operations-list.md', 'Projects', 'Operations List'] -- ['projects_submissions-create.md', 'Projects', 'Submissions Create'] -- ['projects_uris-submit.md', 'Projects', 'Uris Submit'] -- ['threat-lists_compute-diff.md', 'Threat Lists', 'Compute Diff'] -- ['uris_search.md', 'Uris', 'Search'] +nav: +- Home: 'index.md' +- 'Hashes': + - 'Search': 'hashes_search.md' +- 'Projects': + - 'Operations Cancel': 'projects_operations-cancel.md' + - 'Operations Delete': 'projects_operations-delete.md' + - 'Operations Get': 'projects_operations-get.md' + - 'Operations List': 'projects_operations-list.md' + - 'Submissions Create': 'projects_submissions-create.md' + - 'Uris Submit': 'projects_uris-submit.md' +- 'Threat Lists': + - 'Compute Diff': 'threat-lists_compute-diff.md' +- 'Uris': + - 'Search': 'uris_search.md' theme: readthedocs diff --git a/gen/webrisk1-cli/src/client.rs b/gen/webrisk1-cli/src/client.rs deleted file mode 100644 
index 0ece418e7d..0000000000 --- a/gen/webrisk1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - 
Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! { - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } 
else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => 
Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match 
UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - 
-#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) 
=> { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/webrisk1-cli/src/main.rs b/gen/webrisk1-cli/src/main.rs index c16bd8587f..549677aa40 100644 --- a/gen/webrisk1-cli/src/main.rs +++ b/gen/webrisk1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_webrisk1::{api, Error, oauth2}; +use google_webrisk1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -61,7 +60,7 @@ where call = call.add_threat_types(value.unwrap_or("")); }, "hash-prefix" => { - call = call.hash_prefix(value.unwrap_or("")); + call = call.hash_prefix( value.map(|v| arg_from_str(v, err, "hash-prefix", "byte")).unwrap_or(b"hello world")); }, _ => { let mut found = false; @@ -302,7 +301,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -530,7 +529,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "version-token" => { - call = call.version_token(value.unwrap_or("")); + call = call.version_token( value.map(|v| arg_from_str(v, err, "version-token", "byte")).unwrap_or(b"hello world")); }, "threat-type" => { call = call.threat_type(value.unwrap_or("")); @@ -539,10 +538,10 @@ where call = call.add_constraints_supported_compressions(value.unwrap_or("")); }, "constraints-max-diff-entries" => { - 
call = call.constraints_max_diff_entries(arg_from_str(value.unwrap_or("-0"), err, "constraints-max-diff-entries", "integer")); + call = call.constraints_max_diff_entries( value.map(|v| arg_from_str(v, err, "constraints-max-diff-entries", "int32")).unwrap_or(-0)); }, "constraints-max-database-entries" => { - call = call.constraints_max_database_entries(arg_from_str(value.unwrap_or("-0"), err, "constraints-max-database-entries", "integer")); + call = call.constraints_max_database_entries( value.map(|v| arg_from_str(v, err, "constraints-max-database-entries", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -1001,7 +1000,7 @@ async fn main() { let mut app = App::new("webrisk1") .author("Sebastian Thiel ") - .version("4.0.1+20220226") + .version("5.0.2+20230121") .about("") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_webrisk1_cli") .arg(Arg::with_name("url") diff --git a/gen/webrisk1/Cargo.toml b/gen/webrisk1/Cargo.toml index d800f7e75d..e0d1fc7a69 100644 --- a/gen/webrisk1/Cargo.toml +++ b/gen/webrisk1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-webrisk1" -version = "5.0.2-beta-1+20230121" +version = "5.0.2+20230121" authors = ["Sebastian Thiel "] description = "A complete library to interact with Web Risk (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/webrisk1" homepage = "https://cloud.google.com/web-risk/" -documentation = "https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121" +documentation = "https://docs.rs/google-webrisk1/5.0.2+20230121" license = "MIT" keywords = ["webrisk", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/webrisk1/README.md b/gen/webrisk1/README.md index 41d2805e6a..da4e3979d8 100644 --- a/gen/webrisk1/README.md +++ b/gen/webrisk1/README.md @@ -5,22 +5,22 @@ DO NOT EDIT ! --> The `google-webrisk1` library allows access to all features of the *Google Web Risk* service. 
-This documentation was generated from *Web Risk* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *webrisk:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Web Risk* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *webrisk:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Web Risk* *v1* API can be found at the [official documentation site](https://cloud.google.com/web-risk/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/WebRisk) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/WebRisk) ... * hashes - * [*search*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::HashSearchCall) + * [*search*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::HashSearchCall) * projects - * [*operations cancel*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::ProjectOperationCancelCall), [*operations delete*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::ProjectOperationDeleteCall), [*operations get*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::ProjectOperationListCall), [*submissions create*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::ProjectSubmissionCreateCall) and [*uris submit*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::ProjectUriSubmitCall) + * [*operations cancel*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::ProjectOperationCancelCall), 
[*operations delete*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::ProjectOperationDeleteCall), [*operations get*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::ProjectOperationGetCall), [*operations list*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::ProjectOperationListCall), [*submissions create*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::ProjectSubmissionCreateCall) and [*uris submit*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::ProjectUriSubmitCall) * threat lists - * [*compute diff*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::ThreatListComputeDiffCall) + * [*compute diff*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::ThreatListComputeDiffCall) * uris - * [*search*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/api::UriSearchCall) + * [*search*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/api::UriSearchCall) @@ -29,17 +29,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/WebRisk)** +* **[Hub](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/WebRisk)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::CallBuilder) -* **[Resources](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::MethodsBuilder) which in turn + allow access to individual 
[*Call Builders*](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::CallBuilder) +* **[Resources](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::Part)** + * **[Parts](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -131,17 +131,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. 
## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -151,29 +151,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::Delegate) to the -[Method Builder](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::Delegate) to the +[Method Builder](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
+The [delegate trait](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::RequestValue) and -[decodable](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::RequestValue) and +[decodable](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-webrisk1/5.0.2-beta-1+20230121/google_webrisk1/client::RequestValue) are moved +* [request values](https://docs.rs/google-webrisk1/5.0.2+20230121/google_webrisk1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/webrisk1/src/api.rs b/gen/webrisk1/src/api.rs index 713fe6eed3..7c4fd036cf 100644 --- a/gen/webrisk1/src/api.rs +++ b/gen/webrisk1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> WebRisk { WebRisk { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://webrisk.googleapis.com/".to_string(), _root_url: "https://webrisk.googleapis.com/".to_string(), } @@ -145,7 +145,7 @@ impl<'a, S> WebRisk { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/webrisk1/src/client.rs b/gen/webrisk1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/webrisk1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/webrisk1/src/lib.rs b/gen/webrisk1/src/lib.rs index 24d91d463e..11a680f2ab 100644 --- a/gen/webrisk1/src/lib.rs +++ b/gen/webrisk1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Web Risk* crate version *5.0.2-beta-1+20230121*, where *20230121* is the exact revision of the *webrisk:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Web Risk* crate version *5.0.2+20230121*, where *20230121* is the exact revision of the *webrisk:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Web Risk* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/web-risk/). diff --git a/gen/workflowexecutions1-cli/Cargo.toml b/gen/workflowexecutions1-cli/Cargo.toml index e11cf3b0c6..89983b04f7 100644 --- a/gen/workflowexecutions1-cli/Cargo.toml +++ b/gen/workflowexecutions1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-workflowexecutions1-cli" -version = "4.0.1+20220222" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Workflow Executions (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/workflowexecutions1-cli" @@ -20,13 +20,13 @@ name = "workflowexecutions1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-workflowexecutions1] path = "../workflowexecutions1" -version = "4.0.1+20220222" +version = "5.0.2+20230110" + diff --git a/gen/workflowexecutions1-cli/README.md b/gen/workflowexecutions1-cli/README.md index 714ea35342..fced91c4a0 100644 --- a/gen/workflowexecutions1-cli/README.md +++ b/gen/workflowexecutions1-cli/README.md @@ 
-25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Workflow Executions* API at revision *20220222*. The CLI is at version *4.0.1*. +This documentation was generated from the *Workflow Executions* API at revision *20230110*. The CLI is at version *5.0.2*. ```bash workflowexecutions1 [options] @@ -34,6 +34,7 @@ workflowexecutions1 [options] locations-workflows-executions-create (-r )... [-p ]... [-o ] locations-workflows-executions-get [-p ]... [-o ] locations-workflows-executions-list [-p ]... [-o ] + locations-workflows-trigger-pubsub-execution (-r )... [-p ]... [-o ] workflowexecutions1 --help Configuration: diff --git a/gen/workflowexecutions1-cli/mkdocs.yml b/gen/workflowexecutions1-cli/mkdocs.yml index 46fc47a0d7..b2c6e8e7b2 100644 --- a/gen/workflowexecutions1-cli/mkdocs.yml +++ b/gen/workflowexecutions1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Workflow Executions v4.0.1+20220222 +site_name: Workflow Executions v5.0.2+20230110 site_url: http://byron.github.io/google-apis-rs/google-workflowexecutions1-cli site_description: A complete library to interact with Workflow Executions (protocol v1) @@ -7,12 +7,14 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/workflowexecutio docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-workflows-executions-cancel.md', 'Projects', 'Locations Workflows Executions Cancel'] -- ['projects_locations-workflows-executions-create.md', 'Projects', 'Locations Workflows Executions Create'] -- ['projects_locations-workflows-executions-get.md', 'Projects', 'Locations Workflows Executions Get'] -- ['projects_locations-workflows-executions-list.md', 'Projects', 'Locations Workflows Executions List'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Workflows Executions Cancel': 'projects_locations-workflows-executions-cancel.md' + - 'Locations Workflows Executions Create': 
'projects_locations-workflows-executions-create.md' + - 'Locations Workflows Executions Get': 'projects_locations-workflows-executions-get.md' + - 'Locations Workflows Executions List': 'projects_locations-workflows-executions-list.md' + - 'Locations Workflows Trigger Pubsub Execution': 'projects_locations-workflows-trigger-pubsub-execution.md' theme: readthedocs diff --git a/gen/workflowexecutions1-cli/src/client.rs b/gen/workflowexecutions1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/workflowexecutions1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/workflowexecutions1-cli/src/main.rs b/gen/workflowexecutions1-cli/src/main.rs index ac92b8571c..ec591d6a33 100644 --- a/gen/workflowexecutions1-cli/src/main.rs +++ b/gen/workflowexecutions1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_workflowexecutions1::{api, Error, oauth2}; +use google_workflowexecutions1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -160,6 +159,7 @@ where match &temp_cursor.to_string()[..] 
{ "argument" => Some(("argument", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "call-log-level" => Some(("callLogLevel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "duration" => Some(("duration", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "end-time" => Some(("endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error.context" => Some(("error.context", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "error.payload" => Some(("error.payload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -169,7 +169,7 @@ where "state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "workflow-revision-id" => Some(("workflowRevisionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["argument", "call-log-level", "context", "end-time", "error", "name", "payload", "result", "start-time", "state", "workflow-revision-id"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["argument", "call-log-level", "context", "duration", "end-time", "error", "name", "payload", "result", "start-time", "state", "workflow-revision-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -298,7 +298,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, _ => { let mut found = false; @@ -347,6 +347,97 @@ where } } + async fn _projects_locations_workflows_trigger_pubsub_execution(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut 
object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] { + "gcp-cloud-events-mode" => Some(("GCPCloudEventsMode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "message.attributes" => Some(("message.attributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })), + "message.data" => Some(("message.data", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "message.message-id" => Some(("message.messageId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "message.ordering-key" => Some(("message.orderingKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "message.publish-time" => Some(("message.publishTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "subscription" => Some(("subscription", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["gcp-cloud-events-mode", "attributes", "data", "message", "message-id", "ordering-key", "publish-time", "subscription"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: 
api::TriggerPubsubExecutionRequest = json::value::from_value(object).unwrap(); + let mut call = self.hub.projects().locations_workflows_trigger_pubsub_execution(request, opt.value_of("workflow").unwrap_or("")); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _doit(&self, dry_run: bool) -> Result, Option> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); @@ -366,6 +457,9 @@ where ("locations-workflows-executions-list", Some(opt)) => { call_result = self._projects_locations_workflows_executions_list(opt, dry_run, &mut err).await; }, + ("locations-workflows-trigger-pubsub-execution", 
Some(opt)) => { + call_result = self._projects_locations_workflows_trigger_pubsub_execution(opt, dry_run, &mut err).await; + }, _ => { err.issues.push(CLIError::MissingMethodError("projects".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); @@ -445,7 +539,7 @@ where async fn main() { let mut exit_status = 0i32; let arg_data = [ - ("projects", "methods: 'locations-workflows-executions-cancel', 'locations-workflows-executions-create', 'locations-workflows-executions-get' and 'locations-workflows-executions-list'", vec![ + ("projects", "methods: 'locations-workflows-executions-cancel', 'locations-workflows-executions-create', 'locations-workflows-executions-get', 'locations-workflows-executions-list' and 'locations-workflows-trigger-pubsub-execution'", vec![ ("locations-workflows-executions-cancel", Some(r##"Cancels an execution of the given name."##), "Details at http://byron.github.io/google-apis-rs/google_workflowexecutions1_cli/projects_locations-workflows-executions-cancel", @@ -540,6 +634,34 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("locations-workflows-trigger-pubsub-execution", + Some(r##"Triggers a new execution using the latest revision of the given workflow by a Pub/Sub push notification."##), + "Details at http://byron.github.io/google-apis-rs/google_workflowexecutions1_cli/projects_locations-workflows-trigger-pubsub-execution", + vec![ + (Some(r##"workflow"##), + None, + Some(r##"Required. Name of the workflow for which an execution should be created. 
Format: projects/{project}/locations/{location}/workflows/{workflow}"##), + Some(true), + Some(false)), + + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -552,7 +674,7 @@ async fn main() { let mut app = App::new("workflowexecutions1") .author("Sebastian Thiel ") - .version("4.0.1+20220222") + .version("5.0.2+20230110") .about("Execute workflows created with Workflows API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_workflowexecutions1_cli") .arg(Arg::with_name("url") diff --git a/gen/workflowexecutions1/Cargo.toml b/gen/workflowexecutions1/Cargo.toml index 447a3a50af..2f86b0d5c3 100644 --- a/gen/workflowexecutions1/Cargo.toml +++ b/gen/workflowexecutions1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-workflowexecutions1" -version = "5.0.2-beta-1+20230110" +version = "5.0.2+20230110" authors = ["Sebastian Thiel "] description = "A complete library to interact with Workflow Executions (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/workflowexecutions1" homepage = "https://cloud.google.com/workflows" -documentation = "https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110" +documentation = "https://docs.rs/google-workflowexecutions1/5.0.2+20230110" license = "MIT" keywords = ["workflowexecutions", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/workflowexecutions1/README.md b/gen/workflowexecutions1/README.md index 466451871f..9836b185c4 100644 --- a/gen/workflowexecutions1/README.md +++ b/gen/workflowexecutions1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! 
--> The `google-workflowexecutions1` library allows access to all features of the *Google Workflow Executions* service. -This documentation was generated from *Workflow Executions* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *workflowexecutions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Workflow Executions* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *workflowexecutions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Workflow Executions* *v1* API can be found at the [official documentation site](https://cloud.google.com/workflows). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/WorkflowExecutions) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/WorkflowExecutions) ... 
* projects - * [*locations workflows executions cancel*](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowExecutionCancelCall), [*locations workflows executions create*](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowExecutionCreateCall), [*locations workflows executions get*](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowExecutionGetCall), [*locations workflows executions list*](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowExecutionListCall) and [*locations workflows trigger pubsub execution*](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowTriggerPubsubExecutionCall) + * [*locations workflows executions cancel*](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowExecutionCancelCall), [*locations workflows executions create*](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowExecutionCreateCall), [*locations workflows executions get*](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowExecutionGetCall), [*locations workflows executions list*](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowExecutionListCall) and [*locations workflows trigger pubsub execution*](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/api::ProjectLocationWorkflowTriggerPubsubExecutionCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* 
**[Hub](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/WorkflowExecutions)** +* **[Hub](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/WorkflowExecutions)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::CallBuilder) -* **[Resources](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::CallBuilder) +* **[Resources](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::Part)** + * **[Parts](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -127,17 +127,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::ResponseResult), it will return that by default. 
+If such a method also supports a [Response Result](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -147,29 +147,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::Delegate) to the -[Method Builder](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::Delegate) to the +[Method Builder](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. 
## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::RequestValue) and -[decodable](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::RequestValue) and +[decodable](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. 
* [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-workflowexecutions1/5.0.2-beta-1+20230110/google_workflowexecutions1/client::RequestValue) are moved +* [request values](https://docs.rs/google-workflowexecutions1/5.0.2+20230110/google_workflowexecutions1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. diff --git a/gen/workflowexecutions1/src/api.rs b/gen/workflowexecutions1/src/api.rs index 4777e9a27b..d88a575b6b 100644 --- a/gen/workflowexecutions1/src/api.rs +++ b/gen/workflowexecutions1/src/api.rs @@ -125,7 +125,7 @@ impl<'a, S> WorkflowExecutions { WorkflowExecutions { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://workflowexecutions.googleapis.com/".to_string(), _root_url: "https://workflowexecutions.googleapis.com/".to_string(), } @@ -136,7 +136,7 @@ impl<'a, S> WorkflowExecutions { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. 
pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/workflowexecutions1/src/client.rs b/gen/workflowexecutions1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/workflowexecutions1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. 
-pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. 
- /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. 
- /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. - fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. 
- /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. - /// This can happen if the protocol changes in conjunction with strict json decoding. 
- JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/workflowexecutions1/src/lib.rs b/gen/workflowexecutions1/src/lib.rs index ba52aa0912..24056f0eef 100644 --- a/gen/workflowexecutions1/src/lib.rs +++ b/gen/workflowexecutions1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Workflow Executions* crate version *5.0.2-beta-1+20230110*, where *20230110* is the exact revision of the *workflowexecutions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Workflow Executions* crate version *5.0.2+20230110*, where *20230110* is the exact revision of the *workflowexecutions:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Workflow Executions* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/workflows). diff --git a/gen/workflows1-cli/Cargo.toml b/gen/workflows1-cli/Cargo.toml index 33397cc976..a4a16d41d4 100644 --- a/gen/workflows1-cli/Cargo.toml +++ b/gen/workflows1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-workflows1-cli" -version = "4.0.1+20220223" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Workflows (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/workflows1-cli" @@ -20,13 +20,13 @@ name = "workflows1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-workflows1] path = "../workflows1" -version = "4.0.1+20220223" +version = "5.0.2+20230105" + diff --git a/gen/workflows1-cli/README.md b/gen/workflows1-cli/README.md index aa8af3509f..103a3af808 100644 --- a/gen/workflows1-cli/README.md +++ b/gen/workflows1-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on 
github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *Workflows* API at revision *20220223*. The CLI is at version *4.0.1*. +This documentation was generated from the *Workflows* API at revision *20230105*. The CLI is at version *5.0.2*. ```bash workflows1 [options] diff --git a/gen/workflows1-cli/mkdocs.yml b/gen/workflows1-cli/mkdocs.yml index cd14dd978c..f60d41ba6c 100644 --- a/gen/workflows1-cli/mkdocs.yml +++ b/gen/workflows1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: Workflows v4.0.1+20220223 +site_name: Workflows v5.0.2+20230105 site_url: http://byron.github.io/google-apis-rs/google-workflows1-cli site_description: A complete library to interact with Workflows (protocol v1) @@ -7,18 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/workflows1-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['projects_locations-get.md', 'Projects', 'Locations Get'] -- ['projects_locations-list.md', 'Projects', 'Locations List'] -- ['projects_locations-operations-delete.md', 'Projects', 'Locations Operations Delete'] -- ['projects_locations-operations-get.md', 'Projects', 'Locations Operations Get'] -- ['projects_locations-operations-list.md', 'Projects', 'Locations Operations List'] -- ['projects_locations-workflows-create.md', 'Projects', 'Locations Workflows Create'] -- ['projects_locations-workflows-delete.md', 'Projects', 'Locations Workflows Delete'] -- ['projects_locations-workflows-get.md', 'Projects', 'Locations Workflows Get'] -- ['projects_locations-workflows-list.md', 'Projects', 'Locations Workflows List'] -- ['projects_locations-workflows-patch.md', 'Projects', 'Locations Workflows Patch'] +nav: +- Home: 'index.md' +- 'Projects': + - 'Locations Get': 'projects_locations-get.md' + - 'Locations List': 'projects_locations-list.md' + - 'Locations Operations Delete': 'projects_locations-operations-delete.md' + - 'Locations Operations Get': 
'projects_locations-operations-get.md' + - 'Locations Operations List': 'projects_locations-operations-list.md' + - 'Locations Workflows Create': 'projects_locations-workflows-create.md' + - 'Locations Workflows Delete': 'projects_locations-workflows-delete.md' + - 'Locations Workflows Get': 'projects_locations-workflows-get.md' + - 'Locations Workflows List': 'projects_locations-workflows-list.md' + - 'Locations Workflows Patch': 'projects_locations-workflows-patch.md' theme: readthedocs diff --git a/gen/workflows1-cli/src/client.rs b/gen/workflows1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/workflows1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/workflows1-cli/src/main.rs b/gen/workflows1-cli/src/main.rs index efff8eccb9..db783e229a 100644 --- a/gen/workflows1-cli/src/main.rs +++ b/gen/workflows1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_workflows1::{api, Error, oauth2}; +use google_workflows1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -113,7 +112,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -279,7 +278,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "filter" => { call = call.filter(value.unwrap_or("")); @@ -543,7 +542,7 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "order-by" => { call = call.order_by(value.unwrap_or("")); @@ -647,7 +646,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match 
key { "update-mask" => { - call = call.update_mask(value.unwrap_or("")); + call = call.update_mask( value.map(|v| arg_from_str(v, err, "update-mask", "google-fieldmask")).unwrap_or(FieldMask::default())); }, _ => { let mut found = false; @@ -924,7 +923,7 @@ async fn main() { Some(false)), ]), ("locations-workflows-create", - Some(r##"Creates a new workflow. If a workflow with the specified name already exists in the specified project and location, the long running operation will return ALREADY_EXISTS error."##), + Some(r##"Creates a new workflow. If a workflow with the specified name already exists in the specified project and location, the long running operation returns a ALREADY_EXISTS error."##), "Details at http://byron.github.io/google-apis-rs/google_workflows1_cli/projects_locations-workflows-create", vec![ (Some(r##"parent"##), @@ -974,12 +973,12 @@ async fn main() { Some(false)), ]), ("locations-workflows-get", - Some(r##"Gets details of a single Workflow."##), + Some(r##"Gets details of a single workflow."##), "Details at http://byron.github.io/google-apis-rs/google_workflows1_cli/projects_locations-workflows-get", vec![ (Some(r##"name"##), None, - Some(r##"Required. Name of the workflow which information should be retrieved. Format: projects/{project}/locations/{location}/workflows/{workflow}"##), + Some(r##"Required. Name of the workflow for which information should be retrieved. Format: projects/{project}/locations/{location}/workflows/{workflow}"##), Some(true), Some(false)), @@ -996,7 +995,7 @@ async fn main() { Some(false)), ]), ("locations-workflows-list", - Some(r##"Lists Workflows in a given project and location. The default order is not specified."##), + Some(r##"Lists workflows in a given project and location. 
The default order is not specified."##), "Details at http://byron.github.io/google-apis-rs/google_workflows1_cli/projects_locations-workflows-list", vec![ (Some(r##"parent"##), @@ -1018,7 +1017,7 @@ async fn main() { Some(false)), ]), ("locations-workflows-patch", - Some(r##"Updates an existing workflow. Running this method has no impact on already running executions of the workflow. A new revision of the workflow may be created as a result of a successful update operation. In that case, such revision will be used in new workflow executions."##), + Some(r##"Updates an existing workflow. Running this method has no impact on already running executions of the workflow. A new revision of the workflow might be created as a result of a successful update operation. In that case, the new revision is used in new workflow executions."##), "Details at http://byron.github.io/google-apis-rs/google_workflows1_cli/projects_locations-workflows-patch", vec![ (Some(r##"name"##), @@ -1051,7 +1050,7 @@ async fn main() { let mut app = App::new("workflows1") .author("Sebastian Thiel ") - .version("4.0.1+20220223") + .version("5.0.2+20230105") .about("Manage workflow definitions. 
To execute workflows and manage executions, see the Workflows Executions API.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_workflows1_cli") .arg(Arg::with_name("url") diff --git a/gen/workflows1/Cargo.toml b/gen/workflows1/Cargo.toml index 8d526bbb9c..a1f1071239 100644 --- a/gen/workflows1/Cargo.toml +++ b/gen/workflows1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-workflows1" -version = "5.0.2-beta-1+20230105" +version = "5.0.2+20230105" authors = ["Sebastian Thiel "] description = "A complete library to interact with Workflows (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/workflows1" homepage = "https://cloud.google.com/workflows" -documentation = "https://docs.rs/google-workflows1/5.0.2-beta-1+20230105" +documentation = "https://docs.rs/google-workflows1/5.0.2+20230105" license = "MIT" keywords = ["workflows", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/workflows1/README.md b/gen/workflows1/README.md index 30f3a71bd5..b1572d6091 100644 --- a/gen/workflows1/README.md +++ b/gen/workflows1/README.md @@ -5,16 +5,16 @@ DO NOT EDIT ! --> The `google-workflows1` library allows access to all features of the *Google Workflows* service. -This documentation was generated from *Workflows* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *workflows:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *Workflows* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *workflows:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *Workflows* *v1* API can be found at the [official documentation site](https://cloud.google.com/workflows). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/Workflows) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/Workflows) ... * projects - * [*locations get*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationListCall), [*locations operations delete*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationOperationListCall), [*locations workflows create*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationWorkflowCreateCall), [*locations workflows delete*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationWorkflowDeleteCall), [*locations workflows get*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationWorkflowGetCall), [*locations workflows list*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationWorkflowListCall) and [*locations workflows patch*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/api::ProjectLocationWorkflowPatchCall) + * [*locations get*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationGetCall), [*locations list*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationListCall), [*locations operations 
delete*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationOperationDeleteCall), [*locations operations get*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationOperationGetCall), [*locations operations list*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationOperationListCall), [*locations workflows create*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationWorkflowCreateCall), [*locations workflows delete*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationWorkflowDeleteCall), [*locations workflows get*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationWorkflowGetCall), [*locations workflows list*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationWorkflowListCall) and [*locations workflows patch*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/api::ProjectLocationWorkflowPatchCall) @@ -23,17 +23,17 @@ Handle the following *Resources* with ease from the central [hub](https://docs.r The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/Workflows)** +* **[Hub](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/Workflows)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::CallBuilder) -* **[Resources](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::Resource)** + * creates [*Method 
Builders*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::CallBuilder) +* **[Resources](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::Part)** + * **[Parts](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. @@ -128,17 +128,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). 
+[Hub Delegate](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -148,29 +148,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::Delegate) to the -[Method Builder](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::CallBuilder) before making the final `doit()` call. 
+You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::Delegate) to the +[Method Builder](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::RequestValue) and -[decodable](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::RequestValue) and +[decodable](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. 
-Most optionals are are considered [Parts](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-workflows1/5.0.2-beta-1+20230105/google_workflows1/client::RequestValue) are moved +* [request values](https://docs.rs/google-workflows1/5.0.2+20230105/google_workflows1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/workflows1/src/api.rs b/gen/workflows1/src/api.rs index 2054c44f8c..42283828ca 100644 --- a/gen/workflows1/src/api.rs +++ b/gen/workflows1/src/api.rs @@ -126,7 +126,7 @@ impl<'a, S> Workflows { Workflows { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://workflows.googleapis.com/".to_string(), _root_url: "https://workflows.googleapis.com/".to_string(), } @@ -137,7 +137,7 @@ impl<'a, S> Workflows { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/workflows1/src/client.rs b/gen/workflows1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/workflows1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/workflows1/src/lib.rs b/gen/workflows1/src/lib.rs index 90d8007af0..70f048cd4d 100644 --- a/gen/workflows1/src/lib.rs +++ b/gen/workflows1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *Workflows* crate version *5.0.2-beta-1+20230105*, where *20230105* is the exact revision of the *workflows:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *Workflows* crate version *5.0.2+20230105*, where *20230105* is the exact revision of the *workflows:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *Workflows* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/workflows). diff --git a/gen/youtube3-cli/Cargo.toml b/gen/youtube3-cli/Cargo.toml index 551b66ada7..970d263434 100644 --- a/gen/youtube3-cli/Cargo.toml +++ b/gen/youtube3-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-youtube3-cli" -version = "4.0.1+20220303" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with YouTube (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/youtube3-cli" @@ -20,13 +20,13 @@ name = "youtube3" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-youtube3] path = "../youtube3" -version = "4.0.1+20220303" +version = "5.0.2+20230123" + diff --git a/gen/youtube3-cli/README.md b/gen/youtube3-cli/README.md index bde65275cb..46a3ebbb7a 100644 --- a/gen/youtube3-cli/README.md +++ b/gen/youtube3-cli/README.md @@ -25,7 +25,7 @@ Find the source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the 
*YouTube* API at revision *20220303*. The CLI is at version *4.0.1*. +This documentation was generated from the *YouTube* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash youtube3 [options] @@ -67,6 +67,7 @@ youtube3 [options] bind ... [-p ]... [-o ] delete [-p ]... insert (-r )... [-p ]... [-o ] + insert-cuepoint (-r )... [-p ]... [-o ] list ... [-p ]... [-o ] transition ... [-p ]... [-o ] update (-r )... [-p ]... [-o ] diff --git a/gen/youtube3-cli/mkdocs.yml b/gen/youtube3-cli/mkdocs.yml index fcee49df51..601c92a704 100644 --- a/gen/youtube3-cli/mkdocs.yml +++ b/gen/youtube3-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: YouTube v4.0.1+20220303 +site_name: YouTube v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-youtube3-cli site_description: A complete library to interact with YouTube (protocol v3) @@ -7,83 +7,114 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/youtube3-cli docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['abuse-reports_insert.md', 'Abuse Reports', 'Insert'] -- ['activities_list.md', 'Activities', 'List'] -- ['captions_delete.md', 'Captions', 'Delete'] -- ['captions_download.md', 'Captions', 'Download'] -- ['captions_insert.md', 'Captions', 'Insert'] -- ['captions_list.md', 'Captions', 'List'] -- ['captions_update.md', 'Captions', 'Update'] -- ['channel-banners_insert.md', 'Channel Banners', 'Insert'] -- ['channel-sections_delete.md', 'Channel Sections', 'Delete'] -- ['channel-sections_insert.md', 'Channel Sections', 'Insert'] -- ['channel-sections_list.md', 'Channel Sections', 'List'] -- ['channel-sections_update.md', 'Channel Sections', 'Update'] -- ['channels_list.md', 'Channels', 'List'] -- ['channels_update.md', 'Channels', 'Update'] -- ['comment-threads_insert.md', 'Comment Threads', 'Insert'] -- ['comment-threads_list.md', 'Comment Threads', 'List'] -- ['comments_delete.md', 'Comments', 'Delete'] -- ['comments_insert.md', 'Comments', 'Insert'] -- 
['comments_list.md', 'Comments', 'List'] -- ['comments_mark-as-spam.md', 'Comments', 'Mark As Spam'] -- ['comments_set-moderation-status.md', 'Comments', 'Set Moderation Status'] -- ['comments_update.md', 'Comments', 'Update'] -- ['i18n-languages_list.md', 'I18n Languages', 'List'] -- ['i18n-regions_list.md', 'I18n Regions', 'List'] -- ['live-broadcasts_bind.md', 'Live Broadcasts', 'Bind'] -- ['live-broadcasts_delete.md', 'Live Broadcasts', 'Delete'] -- ['live-broadcasts_insert.md', 'Live Broadcasts', 'Insert'] -- ['live-broadcasts_list.md', 'Live Broadcasts', 'List'] -- ['live-broadcasts_transition.md', 'Live Broadcasts', 'Transition'] -- ['live-broadcasts_update.md', 'Live Broadcasts', 'Update'] -- ['live-chat-bans_delete.md', 'Live Chat Bans', 'Delete'] -- ['live-chat-bans_insert.md', 'Live Chat Bans', 'Insert'] -- ['live-chat-messages_delete.md', 'Live Chat Messages', 'Delete'] -- ['live-chat-messages_insert.md', 'Live Chat Messages', 'Insert'] -- ['live-chat-messages_list.md', 'Live Chat Messages', 'List'] -- ['live-chat-moderators_delete.md', 'Live Chat Moderators', 'Delete'] -- ['live-chat-moderators_insert.md', 'Live Chat Moderators', 'Insert'] -- ['live-chat-moderators_list.md', 'Live Chat Moderators', 'List'] -- ['live-streams_delete.md', 'Live Streams', 'Delete'] -- ['live-streams_insert.md', 'Live Streams', 'Insert'] -- ['live-streams_list.md', 'Live Streams', 'List'] -- ['live-streams_update.md', 'Live Streams', 'Update'] -- ['members_list.md', 'Members', 'List'] -- ['memberships-levels_list.md', 'Memberships Levels', 'List'] -- ['playlist-items_delete.md', 'Playlist Items', 'Delete'] -- ['playlist-items_insert.md', 'Playlist Items', 'Insert'] -- ['playlist-items_list.md', 'Playlist Items', 'List'] -- ['playlist-items_update.md', 'Playlist Items', 'Update'] -- ['playlists_delete.md', 'Playlists', 'Delete'] -- ['playlists_insert.md', 'Playlists', 'Insert'] -- ['playlists_list.md', 'Playlists', 'List'] -- ['playlists_update.md', 'Playlists', 'Update'] -- 
['search_list.md', 'Search', 'List'] -- ['subscriptions_delete.md', 'Subscriptions', 'Delete'] -- ['subscriptions_insert.md', 'Subscriptions', 'Insert'] -- ['subscriptions_list.md', 'Subscriptions', 'List'] -- ['super-chat-events_list.md', 'Super Chat Events', 'List'] -- ['tests_insert.md', 'Tests', 'Insert'] -- ['third-party-links_delete.md', 'Third Party Links', 'Delete'] -- ['third-party-links_insert.md', 'Third Party Links', 'Insert'] -- ['third-party-links_list.md', 'Third Party Links', 'List'] -- ['third-party-links_update.md', 'Third Party Links', 'Update'] -- ['thumbnails_set.md', 'Thumbnails', 'Set'] -- ['video-abuse-report-reasons_list.md', 'Video Abuse Report Reasons', 'List'] -- ['video-categories_list.md', 'Video Categories', 'List'] -- ['videos_delete.md', 'Videos', 'Delete'] -- ['videos_get-rating.md', 'Videos', 'Get Rating'] -- ['videos_insert.md', 'Videos', 'Insert'] -- ['videos_list.md', 'Videos', 'List'] -- ['videos_rate.md', 'Videos', 'Rate'] -- ['videos_report-abuse.md', 'Videos', 'Report Abuse'] -- ['videos_update.md', 'Videos', 'Update'] -- ['watermarks_set.md', 'Watermarks', 'Set'] -- ['watermarks_unset.md', 'Watermarks', 'Unset'] -- ['youtube_v3-update-comment-threads.md', 'Youtube', 'V3 Update Comment Threads'] +nav: +- Home: 'index.md' +- 'Abuse Reports': + - 'Insert': 'abuse-reports_insert.md' +- 'Activities': + - 'List': 'activities_list.md' +- 'Captions': + - 'Delete': 'captions_delete.md' + - 'Download': 'captions_download.md' + - 'Insert': 'captions_insert.md' + - 'List': 'captions_list.md' + - 'Update': 'captions_update.md' +- 'Channel Banners': + - 'Insert': 'channel-banners_insert.md' +- 'Channel Sections': + - 'Delete': 'channel-sections_delete.md' + - 'Insert': 'channel-sections_insert.md' + - 'List': 'channel-sections_list.md' + - 'Update': 'channel-sections_update.md' +- 'Channels': + - 'List': 'channels_list.md' + - 'Update': 'channels_update.md' +- 'Comment Threads': + - 'Insert': 'comment-threads_insert.md' + - 'List': 
'comment-threads_list.md' +- 'Comments': + - 'Delete': 'comments_delete.md' + - 'Insert': 'comments_insert.md' + - 'List': 'comments_list.md' + - 'Mark As Spam': 'comments_mark-as-spam.md' + - 'Set Moderation Status': 'comments_set-moderation-status.md' + - 'Update': 'comments_update.md' +- 'I18n Languages': + - 'List': 'i18n-languages_list.md' +- 'I18n Regions': + - 'List': 'i18n-regions_list.md' +- 'Live Broadcasts': + - 'Bind': 'live-broadcasts_bind.md' + - 'Delete': 'live-broadcasts_delete.md' + - 'Insert': 'live-broadcasts_insert.md' + - 'Insert Cuepoint': 'live-broadcasts_insert-cuepoint.md' + - 'List': 'live-broadcasts_list.md' + - 'Transition': 'live-broadcasts_transition.md' + - 'Update': 'live-broadcasts_update.md' +- 'Live Chat Bans': + - 'Delete': 'live-chat-bans_delete.md' + - 'Insert': 'live-chat-bans_insert.md' +- 'Live Chat Messages': + - 'Delete': 'live-chat-messages_delete.md' + - 'Insert': 'live-chat-messages_insert.md' + - 'List': 'live-chat-messages_list.md' +- 'Live Chat Moderators': + - 'Delete': 'live-chat-moderators_delete.md' + - 'Insert': 'live-chat-moderators_insert.md' + - 'List': 'live-chat-moderators_list.md' +- 'Live Streams': + - 'Delete': 'live-streams_delete.md' + - 'Insert': 'live-streams_insert.md' + - 'List': 'live-streams_list.md' + - 'Update': 'live-streams_update.md' +- 'Members': + - 'List': 'members_list.md' +- 'Memberships Levels': + - 'List': 'memberships-levels_list.md' +- 'Playlist Items': + - 'Delete': 'playlist-items_delete.md' + - 'Insert': 'playlist-items_insert.md' + - 'List': 'playlist-items_list.md' + - 'Update': 'playlist-items_update.md' +- 'Playlists': + - 'Delete': 'playlists_delete.md' + - 'Insert': 'playlists_insert.md' + - 'List': 'playlists_list.md' + - 'Update': 'playlists_update.md' +- 'Search': + - 'List': 'search_list.md' +- 'Subscriptions': + - 'Delete': 'subscriptions_delete.md' + - 'Insert': 'subscriptions_insert.md' + - 'List': 'subscriptions_list.md' +- 'Super Chat Events': + - 'List': 
'super-chat-events_list.md' +- 'Tests': + - 'Insert': 'tests_insert.md' +- 'Third Party Links': + - 'Delete': 'third-party-links_delete.md' + - 'Insert': 'third-party-links_insert.md' + - 'List': 'third-party-links_list.md' + - 'Update': 'third-party-links_update.md' +- 'Thumbnails': + - 'Set': 'thumbnails_set.md' +- 'Video Abuse Report Reasons': + - 'List': 'video-abuse-report-reasons_list.md' +- 'Video Categories': + - 'List': 'video-categories_list.md' +- 'Videos': + - 'Delete': 'videos_delete.md' + - 'Get Rating': 'videos_get-rating.md' + - 'Insert': 'videos_insert.md' + - 'List': 'videos_list.md' + - 'Rate': 'videos_rate.md' + - 'Report Abuse': 'videos_report-abuse.md' + - 'Update': 'videos_update.md' +- 'Watermarks': + - 'Set': 'watermarks_set.md' + - 'Unset': 'watermarks_unset.md' +- 'Youtube': + - 'V3 Update Comment Threads': 'youtube_v3-update-comment-threads.md' theme: readthedocs diff --git a/gen/youtube3-cli/src/client.rs b/gen/youtube3-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/youtube3-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! 
-// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/youtube3-cli/src/main.rs b/gen/youtube3-cli/src/main.rs index a3e7c9599e..e2c6209d02 100644 --- a/gen/youtube3-cli/src/main.rs +++ b/gen/youtube3-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_youtube3::{api, Error, oauth2}; +use google_youtube3::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -149,22 +148,22 @@ where call = call.region_code(value.unwrap_or("")); }, "published-before" => { - call = call.published_before(value.unwrap_or("")); + call = call.published_before( value.map(|v| arg_from_str(v, err, "published-before", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "published-after" => { - call = call.published_after(value.unwrap_or("")); + call = call.published_after( value.map(|v| arg_from_str(v, err, "published-after", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "mine" => { - call = call.mine(arg_from_str(value.unwrap_or("false"), err, "mine", "boolean")); + call = call.mine( value.map(|v| arg_from_str(v, err, "mine", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "home" => { - call = call.home(arg_from_str(value.unwrap_or("false"), err, "home", "boolean")); + call = call.home( value.map(|v| 
arg_from_str(v, err, "home", "boolean")).unwrap_or(false)); }, "channel-id" => { call = call.channel_id(value.unwrap_or("")); @@ -393,7 +392,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "sync" => { - call = call.sync(arg_from_str(value.unwrap_or("false"), err, "sync", "boolean")); + call = call.sync( value.map(|v| arg_from_str(v, err, "sync", "boolean")).unwrap_or(false)); }, "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); @@ -568,7 +567,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "sync" => { - call = call.sync(arg_from_str(value.unwrap_or("false"), err, "sync", "boolean")); + call = call.sync( value.map(|v| arg_from_str(v, err, "sync", "boolean")).unwrap_or(false)); }, "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); @@ -890,7 +889,7 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "mine" => { - call = call.mine(arg_from_str(value.unwrap_or("false"), err, "mine", "boolean")); + call = call.mine( value.map(|v| arg_from_str(v, err, "mine", "boolean")).unwrap_or(false)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -1064,16 +1063,16 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "my-subscribers" => { - call = call.my_subscribers(arg_from_str(value.unwrap_or("false"), err, "my-subscribers", "boolean")); + call = call.my_subscribers( value.map(|v| arg_from_str(v, err, "my-subscribers", "boolean")).unwrap_or(false)); }, "mine" => { - call = call.mine(arg_from_str(value.unwrap_or("false"), err, "mine", "boolean")); + call = call.mine( value.map(|v| arg_from_str(v, err, "mine", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "managed-by-me" => { - call = 
call.managed_by_me(arg_from_str(value.unwrap_or("false"), err, "managed-by-me", "boolean")); + call = call.managed_by_me( value.map(|v| arg_from_str(v, err, "managed-by-me", "boolean")).unwrap_or(false)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -1449,7 +1448,7 @@ where call = call.moderation_status(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -1669,7 +1668,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -1772,7 +1771,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "ban-author" => { - call = call.ban_author(arg_from_str(value.unwrap_or("false"), err, "ban-author", "boolean")); + call = call.ban_author( value.map(|v| arg_from_str(v, err, "ban-author", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -2210,6 +2209,7 @@ where "snippet.thumbnails.standard.url" => Some(("snippet.thumbnails.standard.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.thumbnails.standard.width" => Some(("snippet.thumbnails.standard.width", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "snippet.title" => Some(("snippet.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.concurrent-viewers" => Some(("statistics.concurrentViewers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.total-chat-count" => Some(("statistics.totalChatCount", JsonTypeInfo { jtype: JsonType::Int, ctype: 
ComplexType::Pod })), "status.life-cycle-status" => Some(("status.lifeCycleStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.live-broadcast-priority" => Some(("status.liveBroadcastPriority", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2218,7 +2218,7 @@ where "status.recording-status" => Some(("status.recordingStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.self-declared-made-for-kids" => Some(("status.selfDeclaredMadeForKids", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["actual-end-time", "actual-start-time", "bound-stream-id", "bound-stream-last-update-time-ms", "broadcast-stream-delay-ms", "channel-id", "closed-captions-type", "content-details", "default", "description", "embed-html", "enable-auto-start", "enable-auto-stop", "enable-closed-captions", "enable-content-encryption", "enable-dvr", "enable-embed", "enable-low-latency", "enable-monitor-stream", "etag", "height", "high", "id", "is-default-broadcast", "kind", "latency-preference", "life-cycle-status", "live-broadcast-priority", "live-chat-id", "made-for-kids", "maxres", "medium", "mesh", "monitor-stream", "privacy-status", "projection", "published-at", "record-from-start", "recording-status", "scheduled-end-time", "scheduled-start-time", "self-declared-made-for-kids", "snippet", "standard", "start-with-slate", "statistics", "status", "stereo-layout", "thumbnails", "title", "total-chat-count", "url", "width"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["actual-end-time", "actual-start-time", "bound-stream-id", "bound-stream-last-update-time-ms", "broadcast-stream-delay-ms", "channel-id", "closed-captions-type", "concurrent-viewers", "content-details", "default", "description", "embed-html", "enable-auto-start", "enable-auto-stop", "enable-closed-captions", "enable-content-encryption", "enable-dvr", 
"enable-embed", "enable-low-latency", "enable-monitor-stream", "etag", "height", "high", "id", "is-default-broadcast", "kind", "latency-preference", "life-cycle-status", "live-broadcast-priority", "live-chat-id", "made-for-kids", "maxres", "medium", "mesh", "monitor-stream", "privacy-status", "projection", "published-at", "record-from-start", "recording-status", "scheduled-end-time", "scheduled-start-time", "self-declared-made-for-kids", "snippet", "standard", "start-with-slate", "statistics", "status", "stereo-layout", "thumbnails", "title", "total-chat-count", "url", "width"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2285,6 +2285,109 @@ where } } + async fn _live_broadcasts_insert_cuepoint(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) + -> Result<(), DoitError> { + + let mut field_cursor = FieldCursor::default(); + let mut object = json::value::Value::Object(Default::default()); + + for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let last_errc = err.issues.len(); + let (key, value) = parse_kv_arg(&*kvarg, err, false); + let mut temp_cursor = field_cursor.clone(); + if let Err(field_err) = temp_cursor.set(&*key) { + err.issues.push(field_err); + } + if value.is_none() { + field_cursor = temp_cursor.clone(); + if err.issues.len() > last_errc { + err.issues.remove(last_errc); + } + continue; + } + + let type_info: Option<(&'static str, JsonTypeInfo)> = + match &temp_cursor.to_string()[..] 
{ + "cue-type" => Some(("cueType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "duration-secs" => Some(("durationSecs", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "insertion-offset-time-ms" => Some(("insertionOffsetTimeMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "walltime-ms" => Some(("walltimeMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + _ => { + let suggestion = FieldCursor::did_you_mean(key, &vec!["cue-type", "duration-secs", "etag", "id", "insertion-offset-time-ms", "walltime-ms"]); + err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); + None + } + }; + if let Some((field_cursor_str, type_info)) = type_info { + FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); + } + } + let mut request: api::Cuepoint = json::value::from_value(object).unwrap(); + let mut call = self.hub.live_broadcasts().insert_cuepoint(request); + for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + let (key, value) = parse_kv_arg(&*parg, err, false); + match key { + "part" => { + call = call.add_part(value.unwrap_or("")); + }, + "on-behalf-of-content-owner-channel" => { + call = call.on_behalf_of_content_owner_channel(value.unwrap_or("")); + }, + "on-behalf-of-content-owner" => { + call = call.on_behalf_of_content_owner(value.unwrap_or("")); + }, + "id" => { + call = call.id(value.unwrap_or("")); + }, + _ => { + let mut found = false; + for param in &self.gp { + if key == *param { + found = true; + call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); + break; + } + } + if !found { + 
err.issues.push(CLIError::UnknownParameter(key.to_string(), + {let mut v = Vec::new(); + v.extend(self.gp.iter().map(|v|*v)); + v.extend(["id", "on-behalf-of-content-owner", "on-behalf-of-content-owner-channel", "part"].iter().map(|v|*v)); + v } )); + } + } + } + } + let protocol = CallType::Standard; + if dry_run { + Ok(()) + } else { + assert!(err.issues.len() == 0); + for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { + call = call.add_scope(scope); + } + let mut ostream = match writer_from_opts(opt.value_of("out")) { + Ok(mut f) => f, + Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), + }; + match match protocol { + CallType::Standard => call.doit().await, + _ => unreachable!() + } { + Err(api_err) => Err(DoitError::ApiError(api_err)), + Ok((mut response, output_schema)) => { + let mut value = json::value::to_value(&output_schema).expect("serde to work"); + remove_json_null_values(&mut value); + json::to_writer_pretty(&mut ostream, &value).unwrap(); + ostream.flush().unwrap(); + Ok(()) + } + } + } + } + async fn _live_broadcasts_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.live_broadcasts().list(&opt.values_of("part").map(|i|i.collect()).unwrap_or(Vec::new()).iter().map(|&v| v.to_string()).collect::>()); @@ -2301,10 +2404,10 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "mine" => { - call = call.mine(arg_from_str(value.unwrap_or("false"), err, "mine", "boolean")); + call = call.mine( value.map(|v| arg_from_str(v, err, "mine", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -2491,6 +2594,7 @@ where 
"snippet.thumbnails.standard.url" => Some(("snippet.thumbnails.standard.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.thumbnails.standard.width" => Some(("snippet.thumbnails.standard.width", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "snippet.title" => Some(("snippet.title", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "statistics.concurrent-viewers" => Some(("statistics.concurrentViewers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "statistics.total-chat-count" => Some(("statistics.totalChatCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "status.life-cycle-status" => Some(("status.lifeCycleStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.live-broadcast-priority" => Some(("status.liveBroadcastPriority", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), @@ -2499,7 +2603,7 @@ where "status.recording-status" => Some(("status.recordingStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.self-declared-made-for-kids" => Some(("status.selfDeclaredMadeForKids", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["actual-end-time", "actual-start-time", "bound-stream-id", "bound-stream-last-update-time-ms", "broadcast-stream-delay-ms", "channel-id", "closed-captions-type", "content-details", "default", "description", "embed-html", "enable-auto-start", "enable-auto-stop", "enable-closed-captions", "enable-content-encryption", "enable-dvr", "enable-embed", "enable-low-latency", "enable-monitor-stream", "etag", "height", "high", "id", "is-default-broadcast", "kind", "latency-preference", "life-cycle-status", "live-broadcast-priority", "live-chat-id", "made-for-kids", "maxres", "medium", "mesh", "monitor-stream", "privacy-status", "projection", "published-at", 
"record-from-start", "recording-status", "scheduled-end-time", "scheduled-start-time", "self-declared-made-for-kids", "snippet", "standard", "start-with-slate", "statistics", "status", "stereo-layout", "thumbnails", "title", "total-chat-count", "url", "width"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["actual-end-time", "actual-start-time", "bound-stream-id", "bound-stream-last-update-time-ms", "broadcast-stream-delay-ms", "channel-id", "closed-captions-type", "concurrent-viewers", "content-details", "default", "description", "embed-html", "enable-auto-start", "enable-auto-stop", "enable-closed-captions", "enable-content-encryption", "enable-dvr", "enable-embed", "enable-low-latency", "enable-monitor-stream", "etag", "height", "high", "id", "is-default-broadcast", "kind", "latency-preference", "life-cycle-status", "live-broadcast-priority", "live-chat-id", "made-for-kids", "maxres", "medium", "mesh", "monitor-stream", "privacy-status", "projection", "published-at", "record-from-start", "recording-status", "scheduled-end-time", "scheduled-start-time", "self-declared-made-for-kids", "snippet", "standard", "start-with-slate", "statistics", "status", "stereo-layout", "thumbnails", "title", "total-chat-count", "url", "width"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2788,11 +2892,16 @@ where "snippet.fan-funding-event-details.amount-micros" => Some(("snippet.fanFundingEventDetails.amountMicros", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.fan-funding-event-details.currency" => Some(("snippet.fanFundingEventDetails.currency", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.fan-funding-event-details.user-comment" => Some(("snippet.fanFundingEventDetails.userComment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + 
"snippet.gift-membership-received-details.associated-membership-gifting-message-id" => Some(("snippet.giftMembershipReceivedDetails.associatedMembershipGiftingMessageId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snippet.gift-membership-received-details.gifter-channel-id" => Some(("snippet.giftMembershipReceivedDetails.gifterChannelId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snippet.gift-membership-received-details.member-level-name" => Some(("snippet.giftMembershipReceivedDetails.memberLevelName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.has-display-content" => Some(("snippet.hasDisplayContent", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "snippet.live-chat-id" => Some(("snippet.liveChatId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.member-milestone-chat-details.member-level-name" => Some(("snippet.memberMilestoneChatDetails.memberLevelName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.member-milestone-chat-details.member-month" => Some(("snippet.memberMilestoneChatDetails.memberMonth", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "snippet.member-milestone-chat-details.user-comment" => Some(("snippet.memberMilestoneChatDetails.userComment", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snippet.membership-gifting-details.gift-memberships-count" => Some(("snippet.membershipGiftingDetails.giftMembershipsCount", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), + "snippet.membership-gifting-details.gift-memberships-level-name" => Some(("snippet.membershipGiftingDetails.giftMembershipsLevelName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.message-deleted-details.deleted-message-id" => Some(("snippet.messageDeletedDetails.deletedMessageId", JsonTypeInfo { jtype: JsonType::String, 
ctype: ComplexType::Pod })), "snippet.message-retracted-details.retracted-message-id" => Some(("snippet.messageRetractedDetails.retractedMessageId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.new-sponsor-details.is-upgrade" => Some(("snippet.newSponsorDetails.isUpgrade", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), @@ -2819,7 +2928,7 @@ where "snippet.user-banned-details.banned-user-details.display-name" => Some(("snippet.userBannedDetails.bannedUserDetails.displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.user-banned-details.banned-user-details.profile-image-url" => Some(("snippet.userBannedDetails.bannedUserDetails.profileImageUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["alt-text", "alt-text-language", "amount-display-string", "amount-micros", "author-channel-id", "author-details", "ban-duration-seconds", "ban-type", "banned-user-details", "channel-id", "channel-url", "currency", "deleted-message-id", "display-message", "display-name", "etag", "fan-funding-event-details", "has-display-content", "id", "is-chat-moderator", "is-chat-owner", "is-chat-sponsor", "is-upgrade", "is-verified", "kind", "live-chat-id", "member-level-name", "member-milestone-chat-details", "member-month", "message-deleted-details", "message-retracted-details", "message-text", "new-sponsor-details", "profile-image-url", "published-at", "retracted-message-id", "snippet", "sticker-id", "super-chat-details", "super-sticker-details", "super-sticker-metadata", "text-message-details", "tier", "type", "user-banned-details", "user-comment"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["alt-text", "alt-text-language", "amount-display-string", "amount-micros", "associated-membership-gifting-message-id", "author-channel-id", "author-details", "ban-duration-seconds", "ban-type", 
"banned-user-details", "channel-id", "channel-url", "currency", "deleted-message-id", "display-message", "display-name", "etag", "fan-funding-event-details", "gift-membership-received-details", "gift-memberships-count", "gift-memberships-level-name", "gifter-channel-id", "has-display-content", "id", "is-chat-moderator", "is-chat-owner", "is-chat-sponsor", "is-upgrade", "is-verified", "kind", "live-chat-id", "member-level-name", "member-milestone-chat-details", "member-month", "membership-gifting-details", "message-deleted-details", "message-retracted-details", "message-text", "new-sponsor-details", "profile-image-url", "published-at", "retracted-message-id", "snippet", "sticker-id", "super-chat-details", "super-sticker-details", "super-sticker-metadata", "text-message-details", "tier", "type", "user-banned-details", "user-comment"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -2886,13 +2995,13 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "profile-image-size" => { - call = call.profile_image_size(arg_from_str(value.unwrap_or("-0"), err, "profile-image-size", "integer")); + call = call.profile_image_size( value.map(|v| arg_from_str(v, err, "profile-image-size", "uint32")).unwrap_or(0)); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "hl" => { call = call.hl(value.unwrap_or("")); @@ -3090,7 +3199,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, _ => { let mut found = false; @@ -3319,10 
+3428,10 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "mine" => { - call = call.mine(arg_from_str(value.unwrap_or("false"), err, "mine", "boolean")); + call = call.mine( value.map(|v| arg_from_str(v, err, "mine", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -3500,7 +3609,7 @@ where call = call.mode(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "has-access-to-level" => { call = call.has_access_to_level(value.unwrap_or("")); @@ -3799,7 +3908,7 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -4162,10 +4271,10 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "mine" => { - call = call.mine(arg_from_str(value.unwrap_or("false"), err, "mine", "boolean")); + call = call.mine( value.map(|v| arg_from_str(v, err, "mine", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -4397,10 +4506,10 @@ where call = call.q(value.unwrap_or("")); }, "published-before" => { - call = call.published_before(value.unwrap_or("")); + call = 
call.published_before( value.map(|v| arg_from_str(v, err, "published-before", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "published-after" => { - call = call.published_after(value.unwrap_or("")); + call = call.published_after( value.map(|v| arg_from_str(v, err, "published-after", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); @@ -4412,7 +4521,7 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "location-radius" => { call = call.location_radius(value.unwrap_or("")); @@ -4421,13 +4530,13 @@ where call = call.location(value.unwrap_or("")); }, "for-mine" => { - call = call.for_mine(arg_from_str(value.unwrap_or("false"), err, "for-mine", "boolean")); + call = call.for_mine( value.map(|v| arg_from_str(v, err, "for-mine", "boolean")).unwrap_or(false)); }, "for-developer" => { - call = call.for_developer(arg_from_str(value.unwrap_or("false"), err, "for-developer", "boolean")); + call = call.for_developer( value.map(|v| arg_from_str(v, err, "for-developer", "boolean")).unwrap_or(false)); }, "for-content-owner" => { - call = call.for_content_owner(arg_from_str(value.unwrap_or("false"), err, "for-content-owner", "boolean")); + call = call.for_content_owner( value.map(|v| arg_from_str(v, err, "for-content-owner", "boolean")).unwrap_or(false)); }, "event-type" => { call = call.event_type(value.unwrap_or("")); @@ -4680,16 +4789,16 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "my-subscribers" => { - call = call.my_subscribers(arg_from_str(value.unwrap_or("false"), err, "my-subscribers", "boolean")); + call = call.my_subscribers( value.map(|v| arg_from_str(v, err, "my-subscribers", "boolean")).unwrap_or(false)); }, 
"my-recent-subscribers" => { - call = call.my_recent_subscribers(arg_from_str(value.unwrap_or("false"), err, "my-recent-subscribers", "boolean")); + call = call.my_recent_subscribers( value.map(|v| arg_from_str(v, err, "my-recent-subscribers", "boolean")).unwrap_or(false)); }, "mine" => { - call = call.mine(arg_from_str(value.unwrap_or("false"), err, "mine", "boolean")); + call = call.mine( value.map(|v| arg_from_str(v, err, "mine", "boolean")).unwrap_or(false)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "id" => { call = call.add_id(value.unwrap_or("")); @@ -4757,7 +4866,7 @@ where call = call.page_token(value.unwrap_or("")); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "hl" => { call = call.hl(value.unwrap_or("")); @@ -4974,12 +5083,13 @@ where "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "linking-token" => Some(("linkingToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snippet.channel-to-store-link.merchant-id" => Some(("snippet.channelToStoreLink.merchantId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.channel-to-store-link.store-name" => Some(("snippet.channelToStoreLink.storeName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.channel-to-store-link.store-url" => Some(("snippet.channelToStoreLink.storeUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.type" => Some(("snippet.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod 
})), "status.link-status" => Some(("status.linkStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["channel-to-store-link", "etag", "kind", "link-status", "linking-token", "snippet", "status", "store-name", "store-url", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["channel-to-store-link", "etag", "kind", "link-status", "linking-token", "merchant-id", "snippet", "status", "store-name", "store-url", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5125,12 +5235,13 @@ where "etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "linking-token" => Some(("linkingToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), + "snippet.channel-to-store-link.merchant-id" => Some(("snippet.channelToStoreLink.merchantId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.channel-to-store-link.store-name" => Some(("snippet.channelToStoreLink.storeName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.channel-to-store-link.store-url" => Some(("snippet.channelToStoreLink.storeUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "snippet.type" => Some(("snippet.type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "status.link-status" => Some(("status.linkStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { - let suggestion = FieldCursor::did_you_mean(key, &vec!["channel-to-store-link", "etag", "kind", "link-status", "linking-token", "snippet", "status", "store-name", "store-url", "type"]); + let suggestion = FieldCursor::did_you_mean(key, &vec!["channel-to-store-link", "etag", "kind", "link-status", 
"linking-token", "merchant-id", "snippet", "status", "store-name", "store-url", "type"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } @@ -5681,7 +5792,7 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "stabilize" => { - call = call.stabilize(arg_from_str(value.unwrap_or("false"), err, "stabilize", "boolean")); + call = call.stabilize( value.map(|v| arg_from_str(v, err, "stabilize", "boolean")).unwrap_or(false)); }, "on-behalf-of-content-owner-channel" => { call = call.on_behalf_of_content_owner_channel(value.unwrap_or("")); @@ -5690,10 +5801,10 @@ where call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "notify-subscribers" => { - call = call.notify_subscribers(arg_from_str(value.unwrap_or("false"), err, "notify-subscribers", "boolean")); + call = call.notify_subscribers( value.map(|v| arg_from_str(v, err, "notify-subscribers", "boolean")).unwrap_or(false)); }, "auto-levels" => { - call = call.auto_levels(arg_from_str(value.unwrap_or("false"), err, "auto-levels", "boolean")); + call = call.auto_levels( value.map(|v| arg_from_str(v, err, "auto-levels", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -5767,13 +5878,13 @@ where call = call.my_rating(value.unwrap_or("")); }, "max-width" => { - call = call.max_width(arg_from_str(value.unwrap_or("-0"), err, "max-width", "integer")); + call = call.max_width( value.map(|v| arg_from_str(v, err, "max-width", "int32")).unwrap_or(-0)); }, "max-results" => { - call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); + call = call.max_results( value.map(|v| arg_from_str(v, err, "max-results", "uint32")).unwrap_or(0)); }, "max-height" => { - call = call.max_height(arg_from_str(value.unwrap_or("-0"), err, "max-height", "integer")); + call = call.max_height( value.map(|v| arg_from_str(v, err, "max-height", "int32")).unwrap_or(-0)); }, "locale" => { call = 
call.locale(value.unwrap_or("")); @@ -6639,6 +6750,9 @@ where ("insert", Some(opt)) => { call_result = self._live_broadcasts_insert(opt, dry_run, &mut err).await; }, + ("insert-cuepoint", Some(opt)) => { + call_result = self._live_broadcasts_insert_cuepoint(opt, dry_run, &mut err).await; + }, ("list", Some(opt)) => { call_result = self._live_broadcasts_list(opt, dry_run, &mut err).await; }, @@ -7587,7 +7701,7 @@ async fn main() { ]), ]), - ("live-broadcasts", "methods: 'bind', 'delete', 'insert', 'list', 'transition' and 'update'", vec![ + ("live-broadcasts", "methods: 'bind', 'delete', 'insert', 'insert-cuepoint', 'list', 'transition' and 'update'", vec![ ("bind", Some(r##"Bind a broadcast to a stream."##), "Details at http://byron.github.io/google-apis-rs/google_youtube3_cli/live-broadcasts_bind", @@ -7648,6 +7762,28 @@ async fn main() { Some(false), Some(true)), + (Some(r##"out"##), + Some(r##"o"##), + Some(r##"Specify the file into which to write the program's output"##), + Some(false), + Some(false)), + ]), + ("insert-cuepoint", + Some(r##"Insert cuepoints in a broadcast"##), + "Details at http://byron.github.io/google-apis-rs/google_youtube3_cli/live-broadcasts_insert-cuepoint", + vec![ + (Some(r##"kv"##), + Some(r##"r"##), + Some(r##"Set various fields of the request structure, matching the key=value form"##), + Some(true), + Some(true)), + + (Some(r##"v"##), + Some(r##"p"##), + Some(r##"Set various optional parameters, matching the key=value form"##), + Some(false), + Some(true)), + (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), @@ -8755,7 +8891,7 @@ async fn main() { let mut app = App::new("youtube3") .author("Sebastian Thiel ") - .version("4.0.1+20220303") + .version("5.0.2+20230123") .about("The YouTube Data API v3 is an API that provides access to YouTube data, such as videos, playlists, and channels.") .after_help("All documentation details can be found at 
http://byron.github.io/google-apis-rs/google_youtube3_cli") .arg(Arg::with_name("url") diff --git a/gen/youtube3/Cargo.toml b/gen/youtube3/Cargo.toml index f0563a6458..f44efeaf9e 100644 --- a/gen/youtube3/Cargo.toml +++ b/gen/youtube3/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-youtube3" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with YouTube (protocol v3)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/youtube3" homepage = "https://developers.google.com/youtube/" -documentation = "https://docs.rs/google-youtube3/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-youtube3/5.0.2+20230123" license = "MIT" keywords = ["youtube", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/youtube3/README.md b/gen/youtube3/README.md index 14eb7a0189..3e229aadee 100644 --- a/gen/youtube3/README.md +++ b/gen/youtube3/README.md @@ -5,88 +5,88 @@ DO NOT EDIT ! --> The `google-youtube3` library allows access to all features of the *Google YouTube* service. -This documentation was generated from *YouTube* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *youtube:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *YouTube* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *youtube:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *YouTube* *v3* API can be found at the [official documentation site](https://developers.google.com/youtube/). # Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/YouTube) ... 
+Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/YouTube) ... -* [abuse reports](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::AbuseReport) - * [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::AbuseReportInsertCall) -* [activities](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Activity) - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ActivityListCall) -* [captions](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Caption) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CaptionDeleteCall), [*download*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CaptionDownloadCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CaptionInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CaptionListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CaptionUpdateCall) +* [abuse reports](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::AbuseReport) + * [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::AbuseReportInsertCall) +* [activities](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Activity) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ActivityListCall) +* [captions](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Caption) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CaptionDeleteCall), [*download*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CaptionDownloadCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CaptionInsertCall), 
[*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CaptionListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CaptionUpdateCall) * channel banners - * [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelBannerInsertCall) -* [channel sections](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelSection) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelSectionDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelSectionInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelSectionListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelSectionUpdateCall) -* [channels](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Channel) - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelUpdateCall) -* [comment threads](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentThread) - * [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentThreadInsertCall) and [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentThreadListCall) -* [comments](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Comment) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentListCall), [*mark as 
spam*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentMarkAsSpamCall), [*set moderation status*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentSetModerationStatuCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CommentUpdateCall) -* [i18n languages](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::I18nLanguage) - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::I18nLanguageListCall) -* [i18n regions](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::I18nRegion) - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::I18nRegionListCall) -* [live broadcasts](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveBroadcast) - * [*bind*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveBroadcastBindCall), [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveBroadcastDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveBroadcastInsertCall), [*insert cuepoint*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveBroadcastInsertCuepointCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveBroadcastListCall), [*transition*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveBroadcastTransitionCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveBroadcastUpdateCall) -* [live chat bans](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatBan) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatBanDeleteCall) and 
[*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatBanInsertCall) -* [live chat messages](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatMessage) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatMessageDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatMessageInsertCall) and [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatMessageListCall) -* [live chat moderators](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatModerator) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatModeratorDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatModeratorInsertCall) and [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveChatModeratorListCall) -* [live streams](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveStream) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveStreamDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveStreamInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveStreamListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::LiveStreamUpdateCall) -* [members](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Member) - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::MemberListCall) -* [memberships levels](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::MembershipsLevel) - * 
[*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::MembershipsLevelListCall) -* [playlist items](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistItem) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistItemDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistItemInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistItemListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistItemUpdateCall) -* [playlists](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Playlist) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::PlaylistUpdateCall) + * [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelBannerInsertCall) +* [channel sections](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelSection) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelSectionDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelSectionInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelSectionListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelSectionUpdateCall) +* [channels](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Channel) + * 
[*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelUpdateCall) +* [comment threads](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentThread) + * [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentThreadInsertCall) and [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentThreadListCall) +* [comments](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Comment) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentListCall), [*mark as spam*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentMarkAsSpamCall), [*set moderation status*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentSetModerationStatuCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CommentUpdateCall) +* [i18n languages](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::I18nLanguage) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::I18nLanguageListCall) +* [i18n regions](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::I18nRegion) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::I18nRegionListCall) +* [live broadcasts](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveBroadcast) + * [*bind*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveBroadcastBindCall), [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveBroadcastDeleteCall), 
[*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveBroadcastInsertCall), [*insert cuepoint*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveBroadcastInsertCuepointCall), [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveBroadcastListCall), [*transition*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveBroadcastTransitionCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveBroadcastUpdateCall) +* [live chat bans](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatBan) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatBanDeleteCall) and [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatBanInsertCall) +* [live chat messages](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatMessage) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatMessageDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatMessageInsertCall) and [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatMessageListCall) +* [live chat moderators](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatModerator) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatModeratorDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatModeratorInsertCall) and [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveChatModeratorListCall) +* [live streams](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveStream) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveStreamDeleteCall), 
[*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveStreamInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveStreamListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::LiveStreamUpdateCall) +* [members](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Member) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::MemberListCall) +* [memberships levels](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::MembershipsLevel) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::MembershipsLevelListCall) +* [playlist items](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistItem) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistItemDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistItemInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistItemListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistItemUpdateCall) +* [playlists](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Playlist) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::PlaylistUpdateCall) * search - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::SearchListCall) -* [subscriptions](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Subscription) - * 
[*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::SubscriptionDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::SubscriptionInsertCall) and [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::SubscriptionListCall) -* [super chat events](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::SuperChatEvent) - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::SuperChatEventListCall) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::SearchListCall) +* [subscriptions](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Subscription) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::SubscriptionDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::SubscriptionInsertCall) and [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::SubscriptionListCall) +* [super chat events](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::SuperChatEvent) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::SuperChatEventListCall) * tests - * [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::TestInsertCall) -* [third party links](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ThirdPartyLink) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ThirdPartyLinkDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ThirdPartyLinkInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ThirdPartyLinkListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ThirdPartyLinkUpdateCall) -* 
[thumbnails](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Thumbnail) - * [*set*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ThumbnailSetCall) -* [video abuse report reasons](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoAbuseReportReason) - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoAbuseReportReasonListCall) -* [video categories](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoCategory) - * [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoCategoryListCall) -* [videos](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::Video) - * [*delete*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoDeleteCall), [*get rating*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoGetRatingCall), [*insert*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoListCall), [*rate*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoRateCall), [*report abuse*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoReportAbuseCall) and [*update*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoUpdateCall) + * [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::TestInsertCall) +* [third party links](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ThirdPartyLink) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ThirdPartyLinkDeleteCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ThirdPartyLinkInsertCall), 
[*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ThirdPartyLinkListCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ThirdPartyLinkUpdateCall) +* [thumbnails](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Thumbnail) + * [*set*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ThumbnailSetCall) +* [video abuse report reasons](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoAbuseReportReason) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoAbuseReportReasonListCall) +* [video categories](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoCategory) + * [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoCategoryListCall) +* [videos](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::Video) + * [*delete*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoDeleteCall), [*get rating*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoGetRatingCall), [*insert*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoInsertCall), [*list*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoListCall), [*rate*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoRateCall), [*report abuse*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoReportAbuseCall) and [*update*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoUpdateCall) * watermarks - * [*set*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::WatermarkSetCall) and [*unset*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::WatermarkUnsetCall) + * [*set*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::WatermarkSetCall) and 
[*unset*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::WatermarkUnsetCall) * youtube - * [*v3 update comment threads*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::YoutubeV3UpdateCommentThreadCall) + * [*v3 update comment threads*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::YoutubeV3UpdateCommentThreadCall) Upload supported by ... -* [*insert captions*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CaptionInsertCall) -* [*update captions*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CaptionUpdateCall) -* [*insert channel banners*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ChannelBannerInsertCall) -* [*set thumbnails*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::ThumbnailSetCall) -* [*insert videos*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::VideoInsertCall) -* [*set watermarks*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::WatermarkSetCall) +* [*insert captions*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CaptionInsertCall) +* [*update captions*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CaptionUpdateCall) +* [*insert channel banners*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ChannelBannerInsertCall) +* [*set thumbnails*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::ThumbnailSetCall) +* [*insert videos*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::VideoInsertCall) +* [*set watermarks*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::WatermarkSetCall) Download supported by ... 
-* [*download captions*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/api::CaptionDownloadCall) +* [*download captions*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/api::CaptionDownloadCall) @@ -94,17 +94,17 @@ Download supported by ... The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/YouTube)** +* **[Hub](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/YouTube)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::CallBuilder) -* **[Resources](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::CallBuilder) +* **[Resources](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::Part)** + * **[Parts](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::CallBuilder)** +* **[Activities](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to 
further categorize them and ease browsing. @@ -207,17 +207,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::ResponseResult), it will return that by default. You can see it as meta-data for the actual media. 
To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -227,29 +227,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::Delegate) to the -[Method Builder](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::Delegate) to the +[Method Builder](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::RequestValue) and -[decodable](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::RequestValue) and +[decodable](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-youtube3/5.0.2-beta-1+20230123/google_youtube3/client::RequestValue) are moved +* [request values](https://docs.rs/google-youtube3/5.0.2+20230123/google_youtube3/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/youtube3/src/api.rs b/gen/youtube3/src/api.rs index dde0fe832e..52ed2a797a 100644 --- a/gen/youtube3/src/api.rs +++ b/gen/youtube3/src/api.rs @@ -155,7 +155,7 @@ impl<'a, S> YouTube { YouTube { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://youtube.googleapis.com/".to_string(), _root_url: "https://youtube.googleapis.com/".to_string(), } @@ -253,7 +253,7 @@ impl<'a, S> YouTube { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/youtube3/src/client.rs b/gen/youtube3/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/youtube3/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. There is only one per library, this trait is supposed -/// to make intended use more explicit. 
-/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. - /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. 
That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. That way, we can attempt to resume the upload later, - /// see `upload_url()`. 
- /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/youtube3/src/lib.rs b/gen/youtube3/src/lib.rs index d53778a76c..88993e50ef 100644 --- a/gen/youtube3/src/lib.rs +++ b/gen/youtube3/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *YouTube* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *youtube:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *YouTube* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *youtube:v3* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *YouTube* *v3* API can be found at the //! [official documentation site](https://developers.google.com/youtube/). diff --git a/gen/youtubereporting1-cli/Cargo.toml b/gen/youtubereporting1-cli/Cargo.toml index 9e9cdb0677..1ecd81753e 100644 --- a/gen/youtubereporting1-cli/Cargo.toml +++ b/gen/youtubereporting1-cli/Cargo.toml @@ -4,7 +4,7 @@ [package] name = "google-youtubereporting1-cli" -version = "4.0.1+20220305" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with YouTube Reporting (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/youtubereporting1-cli" @@ -20,13 +20,13 @@ name = "youtubereporting1" path = "src/main.rs" [dependencies] +anyhow = "^ 1.0" hyper-rustls = "0.23.0" -mime = "^ 0.2.0" -serde = "^ 1.0" +mime = "^ 0.3.0" +serde = { version = "^ 1.0", features = ["derive"] } serde_json = "^ 1.0" -serde_derive = "^ 1.0" -yup-oauth2 = "^ 7.0" itertools = "^ 0.10" +google-clis-common = { path = "../../google-clis-common", version = "5.0" } strsim = "^0.5" clap = "^2.0" http = "^0.2" @@ -36,7 +36,7 @@ tower-service = "^0.3.1" - [dependencies.google-youtubereporting1] path = "../youtubereporting1" -version = "4.0.1+20220305" +version = "5.0.2+20230123" + diff --git a/gen/youtubereporting1-cli/README.md b/gen/youtubereporting1-cli/README.md index 06f12fbea6..2692e5ca2d 100644 --- a/gen/youtubereporting1-cli/README.md +++ b/gen/youtubereporting1-cli/README.md @@ -25,7 +25,7 @@ Find the 
source code [on github](https://github.com/Byron/google-apis-rs/tree/ma # Usage -This documentation was generated from the *YouTube Reporting* API at revision *20220305*. The CLI is at version *4.0.1*. +This documentation was generated from the *YouTube Reporting* API at revision *20230123*. The CLI is at version *5.0.2*. ```bash youtubereporting1 [options] diff --git a/gen/youtubereporting1-cli/mkdocs.yml b/gen/youtubereporting1-cli/mkdocs.yml index f808bc8378..60f00795f4 100644 --- a/gen/youtubereporting1-cli/mkdocs.yml +++ b/gen/youtubereporting1-cli/mkdocs.yml @@ -1,4 +1,4 @@ -site_name: YouTube Reporting v4.0.1+20220305 +site_name: YouTube Reporting v5.0.2+20230123 site_url: http://byron.github.io/google-apis-rs/google-youtubereporting1-cli site_description: A complete library to interact with YouTube Reporting (protocol v1) @@ -7,16 +7,19 @@ repo_url: https://github.com/Byron/google-apis-rs/tree/main/gen/youtubereporting docs_dir: docs site_dir: build_html -pages: -- ['index.md', 'Home'] -- ['jobs_create.md', 'Jobs', 'Create'] -- ['jobs_delete.md', 'Jobs', 'Delete'] -- ['jobs_get.md', 'Jobs', 'Get'] -- ['jobs_list.md', 'Jobs', 'List'] -- ['jobs_reports-get.md', 'Jobs', 'Reports Get'] -- ['jobs_reports-list.md', 'Jobs', 'Reports List'] -- ['media_download.md', 'Media', 'Download'] -- ['report-types_list.md', 'Report Types', 'List'] +nav: +- Home: 'index.md' +- 'Jobs': + - 'Create': 'jobs_create.md' + - 'Delete': 'jobs_delete.md' + - 'Get': 'jobs_get.md' + - 'List': 'jobs_list.md' + - 'Reports Get': 'jobs_reports-get.md' + - 'Reports List': 'jobs_reports-list.md' +- 'Media': + - 'Download': 'media_download.md' +- 'Report Types': + - 'List': 'report-types_list.md' theme: readthedocs diff --git a/gen/youtubereporting1-cli/src/client.rs b/gen/youtubereporting1-cli/src/client.rs deleted file mode 100644 index 0ece418e7d..0000000000 --- a/gen/youtubereporting1-cli/src/client.rs +++ /dev/null @@ -1,748 +0,0 @@ -// COPY OF 'src/rust/cli/client.rs' -// DO NOT EDIT -use 
clap::{App, SubCommand}; -use mime::Mime; -use crate::oauth2::{ApplicationSecret, ConsoleApplicationSecret}; -use serde_json as json; -use serde_json::value::Value; - -use std::env; -use std::error::Error as StdError; -use std::fmt; -use std::fs; -use std::io; -use std::io::{stdout, Read, Write}; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::string::ToString; - -use std::default::Default; - -const FIELD_SEP: char = '.'; - -pub enum ComplexType { - Pod, - Vec, - Map, -} - -// Null, -// Bool(bool), -// I64(i64), -// U64(u64), -// F64(f64), -// String(String), - -pub enum JsonType { - Boolean, - Int, - Uint, - Float, - String, -} - -pub struct JsonTypeInfo { - pub jtype: JsonType, - pub ctype: ComplexType, -} - -// Based on @erickt user comment. Thanks for the idea ! -// Remove all keys whose values are null from given value (changed in place) -pub fn remove_json_null_values(value: &mut Value) { - match *value { - Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -fn did_you_mean<'a>(v: &str, possible_values: &[&'a str]) -> Option<&'a str> { - let mut candidate: Option<(f64, &str)> = None; - for pv in possible_values { - let confidence = strsim::jaro_winkler(v, pv); - if confidence > 0.8 && (candidate.is_none() || (candidate.as_ref().unwrap().0 < confidence)) - { - candidate = Some((confidence, pv)); - } - } - match candidate { - None => None, - Some((_, candidate)) => Some(candidate), - } -} - -pub enum CallType { - Upload(UploadProtocol), - Standard, -} - -arg_enum! 
{ - pub enum UploadProtocol { - Simple, - // Resumable // This seems to be lost during the async conversion - } -} - -impl AsRef for UploadProtocol { - fn as_ref(&self) -> &str { - match *self { - UploadProtocol::Simple => "simple", - // UploadProtocol::Resumable => "resumable", - } - } -} - -impl AsRef for CallType { - fn as_ref(&self) -> &str { - match *self { - CallType::Upload(ref proto) => proto.as_ref(), - CallType::Standard => "standard-request", - } - } -} - -#[derive(Clone, Default)] -pub struct FieldCursor(Vec); - -impl ToString for FieldCursor { - fn to_string(&self) -> String { - self.0.join(".") - } -} - -impl From<&'static str> for FieldCursor { - fn from(value: &'static str) -> FieldCursor { - let mut res = FieldCursor::default(); - res.set(value).unwrap(); - res - } -} - -fn assure_entry<'a>(m: &'a mut json::Map, k: &str) -> &'a mut Value { - if m.contains_key(k) { - return m.get_mut(k).expect("value to exist"); - } - m.insert(k.to_owned(), Value::Object(Default::default())); - m.get_mut(k).expect("value to exist") -} - -impl FieldCursor { - pub fn set(&mut self, value: &str) -> Result<(), CLIError> { - if value.is_empty() { - return Err(CLIError::Field(FieldError::Empty)); - } - - let mut first_is_field_sep = false; - let mut char_count: usize = 0; - let mut last_c = FIELD_SEP; - let mut num_conscutive_field_seps = 0; - - let mut field = String::new(); - let mut fields = self.0.clone(); - - let push_field = |fs: &mut Vec, f: &mut String| { - if !f.is_empty() { - fs.push(f.clone()); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - char_count += 1; - - if c == FIELD_SEP { - if cid == 0 { - first_is_field_sep = true; - } - num_conscutive_field_seps += 1; - if cid > 0 && last_c == FIELD_SEP { - if fields.pop().is_none() { - return Err(CLIError::Field(FieldError::PopOnEmpty(value.to_string()))); - } - } else { - push_field(&mut fields, &mut field); - } - } else { - num_conscutive_field_seps = 0; - if cid == 1 && 
first_is_field_sep { - fields.truncate(0); - } - field.push(c); - } - - last_c = c; - } - - push_field(&mut fields, &mut field); - - if char_count == 1 && first_is_field_sep { - fields.truncate(0); - } - if char_count > 1 && num_conscutive_field_seps == 1 { - return Err(CLIError::Field(FieldError::TrailingFieldSep( - value.to_string(), - ))); - } - - self.0 = fields; - Ok(()) - } - - pub fn did_you_mean(value: &str, possible_values: &[&str]) -> Option { - if value.is_empty() { - return None; - } - - let mut last_c = FIELD_SEP; - - let mut field = String::new(); - let mut output = String::new(); - - let push_field = |fs: &mut String, f: &mut String| { - if !f.is_empty() { - fs.push_str(match did_you_mean(&f, possible_values) { - Some(candidate) => candidate, - None => &f, - }); - f.truncate(0); - } - }; - - for (cid, c) in value.chars().enumerate() { - if c == FIELD_SEP { - if last_c != FIELD_SEP { - push_field(&mut output, &mut field); - } - output.push(c); - } else { - field.push(c); - } - - last_c = c; - } - - push_field(&mut output, &mut field); - - if output == value { - None - } else { - Some(output) - } - } - - pub fn set_json_value( - &self, - mut object: &mut Value, - value: &str, - type_info: JsonTypeInfo, - err: &mut InvalidOptionsError, - orig_cursor: &FieldCursor, - ) { - assert!(!self.0.is_empty()); - - for field in &self.0[..self.0.len() - 1] { - let tmp = object; - object = match *tmp { - Value::Object(ref mut mapping) => assure_entry(mapping, &field), - _ => panic!("We don't expect non-object Values here ..."), - }; - } - - match *object { - Value::Object(ref mut mapping) => { - let field = &self.0[self.0.len() - 1]; - let to_jval = - |value: &str, jtype: JsonType, err: &mut InvalidOptionsError| -> Value { - match jtype { - JsonType::Boolean => { - Value::Bool(arg_from_str(value, err, &field, "boolean")) - } - JsonType::Int => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "int")) - .expect("valid f64"), - ), - 
JsonType::Uint => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "uint")) - .expect("valid f64"), - ), - JsonType::Float => Value::Number( - json::Number::from_f64(arg_from_str(value, err, &field, "float")) - .expect("valid f64"), - ), - JsonType::String => Value::String(value.to_owned()), - } - }; - - match type_info.ctype { - ComplexType::Pod => { - if mapping - .insert(field.to_owned(), to_jval(value, type_info.jtype, err)) - .is_some() - { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - ComplexType::Vec => match *assure_entry(mapping, field) { - Value::Array(ref mut values) => { - values.push(to_jval(value, type_info.jtype, err)) - } - _ => unreachable!(), - }, - ComplexType::Map => { - let (key, value) = parse_kv_arg(value, err, true); - let jval = to_jval(value.unwrap_or(""), type_info.jtype, err); - - match *assure_entry(mapping, &field) { - Value::Object(ref mut value_map) => { - if value_map.insert(key.to_owned(), jval).is_some() { - err.issues.push(CLIError::Field(FieldError::Duplicate( - orig_cursor.to_string(), - ))); - } - } - _ => unreachable!(), - } - } - } - } - _ => unreachable!(), - } - } - - pub fn num_fields(&self) -> usize { - self.0.len() - } -} - -pub fn parse_kv_arg<'a>( - kv: &'a str, - err: &mut InvalidOptionsError, - for_hashmap: bool, -) -> (&'a str, Option<&'a str>) { - let mut add_err = || { - err.issues - .push(CLIError::InvalidKeyValueSyntax(kv.to_string(), for_hashmap)) - }; - match kv.find('=') { - None => { - add_err(); - (kv, None) - } - Some(pos) => { - let key = &kv[..pos]; - if kv.len() <= pos + 1 { - add_err(); - return (key, Some("")); - } - (key, Some(&kv[pos + 1..])) - } - } -} - -pub fn calltype_from_str( - name: &str, - valid_protocols: Vec, - err: &mut InvalidOptionsError, -) -> CallType { - CallType::Upload(match UploadProtocol::from_str(name) { - Ok(up) => up, - Err(msg) => { - err.issues.push(CLIError::InvalidUploadProtocol( - 
name.to_string(), - valid_protocols, - )); - UploadProtocol::Simple - } - }) -} - -pub fn input_file_from_opts(file_path: &str, err: &mut InvalidOptionsError) -> Option { - match fs::File::open(file_path) { - Ok(f) => Some(f), - Err(io_err) => { - err.issues.push(CLIError::Input(InputError::Io(( - file_path.to_string(), - io_err, - )))); - None - } - } -} - -pub fn input_mime_from_opts(mime: &str, err: &mut InvalidOptionsError) -> Option { - match mime.parse() { - Ok(m) => Some(m), - Err(_) => { - err.issues - .push(CLIError::Input(InputError::Mime(mime.to_string()))); - None - } - } -} - -pub fn writer_from_opts(arg: Option<&str>) -> Result, io::Error> { - let f = arg.unwrap_or("-"); - match f { - "-" => Ok(Box::new(stdout())), - _ => match fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(f) - { - Ok(f) => Ok(Box::new(f)), - Err(io_err) => Err(io_err), - }, - } -} - -pub fn arg_from_str<'a, T>( - arg: &str, - err: &mut InvalidOptionsError, - arg_name: &'a str, - arg_type: &'a str, -) -> T -where - T: FromStr + Default, - ::Err: fmt::Display, -{ - match FromStr::from_str(arg) { - Err(perr) => { - err.issues.push(CLIError::ParseError( - arg_name.to_owned(), - arg_type.to_owned(), - arg.to_string(), - format!("{}", perr), - )); - Default::default() - } - Ok(v) => v, - } -} - -#[derive(Debug)] -pub enum ApplicationSecretError { - DecoderError((String, json::Error)), - FormatError(String), -} - -impl fmt::Display for ApplicationSecretError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ApplicationSecretError::DecoderError((ref path, ref err)) => writeln!( - f, - "Could not decode file at '{}' with error: {}.", - path, err - ), - ApplicationSecretError::FormatError(ref path) => writeln!( - f, - "'installed' field is unset in secret file at '{}'.", - path - ), - } - } -} - -#[derive(Debug)] -pub enum ConfigurationError { - DirectoryCreationFailed((String, io::Error)), - DirectoryUnset, - 
HomeExpansionFailed(String), - Secret(ApplicationSecretError), - Io((String, io::Error)), -} - -impl fmt::Display for ConfigurationError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - ConfigurationError::DirectoryCreationFailed((ref dir, ref err)) => writeln!( - f, - "Directory '{}' could not be created with error: {}.", - dir, err - ), - ConfigurationError::DirectoryUnset => writeln!(f, "--config-dir was unset or empty."), - ConfigurationError::HomeExpansionFailed(ref dir) => writeln!( - f, - "Couldn't find HOME directory of current user, failed to expand '{}'.", - dir - ), - ConfigurationError::Secret(ref err) => writeln!(f, "Secret -> {}", err), - ConfigurationError::Io((ref path, ref err)) => writeln!( - f, - "IO operation failed on path '{}' with error: {}.", - path, err - ), - } - } -} - -#[derive(Debug)] -pub enum InputError { - Io((String, io::Error)), - Mime(String), -} - -impl fmt::Display for InputError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - InputError::Io((ref file_path, ref io_err)) => writeln!( - f, - "Failed to open '{}' for reading with error: {}.", - file_path, io_err - ), - InputError::Mime(ref mime) => writeln!(f, "'{}' is not a known mime-type.", mime), - } - } -} - -#[derive(Debug)] -pub enum FieldError { - PopOnEmpty(String), - TrailingFieldSep(String), - Unknown(String, Option, Option), - Duplicate(String), - Empty, -} - -impl fmt::Display for FieldError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - FieldError::PopOnEmpty(ref field) => { - writeln!(f, "'{}': Cannot move up on empty field cursor.", field) - } - FieldError::TrailingFieldSep(ref field) => writeln!( - f, - "'{}': Single field separator may not be last character.", - field - ), - FieldError::Unknown(ref field, ref suggestion, ref value) => { - let suffix = match *suggestion { - Some(ref s) => { - let kv = match *value { - Some(ref v) => 
format!("{}={}", s, v), - None => s.clone(), - }; - format!(" Did you mean '{}' ?", kv) - } - None => String::new(), - }; - writeln!(f, "Field '{}' does not exist.{}", field, suffix) - } - FieldError::Duplicate(ref cursor) => { - writeln!(f, "Value at '{}' was already set", cursor) - } - FieldError::Empty => writeln!(f, "Field names must not be empty."), - } - } -} - -#[derive(Debug)] -pub enum CLIError { - Configuration(ConfigurationError), - ParseError(String, String, String, String), - UnknownParameter(String, Vec<&'static str>), - InvalidUploadProtocol(String, Vec), - InvalidKeyValueSyntax(String, bool), - Input(InputError), - Field(FieldError), - MissingCommandError, - MissingMethodError(String), -} - -impl fmt::Display for CLIError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match *self { - CLIError::Configuration(ref err) => write!(f, "Configuration -> {}", err), - CLIError::Input(ref err) => write!(f, "Input -> {}", err), - CLIError::Field(ref err) => write!(f, "Field -> {}", err), - CLIError::InvalidUploadProtocol(ref proto_name, ref valid_names) => writeln!( - f, - "'{}' is not a valid upload protocol. 
Choose from one of {}.", - proto_name, - valid_names.join(", ") - ), - CLIError::ParseError(ref arg_name, ref type_name, ref value, ref err_desc) => writeln!( - f, - "Failed to parse argument '{}' with value '{}' as {} with error: {}.", - arg_name, value, type_name, err_desc - ), - CLIError::UnknownParameter(ref param_name, ref possible_values) => { - let suffix = match did_you_mean(param_name, &possible_values) { - Some(v) => format!(" Did you mean '{}' ?", v), - None => String::new(), - }; - writeln!(f, "Parameter '{}' is unknown.{}", param_name, suffix) - } - CLIError::InvalidKeyValueSyntax(ref kv, is_hashmap) => { - let hashmap_info = if is_hashmap { "hashmap " } else { "" }; - writeln!( - f, - "'{}' does not match {}pattern =.", - kv, hashmap_info - ) - } - CLIError::MissingCommandError => writeln!(f, "Please specify the main sub-command."), - CLIError::MissingMethodError(ref cmd) => writeln!( - f, - "Please specify the method to call on the '{}' command.", - cmd - ), - } - } -} - -#[derive(Debug)] -pub struct InvalidOptionsError { - pub issues: Vec, - pub exit_code: i32, -} - -impl fmt::Display for InvalidOptionsError { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - for issue in &self.issues { - issue.fmt(f)?; - } - Ok(()) - } -} - -impl InvalidOptionsError { - pub fn single(err: CLIError, exit_code: i32) -> InvalidOptionsError { - InvalidOptionsError { - issues: vec![err], - exit_code, - } - } - - pub fn new() -> InvalidOptionsError { - InvalidOptionsError { - issues: Vec::new(), - exit_code: 1, - } - } -} - -pub fn assure_config_dir_exists(dir: &str) -> Result { - let trdir = dir.trim(); - if trdir.is_empty() { - return Err(CLIError::Configuration(ConfigurationError::DirectoryUnset)); - } - - let expanded_config_dir = if trdir.as_bytes()[0] == b'~' { - match env::var("HOME") - .ok() - .or_else(|| env::var("UserProfile").ok()) - { - None => { - return Err(CLIError::Configuration( - 
ConfigurationError::HomeExpansionFailed(trdir.to_string()), - )) - } - Some(mut user) => { - user.push_str(&trdir[1..]); - user - } - } - } else { - trdir.to_string() - }; - - if let Err(err) = fs::create_dir(&expanded_config_dir) { - if err.kind() != io::ErrorKind::AlreadyExists { - return Err(CLIError::Configuration( - ConfigurationError::DirectoryCreationFailed((expanded_config_dir, err)), - )); - } - } - - Ok(expanded_config_dir) -} - -pub fn application_secret_from_directory( - dir: &str, - secret_basename: &str, - json_console_secret: &str, -) -> Result { - let secret_path = Path::new(dir).join(secret_basename); - let secret_str = || secret_path.as_path().to_str().unwrap().to_string(); - let secret_io_error = |io_err: io::Error| { - Err(CLIError::Configuration(ConfigurationError::Io(( - secret_str(), - io_err, - )))) - }; - - for _ in 0..2 { - match fs::File::open(&secret_path) { - Err(mut err) => { - if err.kind() == io::ErrorKind::NotFound { - // Write our built-in one - user may adjust the written file at will - - err = match fs::OpenOptions::new() - .create(true) - .write(true) - .truncate(true) - .open(&secret_path) - { - Err(cfe) => cfe, - Ok(mut f) => { - // Assure we convert 'ugly' json string into pretty one - let console_secret: ConsoleApplicationSecret = - json::from_str(json_console_secret).unwrap(); - match json::to_writer_pretty(&mut f, &console_secret) { - Err(serde_err) => { - panic!("Unexpected serde error: {:#?}", serde_err) - } - Ok(_) => continue, - } - } - }; - // fall through to IO error handling - } - return secret_io_error(err); - } - Ok(f) => match json::de::from_reader::<_, ConsoleApplicationSecret>(f) { - Err(json_err) => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - ApplicationSecretError::DecoderError((secret_str(), json_err)), - ))) - } - Ok(console_secret) => match console_secret.installed { - Some(secret) => return Ok(secret), - None => { - return Err(CLIError::Configuration(ConfigurationError::Secret( - 
ApplicationSecretError::FormatError(secret_str()), - ))) - } - }, - }, - } - } - unreachable!(); -} diff --git a/gen/youtubereporting1-cli/src/main.rs b/gen/youtubereporting1-cli/src/main.rs index 09b91fe083..1cdd27a567 100644 --- a/gen/youtubereporting1-cli/src/main.rs +++ b/gen/youtubereporting1-cli/src/main.rs @@ -3,8 +3,6 @@ // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] -extern crate tokio; - #[macro_use] extern crate clap; @@ -12,9 +10,10 @@ use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; -use google_youtubereporting1::{api, Error, oauth2}; +use google_youtubereporting1::{api, Error, oauth2, client::chrono, FieldMask}; -mod client; + +use google_clis_common as client; use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, @@ -267,13 +266,13 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "include-system-managed" => { - call = call.include_system_managed(arg_from_str(value.unwrap_or("false"), err, "include-system-managed", "boolean")); + call = call.include_system_managed( value.map(|v| arg_from_str(v, err, "include-system-managed", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -385,22 +384,22 @@ where let (key, value) = parse_kv_arg(&*parg, err, false); match key { "start-time-before" => { - call = call.start_time_before(value.unwrap_or("")); + call = call.start_time_before( value.map(|v| arg_from_str(v, err, "start-time-before", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "start-time-at-or-after" => { - call = 
call.start_time_at_or_after(value.unwrap_or("")); + call = call.start_time_at_or_after( value.map(|v| arg_from_str(v, err, "start-time-at-or-after", "google-datetime")).unwrap_or(chrono::Utc::now())); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "created-after" => { - call = call.created_after(value.unwrap_or("")); + call = call.created_after( value.map(|v| arg_from_str(v, err, "created-after", "google-datetime")).unwrap_or(chrono::Utc::now())); }, _ => { let mut found = false; @@ -521,13 +520,13 @@ where call = call.page_token(value.unwrap_or("")); }, "page-size" => { - call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); + call = call.page_size( value.map(|v| arg_from_str(v, err, "page-size", "int32")).unwrap_or(-0)); }, "on-behalf-of-content-owner" => { call = call.on_behalf_of_content_owner(value.unwrap_or("")); }, "include-system-managed" => { - call = call.include_system_managed(arg_from_str(value.unwrap_or("false"), err, "include-system-managed", "boolean")); + call = call.include_system_managed( value.map(|v| arg_from_str(v, err, "include-system-managed", "boolean")).unwrap_or(false)); }, _ => { let mut found = false; @@ -885,7 +884,7 @@ async fn main() { let mut app = App::new("youtubereporting1") .author("Sebastian Thiel ") - .version("4.0.1+20220305") + .version("5.0.2+20230123") .about("Schedules reporting jobs containing your YouTube Analytics data and downloads the resulting bulk data reports in the form of CSV files.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_youtubereporting1_cli") .arg(Arg::with_name("url") diff --git 
a/gen/youtubereporting1/Cargo.toml b/gen/youtubereporting1/Cargo.toml index af7fcb0fb5..ce83be9bc1 100644 --- a/gen/youtubereporting1/Cargo.toml +++ b/gen/youtubereporting1/Cargo.toml @@ -4,12 +4,12 @@ [package] name = "google-youtubereporting1" -version = "5.0.2-beta-1+20230123" +version = "5.0.2+20230123" authors = ["Sebastian Thiel "] description = "A complete library to interact with YouTube Reporting (protocol v1)" repository = "https://github.com/Byron/google-apis-rs/tree/main/gen/youtubereporting1" homepage = "https://developers.google.com/youtube/reporting/v1/reports/" -documentation = "https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123" +documentation = "https://docs.rs/google-youtubereporting1/5.0.2+20230123" license = "MIT" keywords = ["youtubereporting", "google", "protocol", "web", "api"] autobins = false diff --git a/gen/youtubereporting1/README.md b/gen/youtubereporting1/README.md index f25d3662e9..119b62bfad 100644 --- a/gen/youtubereporting1/README.md +++ b/gen/youtubereporting1/README.md @@ -5,25 +5,25 @@ DO NOT EDIT ! --> The `google-youtubereporting1` library allows access to all features of the *Google YouTube Reporting* service. -This documentation was generated from *YouTube Reporting* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *youtubereporting:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +This documentation was generated from *YouTube Reporting* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *youtubereporting:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. Everything else about the *YouTube Reporting* *v1* API can be found at the [official documentation site](https://developers.google.com/youtube/reporting/v1/reports/). 
# Features -Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/YouTubeReporting) ... +Handle the following *Resources* with ease from the central [hub](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/YouTubeReporting) ... -* [jobs](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::Job) - * [*create*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::JobCreateCall), [*delete*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::JobDeleteCall), [*get*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::JobGetCall), [*list*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::JobListCall), [*reports get*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::JobReportGetCall) and [*reports list*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::JobReportListCall) +* [jobs](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::Job) + * [*create*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::JobCreateCall), [*delete*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::JobDeleteCall), [*get*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::JobGetCall), [*list*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::JobListCall), [*reports get*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::JobReportGetCall) and [*reports list*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::JobReportListCall) * 
media - * [*download*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::MediaDownloadCall) -* [report types](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::ReportType) - * [*list*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::ReportTypeListCall) + * [*download*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::MediaDownloadCall) +* [report types](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::ReportType) + * [*list*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::ReportTypeListCall) Download supported by ... -* [*download media*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/api::MediaDownloadCall) +* [*download media*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/api::MediaDownloadCall) @@ -31,17 +31,17 @@ Download supported by ... 
The API is structured into the following primary items: -* **[Hub](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/YouTubeReporting)** +* **[Hub](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/YouTubeReporting)** * a central object to maintain state and allow accessing all *Activities* - * creates [*Method Builders*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::MethodsBuilder) which in turn - allow access to individual [*Call Builders*](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::CallBuilder) -* **[Resources](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::Resource)** + * creates [*Method Builders*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::MethodsBuilder) which in turn + allow access to individual [*Call Builders*](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::CallBuilder) +* **[Resources](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::Resource)** * primary types that you can apply *Activities* to * a collection of properties and *Parts* - * **[Parts](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::Part)** + * **[Parts](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::Part)** * a collection of properties * never directly used in *Activities* -* **[Activities](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::CallBuilder)** +* **[Activities](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::CallBuilder)** * operations to apply to *Resources* All *structures* are marked with applicable traits to further categorize them and ease browsing. 
@@ -137,17 +137,17 @@ match result { ``` ## Handling Errors -All errors produced by the system are provided either as [Result](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::Result) enumeration as return value of +All errors produced by the system are provided either as [Result](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::Result) enumeration as return value of the doit() methods, or handed as possibly intermediate results to either the -[Hub Delegate](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). +[Hub Delegate](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html). When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This makes the system potentially resilient to all kinds of errors. ## Uploads and Downloads -If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::Result), should be +If a method supports downloads, the response body, which is part of the [Result](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::Result), should be read by you to obtain the media. -If such a method also supports a [Response Result](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::ResponseResult), it will return that by default. +If such a method also supports a [Response Result](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::ResponseResult), it will return that by default. 
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making this call: `.param("alt", "media")`. @@ -157,29 +157,29 @@ Methods supporting uploads can do so using up to 2 different protocols: ## Customization and Callbacks -You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::Delegate) to the -[Method Builder](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::CallBuilder) before making the final `doit()` call. +You may alter the way an `doit()` method is called by providing a [delegate](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::Delegate) to the +[Method Builder](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::CallBuilder) before making the final `doit()` call. Respective methods will be called to provide progress information, as well as determine whether the system should retry on failure. -The [delegate trait](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. +The [delegate trait](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::Delegate) is default-implemented, allowing you to customize it with minimal effort. ## Optional Parts in Server-Requests -All structures provided by this library are made to be [encodable](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::RequestValue) and -[decodable](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::ResponseResult) via *json*. 
Optionals are used to indicate that partial requests are responses +All structures provided by this library are made to be [encodable](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::RequestValue) and +[decodable](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses are valid. -Most optionals are are considered [Parts](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::Part) which are identifiable by name, which will be sent to +Most optionals are are considered [Parts](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::Part) which are identifiable by name, which will be sent to the server to indicate either the set parts of the request or the desired parts in the response. ## Builder Arguments -Using [method builders](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. +Using [method builders](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods. These will always take a single argument, for which the following statements are true. * [PODs][wiki-pod] are handed by copy * strings are passed as `&str` -* [request values](https://docs.rs/google-youtubereporting1/5.0.2-beta-1+20230123/google_youtubereporting1/client::RequestValue) are moved +* [request values](https://docs.rs/google-youtubereporting1/5.0.2+20230123/google_youtubereporting1/client::RequestValue) are moved Arguments will always be copied or cloned into the builder, to make them independent of their original life times. 
diff --git a/gen/youtubereporting1/src/api.rs b/gen/youtubereporting1/src/api.rs index edb0b143da..b2ac3d6c3b 100644 --- a/gen/youtubereporting1/src/api.rs +++ b/gen/youtubereporting1/src/api.rs @@ -129,7 +129,7 @@ impl<'a, S> YouTubeReporting { YouTubeReporting { client, auth: Box::new(auth), - _user_agent: "google-api-rust-client/5.0.2-beta-1".to_string(), + _user_agent: "google-api-rust-client/5.0.2".to_string(), _base_url: "https://youtubereporting.googleapis.com/".to_string(), _root_url: "https://youtubereporting.googleapis.com/".to_string(), } @@ -146,7 +146,7 @@ impl<'a, S> YouTubeReporting { } /// Set the user-agent header field to use in all requests to the server. - /// It defaults to `google-api-rust-client/5.0.2-beta-1`. + /// It defaults to `google-api-rust-client/5.0.2`. /// /// Returns the previously set user-agent. pub fn user_agent(&mut self, agent_name: String) -> String { diff --git a/gen/youtubereporting1/src/client.rs b/gen/youtubereporting1/src/client.rs deleted file mode 100644 index ae3618a136..0000000000 --- a/gen/youtubereporting1/src/client.rs +++ /dev/null @@ -1,789 +0,0 @@ -// COPY OF 'src/rust/api/client.rs' -// DO NOT EDIT -use std::error; -use std::error::Error as StdError; -use std::fmt::{self, Display}; -use std::io::{self, Cursor, Read, Seek, SeekFrom, Write}; -use std::str::FromStr; -use std::thread::sleep; -use std::time::Duration; - -use itertools::Itertools; - -use hyper::http::Uri; - -use hyper::body::Buf; -use hyper::client::connect; -use hyper::header::{HeaderMap, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, USER_AGENT}; -use hyper::Method; -use hyper::StatusCode; - -use mime::{Attr, Mime, SubLevel, TopLevel, Value}; - -use serde_json as json; - -use tokio::io::{AsyncRead, AsyncWrite}; -use tower_service; - -const LINE_ENDING: &str = "\r\n"; - -pub enum Retry { - /// Signal you don't want to retry - Abort, - /// Signals you want to retry after the given duration - After(Duration), -} - -/// Identifies the Hub. 
There is only one per library, this trait is supposed -/// to make intended use more explicit. -/// The hub allows to access all resource methods more easily. -pub trait Hub {} - -/// Identifies types for building methods of a particular resource type -pub trait MethodsBuilder {} - -/// Identifies types which represent builders for a particular resource method -pub trait CallBuilder {} - -/// Identifies types which can be inserted and deleted. -/// Types with this trait are most commonly used by clients of this API. -pub trait Resource {} - -/// Identifies types which are used in API responses. -pub trait ResponseResult {} - -/// Identifies types which are used in API requests. -pub trait RequestValue {} - -/// Identifies types which are not actually used by the API -/// This might be a bug within the google API schema. -pub trait UnusedType {} - -/// Identifies types which are only used as part of other types, which -/// usually are carrying the `Resource` trait. -pub trait Part {} - -/// Identifies types which are only used by other types internally. -/// They have no special meaning, this trait just marks them for completeness. -pub trait NestedType {} - -/// A utility to specify reader types which provide seeking capabilities too -pub trait ReadSeek: Seek + Read + Send {} -impl ReadSeek for T {} - -/// A trait for all types that can convert themselves into a *parts* string -pub trait ToParts { - fn to_parts(&self) -> String; -} - -/// A trait specifying functionality to help controlling any request performed by the API. -/// The trait has a conservative default implementation. -/// -/// It contains methods to deal with all common issues, as well with the ones related to -/// uploading media -pub trait Delegate: Send { - /// Called at the beginning of any API request. The delegate should store the method - /// information if he is interesting in knowing more context when further calls to it - /// are made. 
- /// The matching `finished()` call will always be made, no matter whether or not the API - /// request was successful. That way, the delegate may easily maintain a clean state - /// between various API calls. - fn begin(&mut self, _info: MethodInfo) {} - - /// Called whenever there is an [HttpError](hyper::Error), usually if there are network problems. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - /// - /// Return retry information. - fn http_error(&mut self, _err: &hyper::Error) -> Retry { - Retry::Abort - } - - /// Called whenever there is the need for your applications API key after - /// the official authenticator implementation didn't provide one, for some reason. - /// If this method returns None as well, the underlying operation will fail - fn api_key(&mut self) -> Option { - None - } - - /// Called whenever the Authenticator didn't yield a token. The delegate - /// may attempt to provide one, or just take it as a general information about the - /// impending failure. - /// The given Error provides information about why the token couldn't be acquired in the - /// first place - fn token(&mut self, err: &oauth2::Error) -> Option { - let _ = err; - None - } - - /// Called during resumable uploads to provide a URL for the impending upload. - /// It was saved after a previous call to `store_upload_url(...)`, and if not None, - /// will be used instead of asking the server for a new upload URL. - /// This is useful in case a previous resumable upload was aborted/canceled, but should now - /// be resumed. - /// The returned URL will be used exactly once - if it fails again and the delegate allows - /// to retry, we will ask the server for a new upload URL. - fn upload_url(&mut self) -> Option { - None - } - - /// Called after we have retrieved a new upload URL for a resumable upload to store it - /// in case we fail or cancel. 
That way, we can attempt to resume the upload later, - /// see `upload_url()`. - /// It will also be called with None after a successful upload, which allows the delegate - /// to forget the URL. That way, we will not attempt to resume an upload that has already - /// finished. - fn store_upload_url(&mut self, url: Option<&str>) { - let _ = url; - } - - /// Called whenever a server response could not be decoded from json. - /// It's for informational purposes only, the caller will return with an error - /// accordingly. - /// - /// # Arguments - /// - /// * `json_encoded_value` - The json-encoded value which failed to decode. - /// * `json_decode_error` - The decoder error - fn response_json_decode_error( - &mut self, - json_encoded_value: &str, - json_decode_error: &json::Error, - ) { - let _ = json_encoded_value; - let _ = json_decode_error; - } - - /// Called whenever the http request returns with a non-success status code. - /// This can involve authentication issues, or anything else that very much - /// depends on the used API method. - /// The delegate should check the status, header and decoded json error to decide - /// whether to retry or not. In the latter case, the underlying call will fail. - /// - /// If you choose to retry after a duration, the duration should be chosen using the - /// [exponential backoff algorithm](http://en.wikipedia.org/wiki/Exponential_backoff). - fn http_failure( - &mut self, - _: &hyper::Response, - _err: Option, - ) -> Retry { - Retry::Abort - } - - /// Called prior to sending the main request of the given method. It can be used to time - /// the call or to print progress information. - /// It's also useful as you can be sure that a request will definitely be made. - fn pre_request(&mut self) {} - - /// Return the size of each chunk of a resumable upload. - /// Must be a power of two, with 1<<18 being the smallest allowed chunk size. - /// Will be called once before starting any resumable upload. 
- fn chunk_size(&mut self) -> u64 { - 1 << 23 - } - - /// Called before the given chunk is uploaded to the server. - /// If true is returned, the upload will be interrupted. - /// However, it may be resumable if you stored the upload URL in a previous call - /// to `store_upload_url()` - fn cancel_chunk_upload(&mut self, chunk: &ContentRange) -> bool { - let _ = chunk; - false - } - - /// Called before the API request method returns, in every case. It can be used to clean up - /// internal state between calls to the API. - /// This call always has a matching call to `begin(...)`. - /// - /// # Arguments - /// - /// * `is_success` - a true value indicates the operation was successful. If false, you should - /// discard all values stored during `store_upload_url`. - fn finished(&mut self, is_success: bool) { - let _ = is_success; - } -} - -/// A delegate with a conservative default implementation, which is used if no other delegate is -/// set. -#[derive(Default)] -pub struct DefaultDelegate; - -impl Delegate for DefaultDelegate {} - -#[derive(Debug)] -pub enum Error { - /// The http connection failed - HttpError(hyper::Error), - - /// An attempt was made to upload a resource with size stored in field `.0` - /// even though the maximum upload size is what is stored in field `.1`. - UploadSizeLimitExceeded(u64, u64), - - /// Represents information about a request that was not understood by the server. - /// Details are included. - BadRequest(serde_json::Value), - - /// We needed an API key for authentication, but didn't obtain one. - /// Neither through the authenticator, nor through the Delegate. - MissingAPIKey, - - /// We required a Token, but didn't get one from the Authenticator - MissingToken(oauth2::Error), - - /// The delgate instructed to cancel the operation - Cancelled, - - /// An additional, free form field clashed with one of the built-in optional ones - FieldClash(&'static str), - - /// Shows that we failed to decode the server response. 
- /// This can happen if the protocol changes in conjunction with strict json decoding. - JsonDecodeError(String, json::Error), - - /// Indicates an HTTP repsonse with a non-success status code - Failure(hyper::Response), - - /// An IO error occurred while reading a stream into memory - Io(std::io::Error), -} - -impl Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Io(ref err) => err.fmt(f), - Error::HttpError(ref err) => err.fmt(f), - Error::UploadSizeLimitExceeded(ref resource_size, ref max_size) => writeln!( - f, - "The media size {} exceeds the maximum allowed upload size of {}", - resource_size, max_size - ), - Error::MissingAPIKey => { - (writeln!( - f, - "The application's API key was not found in the configuration" - )) - .ok(); - writeln!( - f, - "It is used as there are no Scopes defined for this method." - ) - } - Error::BadRequest(ref message) => { - writeln!(f, "Bad Request: {}", message)?; - Ok(()) - } - Error::MissingToken(ref err) => { - writeln!(f, "Token retrieval failed with error: {}", err) - } - Error::Cancelled => writeln!(f, "Operation cancelled by delegate"), - Error::FieldClash(field) => writeln!( - f, - "The custom parameter '{}' is already provided natively by the CallBuilder.", - field - ), - Error::JsonDecodeError(ref json_str, ref err) => writeln!(f, "{}: {}", err, json_str), - Error::Failure(ref response) => { - writeln!(f, "Http status indicates failure: {:?}", response) - } - } - } -} - -impl error::Error for Error { - fn source(&self) -> Option<&(dyn error::Error + 'static)> { - match *self { - Error::HttpError(ref err) => err.source(), - Error::JsonDecodeError(_, ref err) => err.source(), - _ => None, - } - } -} - -impl From for Error { - fn from(err: std::io::Error) -> Self { - Error::Io(err) - } -} - -/// A universal result type used as return for all calls. -pub type Result = std::result::Result; - -/// Contains information about an API request. 
-pub struct MethodInfo { - pub id: &'static str, - pub http_method: Method, -} - -const BOUNDARY: &str = "MDuXWGyeE33QFXGchb2VFWc4Z7945d"; - -/// Provides a `Read` interface that converts multiple parts into the protocol -/// identified by [RFC2387](https://tools.ietf.org/html/rfc2387). -/// **Note**: This implementation is just as rich as it needs to be to perform uploads -/// to google APIs, and might not be a fully-featured implementation. -#[derive(Default)] -pub struct MultiPartReader<'a> { - raw_parts: Vec<(HeaderMap, &'a mut (dyn Read + Send))>, - current_part: Option<(Cursor>, &'a mut (dyn Read + Send))>, - last_part_boundary: Option>>, -} - -impl<'a> MultiPartReader<'a> { - /// Reserve memory for exactly the given amount of parts - pub fn reserve_exact(&mut self, cap: usize) { - self.raw_parts.reserve_exact(cap); - } - - /// Add a new part to the queue of parts to be read on the first `read` call. - /// - /// # Arguments - /// - /// `headers` - identifying the body of the part. It's similar to the header - /// in an ordinary single-part call, and should thus contain the - /// same information. - /// `reader` - a reader providing the part's body - /// `size` - the amount of bytes provided by the reader. It will be put onto the header as - /// content-size. - /// `mime` - It will be put onto the content type - pub fn add_part( - &mut self, - reader: &'a mut (dyn Read + Send), - size: u64, - mime_type: Mime, - ) -> &mut MultiPartReader<'a> { - let mut headers = HeaderMap::new(); - headers.insert( - CONTENT_TYPE, - hyper::header::HeaderValue::from_str(&format!("{}", mime_type)).unwrap(), - ); - headers.insert(CONTENT_LENGTH, size.into()); - self.raw_parts.push((headers, reader)); - self - } - - /// Returns the mime-type representing our multi-part message. - /// Use it with the ContentType header. 
- pub fn mime_type(&self) -> Mime { - Mime( - TopLevel::Multipart, - SubLevel::Ext("related".to_string()), - vec![( - Attr::Ext("boundary".to_string()), - Value::Ext(BOUNDARY.to_string()), - )], - ) - } - - /// Returns true if we are totally used - fn is_depleted(&self) -> bool { - self.raw_parts.is_empty() - && self.current_part.is_none() - && self.last_part_boundary.is_none() - } - - /// Returns true if we are handling our last part - fn is_last_part(&self) -> bool { - self.raw_parts.is_empty() && self.current_part.is_some() - } -} - -impl<'a> Read for MultiPartReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - match ( - self.raw_parts.len(), - self.current_part.is_none(), - self.last_part_boundary.is_none(), - ) { - (_, _, false) => { - let br = self - .last_part_boundary - .as_mut() - .unwrap() - .read(buf) - .unwrap_or(0); - if br < buf.len() { - self.last_part_boundary = None; - } - return Ok(br); - } - (0, true, true) => return Ok(0), - (n, true, _) if n > 0 => { - let (headers, reader) = self.raw_parts.remove(0); - let mut c = Cursor::new(Vec::::new()); - // TODO: The first line ending should be omitted for the first part, - // fortunately Google's API serves don't seem to mind. 
- (write!( - &mut c, - "{}--{}{}{}{}{}", - LINE_ENDING, - BOUNDARY, - LINE_ENDING, - headers - .iter() - .map(|(k, v)| format!("{}: {}", k, v.to_str().unwrap())) - .join(LINE_ENDING), - LINE_ENDING, - LINE_ENDING, - )) - .unwrap(); - c.seek(SeekFrom::Start(0)).unwrap(); - self.current_part = Some((c, reader)); - } - _ => {} - } - - // read headers as long as possible - let (hb, rr) = { - let &mut (ref mut c, ref mut reader) = self.current_part.as_mut().unwrap(); - let b = c.read(buf).unwrap_or(0); - (b, reader.read(&mut buf[b..])) - }; - - match rr { - Ok(bytes_read) => { - if hb < buf.len() && bytes_read == 0 { - if self.is_last_part() { - // before clearing the last part, we will add the boundary that - // will be written last - self.last_part_boundary = Some(Cursor::new( - format!("{}--{}--{}", LINE_ENDING, BOUNDARY, LINE_ENDING).into_bytes(), - )) - } - // We are depleted - this can trigger the next part to come in - self.current_part = None; - } - let mut total_bytes_read = hb + bytes_read; - while total_bytes_read < buf.len() && !self.is_depleted() { - match self.read(&mut buf[total_bytes_read..]) { - Ok(br) => total_bytes_read += br, - Err(err) => return Err(err), - } - } - Ok(total_bytes_read) - } - Err(err) => { - // fail permanently - self.current_part = None; - self.last_part_boundary = None; - self.raw_parts.clear(); - Err(err) - } - } - } -} - -/// The `X-Upload-Content-Type` header. -/// -/// Generated via rustc --pretty expanded -Z unstable-options, and manually -/// processed to be more readable. 
-#[derive(PartialEq, Debug, Clone)] -pub struct XUploadContentType(pub Mime); - -impl ::std::ops::Deref for XUploadContentType { - type Target = Mime; - fn deref(&self) -> &Mime { - &self.0 - } -} -impl ::std::ops::DerefMut for XUploadContentType { - fn deref_mut(&mut self) -> &mut Mime { - &mut self.0 - } -} -impl Display for XUploadContentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct Chunk { - pub first: u64, - pub last: u64, -} - -impl fmt::Display for Chunk { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - (write!(fmt, "{}-{}", self.first, self.last)).ok(); - Ok(()) - } -} - -impl FromStr for Chunk { - type Err = &'static str; - - /// NOTE: only implements `%i-%i`, not `*` - fn from_str(s: &str) -> std::result::Result { - let parts: Vec<&str> = s.split('-').collect(); - if parts.len() != 2 { - return Err("Expected two parts: %i-%i"); - } - Ok(Chunk { - first: match FromStr::from_str(parts[0]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'first' as digit"), - }, - last: match FromStr::from_str(parts[1]) { - Ok(d) => d, - _ => return Err("Couldn't parse 'last' as digit"), - }, - }) - } -} - -/// Implements the Content-Range header, for serialization only -#[derive(Clone, PartialEq, Debug)] -pub struct ContentRange { - pub range: Option, - pub total_length: u64, -} - -impl ContentRange { - pub fn header_value(&self) -> String { - format!( - "bytes {}/{}", - match self.range { - Some(ref c) => format!("{}", c), - None => "*".to_string(), - }, - self.total_length - ) - } -} - -#[derive(Clone, PartialEq, Debug)] -pub struct RangeResponseHeader(pub Chunk); - -impl RangeResponseHeader { - fn from_bytes(raw: &[u8]) -> Self { - if !raw.is_empty() { - if let Ok(s) = std::str::from_utf8(raw) { - const PREFIX: &str = "bytes "; - if let Some(stripped) = s.strip_prefix(PREFIX) { - if let Ok(c) = ::from_str(&stripped) { - return RangeResponseHeader(c); - 
} - } - } - } - - panic!("Unable to parse Range header {:?}", raw) - } -} - -/// A utility type to perform a resumable upload from start to end. -pub struct ResumableUploadHelper<'a, A: 'a, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - pub client: &'a hyper::client::Client< - S, - hyper::body::Body, - >, - pub delegate: &'a mut dyn Delegate, - pub start_at: Option, - pub auth: &'a A, - pub user_agent: &'a str, - pub auth_header: String, - pub url: &'a str, - pub reader: &'a mut dyn ReadSeek, - pub media_type: Mime, - pub content_length: u64, -} - -impl<'a, A, S> ResumableUploadHelper<'a, A, S> -where - S: tower_service::Service + Clone + Send + Sync + 'static, - S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static, - S::Future: Send + Unpin + 'static, - S::Error: Into>, -{ - async fn query_transfer_status( - &mut self, - ) -> std::result::Result>> { - loop { - match self - .client - .request( - hyper::Request::builder() - .method(hyper::Method::POST) - .uri(self.url) - .header(USER_AGENT, self.user_agent.to_string()) - .header( - "Content-Range", - ContentRange { - range: None, - total_length: self.content_length, - } - .header_value(), - ) - .header(AUTHORIZATION, self.auth_header.clone()) - .body(hyper::body::Body::empty()) - .unwrap(), - ) - .await - { - Ok(r) => { - // 308 = resume-incomplete == PermanentRedirect - let headers = r.headers().clone(); - let h: RangeResponseHeader = match headers.get("Range") { - Some(hh) if r.status() == StatusCode::PERMANENT_REDIRECT => { - RangeResponseHeader::from_bytes(hh.as_bytes()) - } - None | Some(_) => { - if let Retry::After(d) = self.delegate.http_failure(&r, None) { - sleep(d); - continue; - } - return Err(Ok(r)); - } - }; - return Ok(h.0.last); - } - Err(err) => { - if let Retry::After(d) 
= self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Err(Err(err)); - } - } - } - } - - /// returns None if operation was cancelled by delegate, or the HttpResult. - /// It can be that we return the result just because we didn't understand the status code - - /// caller should check for status himself before assuming it's OK to use - pub async fn upload(&mut self) -> Option>> { - let mut start = match self.start_at { - Some(s) => s, - None => match self.query_transfer_status().await { - Ok(s) => s, - Err(result) => return Some(result), - }, - }; - - const MIN_CHUNK_SIZE: u64 = 1 << 18; - let chunk_size = match self.delegate.chunk_size() { - cs if cs > MIN_CHUNK_SIZE => cs, - _ => MIN_CHUNK_SIZE, - }; - - loop { - self.reader.seek(SeekFrom::Start(start)).unwrap(); - - let request_size = match self.content_length - start { - rs if rs > chunk_size => chunk_size, - rs => rs, - }; - - let mut section_reader = self.reader.take(request_size); - let mut req_bytes = vec![]; - section_reader.read_to_end(&mut req_bytes).unwrap(); - let range_header = ContentRange { - range: Some(Chunk { - first: start, - last: start + request_size - 1, - }), - total_length: self.content_length, - }; - if self.delegate.cancel_chunk_upload(&range_header) { - return None; - } - let res = self - .client - .request( - hyper::Request::builder() - .uri(self.url) - .method(hyper::Method::POST) - .header("Content-Range", range_header.header_value()) - .header(CONTENT_TYPE, format!("{}", self.media_type)) - .header(USER_AGENT, self.user_agent.to_string()) - .body(hyper::body::Body::from(req_bytes)) - .unwrap(), - ) - .await; - match res { - Ok(res) => { - start += request_size; - - if res.status() == StatusCode::PERMANENT_REDIRECT { - continue; - } - - let (res_parts, res_body) = res.into_parts(); - let res_body = match hyper::body::to_bytes(res_body).await { - Ok(res_body) => res_body.into_iter().collect(), - Err(err) => return Some(Err(err)), - }; - let res_body_string: String = 
String::from_utf8(res_body).unwrap(); - let reconstructed_result = - hyper::Response::from_parts(res_parts, res_body_string.clone().into()); - - if !reconstructed_result.status().is_success() { - if let Retry::After(d) = self.delegate.http_failure( - &reconstructed_result, - json::from_str(&res_body_string).ok(), - ) { - sleep(d); - continue; - } - } - return Some(Ok(reconstructed_result)); - } - Err(err) => { - if let Retry::After(d) = self.delegate.http_error(&err) { - sleep(d); - continue; - } - return Some(Err(err)); - } - } - } - } -} - -// Copy of src/rust/cli/client.rs -// TODO(ST): Allow sharing common code between program types -pub fn remove_json_null_values(value: &mut json::value::Value) { - match *value { - json::value::Value::Object(ref mut map) => { - let mut for_removal = Vec::new(); - - for (key, mut value) in map.iter_mut() { - if value.is_null() { - for_removal.push(key.clone()); - } else { - remove_json_null_values(&mut value); - } - } - - for key in &for_removal { - map.remove(key); - } - } - json::value::Value::Array(ref mut arr) => { - let mut i = 0; - while i < arr.len() { - if arr[i].is_null() { - arr.remove(i); - } else { - remove_json_null_values(&mut arr[i]); - i += 1; - } - } - } - _ => {} - } -} - -// Borrowing the body object as mutable and converts it to a string -pub async fn get_body_as_string(res_body: &mut hyper::Body) -> String { - let res_body_buf = hyper::body::to_bytes(res_body).await.unwrap(); - let res_body_string = String::from_utf8_lossy(&res_body_buf); - res_body_string.to_string() -} diff --git a/gen/youtubereporting1/src/lib.rs b/gen/youtubereporting1/src/lib.rs index b9df5ea29c..67e6229f27 100644 --- a/gen/youtubereporting1/src/lib.rs +++ b/gen/youtubereporting1/src/lib.rs @@ -2,7 +2,7 @@ // This file was generated automatically from 'src/generator/templates/api/lib.rs.mako' // DO NOT EDIT ! -//! 
This documentation was generated from *YouTube Reporting* crate version *5.0.2-beta-1+20230123*, where *20230123* is the exact revision of the *youtubereporting:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2-beta-1*. +//! This documentation was generated from *YouTube Reporting* crate version *5.0.2+20230123*, where *20230123* is the exact revision of the *youtubereporting:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v5.0.2*. //! //! Everything else about the *YouTube Reporting* *v1* API can be found at the //! [official documentation site](https://developers.google.com/youtube/reporting/v1/reports/).